{-# LANGUAGE CPP, BangPatterns, InstanceSigs, RankNTypes, ScopedTypeVariables, TypeFamilies, UndecidableInstances #-}
-- | Continuation-passing parser for context-free grammars that keeps track of the parsed prefix length
module Text.Grampa.ContextFree.Continued.Measured (Parser(..), Result(..), alt) where
import Control.Applicative (Applicative(..), Alternative(..), liftA2)
import Control.Monad (Monad(..), MonadPlus(..))
#if MIN_VERSION_base(4,13,0)
import Control.Monad (MonadFail(fail))
#endif
import Data.Functor.Classes (Show1(..))
import Data.Functor.Compose (Compose(..))
import Data.Semigroup (Semigroup(..))
import Data.Monoid (Monoid(mappend, mempty))
import Data.Monoid.Factorial(FactorialMonoid)
import Data.Monoid.Textual(TextualMonoid)
import Data.String (fromString)
import Debug.Trace (trace)
import Witherable (Filterable(mapMaybe))
import qualified Data.Monoid.Factorial as Factorial
import qualified Data.Monoid.Null as Null
import qualified Data.Monoid.Textual as Textual
import qualified Data.Semigroup.Cancellative as Cancellative
import qualified Rank2
import qualified Text.Parser.Char
import Text.Parser.Char (CharParsing)
import Text.Parser.Combinators (Parsing(..))
import Text.Parser.LookAhead (LookAheadParsing(..))
import Text.Parser.Input.Position (fromEnd)
import Text.Grampa.Class (CommittedParsing(..), DeterministicParsing(..),
InputParsing(..), InputCharParsing(..), ConsumedInputParsing(..),
MultiParsing(..), ParseResults, ParseFailure(..), FailureDescription(..), Pos)
import Text.Grampa.Internal (expected, erroneous, TraceableParsing(..))
data Result (g :: (* -> *) -> *) s v = Parsed{parsedPrefix :: !v,
parsedSuffix :: !s}
| NoParse (ParseFailure Pos s)
-- | Parser type for context-free grammars that uses a continuation-passing algorithm, fast for grammars in LL(1)
-- class but with potentially exponential performance for longer ambiguous prefixes.
newtype Parser (g :: (* -> *) -> *) s r =
Parser{applyParser :: forall x. s -> (r -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x}
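-- A sketch (not part of the original module) of how 'applyParser' can be run
-- directly: instantiating the answer type @x@ to 'Either' turns the two
-- continuations into the failure and success branches of the final result.
--
-- @
-- runDirect :: Parser g s r -> s -> Either (ParseFailure Pos s) (r, Int, s)
-- runDirect (Parser p) input = p input (\r len rest _ -> Right (r, len, rest)) Left
-- @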
instance Show s => Show1 (Result g s) where
liftShowsPrec showsPrecSub _showList prec Parsed{parsedPrefix= r} rest = "Parsed " ++ showsPrecSub prec r rest
liftShowsPrec _showsPrec _showList _prec (NoParse f) rest = "NoParse " ++ shows f rest
instance Functor (Result g s) where
fmap f (Parsed a rest) = Parsed (f a) rest
fmap _ (NoParse failure) = NoParse failure
instance Functor (Parser g s) where
fmap f (Parser p) = Parser (\input success-> p input (success . f))
{-# INLINABLE fmap #-}
instance Applicative (Parser g s) where
pure a = Parser (\input success failure-> success a 0 input failure)
(<*>) :: forall a b. Parser g s (a -> b) -> Parser g s a -> Parser g s b
Parser p <*> Parser q = Parser r where
r :: forall x. s -> (b -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
r rest success failure = p rest (\f len rest'-> q rest' (\a len'-> success (f a) $! len + len')) failure
{-# INLINABLE (<*>) #-}
instance (Factorial.FactorialMonoid s, Ord s) => Alternative (Parser g s) where
empty = Parser (\rest _ failure-> failure $ ParseFailure (fromEnd $ Factorial.length rest) [] [])
(<|>) = alt
-- | A named and unconstrained version of the '<|>' operator
alt :: forall g s a. Ord s => Parser g s a -> Parser g s a -> Parser g s a
Parser p `alt` Parser q = Parser r where
r :: forall x. s -> (a -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
r rest success failure = p rest success' failure'
where success' a len rest' _ = success a len rest' failure'
failure' f1 = q rest success (\f2 -> failure (f1 <> f2))
instance Factorial.FactorialMonoid s => Filterable (Parser g s) where
mapMaybe :: forall a b. (a -> Maybe b) -> Parser g s a -> Parser g s b
mapMaybe f (Parser p) = Parser q where
q :: forall x. s -> (b -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
q rest success failure = p rest (maybe filterFailure success . f) failure
where filterFailure _ _ _ = failure (expected (fromEnd $ Factorial.length rest) "filter")
{-# INLINABLE mapMaybe #-}
#if MIN_VERSION_base(4,13,0)
instance Monad (Parser g s) where
#else
instance Factorial.FactorialMonoid s => Monad (Parser g s) where
#endif
return = pure
(>>=) :: forall a b. Parser g s a -> (a -> Parser g s b) -> Parser g s b
Parser p >>= f = Parser r where
r :: forall x. s -> (b -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
r rest success failure = p rest
(\a len rest'-> applyParser (f a) rest' $ \b len'-> success b $! len + len')
failure
#if MIN_VERSION_base(4,13,0)
instance Factorial.FactorialMonoid s => MonadFail (Parser g s) where
#endif
fail msg = Parser (\rest _ failure-> failure $ erroneous (fromEnd $ Factorial.length rest) msg)
instance (Factorial.FactorialMonoid s, Ord s) => MonadPlus (Parser g s) where
mzero = empty
mplus = (<|>)
instance Semigroup x => Semigroup (Parser g s x) where
(<>) = liftA2 (<>)
instance Monoid x => Monoid (Parser g s x) where
mempty = pure mempty
mappend = liftA2 mappend
instance (Factorial.FactorialMonoid s, Ord s) => Parsing (Parser g s) where
try :: forall a. Parser g s a -> Parser g s a
try (Parser p) = Parser q
where q :: forall x. s -> (a -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
q input success failure = p input success (failure . rewindFailure)
where rewindFailure ParseFailure{} = ParseFailure (fromEnd $ Factorial.length input) [] []
(<?>) :: forall a. Parser g s a -> String -> Parser g s a
Parser p <?> msg = Parser q
where q :: forall x. s -> (a -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
q input success failure = p input success (failure . replaceFailure)
where replaceFailure (ParseFailure pos msgs erroneous') =
ParseFailure pos (if pos == fromEnd (Factorial.length input) then [StaticDescription msg]
else msgs) erroneous'
eof = Parser p
where p rest success failure
| Null.null rest = success () 0 rest failure
| otherwise = failure (expected (fromEnd $ Factorial.length rest) "end of input")
unexpected msg = Parser (\t _ failure -> failure $ erroneous (fromEnd $ Factorial.length t) msg)
notFollowedBy (Parser p) = Parser q
where q :: forall x. s -> (() -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
q input success failure = p input success' failure'
where success' _ _ _ _ = failure (expected (fromEnd $ Factorial.length input) "notFollowedBy")
failure' _ = success () 0 input failure
instance (Factorial.FactorialMonoid s, Ord s) => DeterministicParsing (Parser g s) where
(<<|>) :: forall a. Parser g s a -> Parser g s a -> Parser g s a
Parser p <<|> Parser q = Parser r where
r :: forall x. s -> (a -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
r rest success failure = p rest success' failure'
where success' a len rest' _ = success a len rest' failure
failure' f1 = q rest success (\f2 -> failure (f1 <> f2))
takeSome p = (:) <$> p <*> takeMany p
takeMany p = takeSome p <<|> pure []
instance (FactorialMonoid s, Ord s) => CommittedParsing (Parser g s) where
type CommittedResults (Parser g s) = ParseResults s
commit :: forall a. Parser g s a -> Parser g s (ParseResults s a)
commit (Parser p) = Parser q
where q :: forall x. s -> (ParseResults s a -> Int -> s -> (ParseFailure Pos s -> x) -> x)
-> (ParseFailure Pos s -> x) -> x
q input success failure = p input (success . Right) failure'
where failure' f = success (Left f) 0 input failure
admit :: forall a. Parser g s (ParseResults s a) -> Parser g s a
admit (Parser p) = Parser q
where q :: forall x. s -> (a -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
q input success failure = p input success' failure
where success' (Left f) _len _rest = const (failure f)
success' (Right a) len rest = success a len rest
instance (Factorial.FactorialMonoid s, Ord s) => LookAheadParsing (Parser g s) where
lookAhead :: forall a. Parser g s a -> Parser g s a
lookAhead (Parser p) = Parser q
where q :: forall x. s -> (a -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
q input success failure = p input success' failure'
where success' a _ _ = success a 0 input
failure' f = failure f
instance (Show s, Ord s, TextualMonoid s) => CharParsing (Parser g s) where
satisfy predicate = Parser p
where p :: forall x. s -> (Char -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure =
case Textual.splitCharacterPrefix rest
of Just (first, suffix) | predicate first -> success first 1 suffix failure
_ -> failure (expected (fromEnd $ Factorial.length rest) "Char.satisfy")
string s = Textual.toString (error "unexpected non-character") <$> string (fromString s)
text t = (fromString . Textual.toString (error "unexpected non-character")) <$> string (Textual.fromText t)
instance (Cancellative.LeftReductive s, Factorial.FactorialMonoid s, Ord s) => InputParsing (Parser g s) where
type ParserInput (Parser g s) = s
getInput = Parser p
where p rest success failure = success rest 0 rest failure
anyToken = Parser p
where p rest success failure =
case Factorial.splitPrimePrefix rest
of Just (first, suffix) -> success first 1 suffix failure
_ -> failure (expected (fromEnd $ Factorial.length rest) "anyToken")
satisfy predicate = Parser p
where p :: forall x. s -> (s -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure =
case Factorial.splitPrimePrefix rest
of Just (first, suffix) | predicate first -> success first 1 suffix failure
_ -> failure (expected (fromEnd $ Factorial.length rest) "satisfy")
notSatisfy predicate = Parser p
where p :: forall x. s -> (() -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure =
case Factorial.splitPrimePrefix rest
of Just (first, _)
| predicate first -> failure (expected (fromEnd $ Factorial.length rest) "notSatisfy")
_ -> success () 0 rest failure
scan :: forall state. state -> (state -> s -> Maybe state) -> Parser g s s
scan s0 f = Parser (p s0)
where p :: forall x. state -> s -> (s -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p s rest success failure = success prefix len suffix failure
where (prefix, suffix, _) = Factorial.spanMaybe' s f rest
!len = Factorial.length prefix
take n = Parser p
where p :: forall x. s -> (s -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure
| (prefix, suffix) <- Factorial.splitAt n rest,
len <- Factorial.length prefix, len == n = success prefix len suffix failure
| otherwise = failure (expected (fromEnd $ Factorial.length rest) $ "take " ++ show n)
takeWhile predicate = Parser p
where p :: forall x. s -> (s -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure
| (prefix, suffix) <- Factorial.span predicate rest,
!len <- Factorial.length prefix =
success prefix len suffix failure
takeWhile1 predicate = Parser p
where p :: forall x. s -> (s -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure
| (prefix, suffix) <- Factorial.span predicate rest,
!len <- Factorial.length prefix =
if len == 0
then failure (expected (fromEnd $ Factorial.length rest) "takeWhile1")
else success prefix len suffix failure
string s = Parser p where
p :: forall x. s -> (s -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p s' success failure
| Just suffix <- Cancellative.stripPrefix s s', !len <- Factorial.length s = success s len suffix failure
| otherwise = failure (ParseFailure (fromEnd $ Factorial.length s') [LiteralDescription s] [])
{-# INLINABLE string #-}
instance (Cancellative.LeftReductive s, FactorialMonoid s, Ord s) => ConsumedInputParsing (Parser g s) where
match :: forall a. Parser g s a -> Parser g s (s, a)
match (Parser p) = Parser q
where q :: forall x. s -> ((s, a) -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
q rest success failure = p rest success' failure
where success' r !len suffix failure' = success (Factorial.take len rest, r) len suffix failure'
instance InputParsing (Parser g s) => TraceableParsing (Parser g s) where
traceInput :: forall a. (s -> String) -> Parser g s a -> Parser g s a
traceInput description (Parser p) = Parser q
where q :: forall x. s -> (a -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
q rest success failure = traceWith "Parsing " (p rest success' failure')
where traceWith prefix = trace (prefix <> description rest)
failure' f = traceWith "Failed " (failure f)
success' r !len suffix failure'' = traceWith "Parsed " (success r len suffix failure'')
instance (Ord s, Show s, TextualMonoid s) => InputCharParsing (Parser g s) where
satisfyCharInput predicate = Parser p
where p :: forall x. s -> (s -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure =
case Textual.splitCharacterPrefix rest
of Just (first, suffix) | predicate first -> success (Factorial.primePrefix rest) 1 suffix failure
_ -> failure (expected (fromEnd $ Factorial.length rest) "satisfyChar")
notSatisfyChar predicate = Parser p
where p :: forall x. s -> (() -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure =
case Textual.characterPrefix rest
of Just first | predicate first
-> failure (expected (fromEnd $ Factorial.length rest) "notSatisfyChar")
_ -> success () 0 rest failure
scanChars :: forall state. state -> (state -> Char -> Maybe state) -> Parser g s s
scanChars s0 f = Parser (p s0)
where p :: forall x. state -> s -> (s -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p s rest success failure = success prefix len suffix failure
where (prefix, suffix, _) = Textual.spanMaybe_' s f rest
!len = Factorial.length prefix
takeCharsWhile predicate = Parser p
where p :: forall x. s -> (s -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure
| (prefix, suffix) <- Textual.span_ False predicate rest,
!len <- Factorial.length prefix = success prefix len suffix failure
takeCharsWhile1 predicate = Parser p
where p :: forall x. s -> (s -> Int -> s -> (ParseFailure Pos s -> x) -> x) -> (ParseFailure Pos s -> x) -> x
p rest success failure
| Null.null prefix = failure (expected (fromEnd $ Factorial.length rest) "takeCharsWhile1")
| otherwise = len `seq` success prefix len suffix failure
where (prefix, suffix) = Textual.span_ False predicate rest
len = Factorial.length prefix
-- | Continuation-passing context-free parser that keeps track of the parsed prefix length
--
-- @
-- 'parseComplete' :: ("Rank2".'Rank2.Functor' g, 'FactorialMonoid' s) =>
-- g (Continued.'Parser' g s) -> s -> g ('ParseResults' s)
-- @
instance (Cancellative.LeftReductive s, Factorial.FactorialMonoid s, Ord s) => MultiParsing (Parser g s) where
type ResultFunctor (Parser g s) = ParseResults s
-- | Returns an input prefix parse paired with the remaining input suffix.
parsePrefix g input = Rank2.fmap (Compose . (\p-> applyParser p input (\a _ rest _-> Right (rest, a)) Left))
g
parseComplete g input = Rank2.fmap (\p-> applyParser p input (const . const . const . Right) Left)
(Rank2.fmap (<* eof) g)
-- Source: blamario/grampa | grammatical-parsers/src/Text/Grampa/ContextFree/Continued/Measured.hs | bsd-2-clause
import Text.ParserCombinators.Parsec
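-- What the parsers below encode (inferred from the code; comments are not in
-- the original source): 'blue' matches "ab" followed by zero or more 'a's,
-- 'green' matches 'b' followed by a 'blue', and the start symbol 'red' is a
-- 'blue' followed by a 'green'.  Each parser returns a cost; 'prs' maps a
-- parse failure to 0, which 'tst' reports as "IMPOSSIBLE".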
x (Right a) = a
x (Left _) = 0
prs = x . (parse r "!")
blue = do
char 'a'
char 'b'
n <- many (char 'a')
return $ 2 + (2 * (length n))
green = do
char 'b'
b <- blue
return $ 2 + b
red = do
b <- blue
g <- green
return $ 3 + b + g
r = red
tst = do
str <- getLine
let res = prs str
putStrLn (if res == 0 then "IMPOSSIBLE" else (show res))
main = do
t <- getLine
mapM_ (const tst) [1 .. (read t)]
-- Source: pbl64k/CodeSprints | CodeNinja-2012-03-31-Amazon/MagicMachines/mm.hs | bsd-2-clause
-- |Converts types to contracts.
module BrownPLT.TypedJS.Contracts
( toContract
, toInterface
, encapsulate
, encapsulateTypedModule
, getContractsLib
) where
import BrownPLT.TypedJS.Prelude
import qualified Data.Map as M
import System.FilePath
import Paths_TypedJavaScript -- generated by Cabal
import BrownPLT.JavaScript.Contracts (Contract (..), getContractLibraryPath,
InterfaceItem (..), compile)
import BrownPLT.TypedJS.Types (Env)
import BrownPLT.TypedJS.Syntax (Type (..))
import qualified BrownPLT.JavaScript as JavaScript
pos = noPos
-- |'toContract' assumes that the supplied type is closed and well-formed.
toContract :: Type -> Contract
{-toContract (TFunc (this:reqargs) maybeVararg result _) =
FunctionContract noPos (map toContract reqargs) varargCtc (toContract result)
where varargCtc = case maybeVararg of
Nothing -> Nothing
Just vararg -> Just (toContract vararg)-}
toContract (TId id) =
error $ "unbound identifier (" ++ show id ++ ") at " ++ show pos ++
" while converting a type to a contract"
-- TODO: This should not be hard-coded. Fix when we enable
-- user-defined types. Also add nullable contract.
toContract (TApp constr args) = case (constr,args) of
("bool",[]) -> NamedContract pos "isBool"
("int",[]) -> NamedContract pos "isInt"
("string",[]) -> NamedContract pos "isString"
(constr,args) -> error $ "toContract does not know " ++ show (constr,args)
--toContract (TNullable pos t) = FlatContract pos JavaScript
toContract x = error $ "toContract does not handle " ++ show x
--TODO: add arrayOf for objects with @[] in them.
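-- For illustration (not in the original source): @toContract (TApp "int" [])@
-- yields @NamedContract pos "isInt"@, while an unbound 'TId' or an unknown
-- type constructor is a hard error.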
-- |Converts a type environment (presumably, a top-level type-environment)
-- to a list of 'InterfaceItem's.
toInterface :: Env
-> [InterfaceItem]
toInterface env = map toExport (M.toList env) where
toExport (v,t) = InterfaceExport v noPos
(toContract' t)
toContract' Nothing = error "Contracts.hs : export without type"
toContract' (Just (type_, _, _)) = toContract type_
-- |Wraps a Typed JavaScript module (after type-erasure) with contracts. The
-- result is a single function-application statement.
encapsulate :: [JavaScript.Statement SourcePos] -- ^type-erased tJS
-> Env -- ^environment (i.e. exports)
-> [JavaScript.Statement SourcePos] -- ^contract library
-> JavaScript.Statement SourcePos -- ^encapsulated module
encapsulate typeErasedStmts env contractLib = wrappedStmts where
interface = toInterface env
wrappedStmts = compile typeErasedStmts interface contractLib
getContractsLib = do
contractLib <- getContractLibraryPath
dataDir <- getDataDir
let typedContractLib = dataDir</>"typedjs_contracts.js"
contractLibStmts <- JavaScript.parseJavaScriptFromFile contractLib
typedContractLibStmts <- JavaScript.parseJavaScriptFromFile typedContractLib
return $ contractLibStmts ++ typedContractLibStmts
encapsulateTypedModule :: [JavaScript.Statement SourcePos]
-> Env
-> IO (JavaScript.Statement SourcePos)
encapsulateTypedModule typeErasedStmts env = do
lib <- getContractsLib
return $ encapsulate typeErasedStmts env lib
-- Source: brownplt/strobe-old | src/BrownPLT/TypedJS/Contracts.hs | bsd-2-clause
module Main where
import Database.TinkerPop
import Database.TinkerPop.Types
import Data.Text
import Data.Aeson
import Data.Aeson.Types
import qualified Data.HashMap.Strict as H
import Control.Concurrent
import Control.Monad
import Control.Monad.Trans (liftIO)
import Control.Lens hiding ((.=), (.:))
import Data.Aeson.Lens
main :: IO ()
main = do
run "localhost" 8182 $ \conn -> do
-- DROP Database
submit conn "g.V().drop()" Nothing >>= print
    let addV = "graph.addVertex(label, l, 'name', n)"
-- add 'haskell' vertex
haskell <- submit conn addV (Just $ H.fromList ["l" .= ("language" :: Text), "n" .= ("haskell" :: Text)])
print haskell
let idHaskell = getId haskell
-- add (library) vertexes
yesod <- submit conn addV (Just $ H.fromList ["l" .= ("library" :: Text), "n" .= ("yesod" :: Text)])
print yesod
let idYesod = getId yesod
idAeson <- getId <$> submit conn addV (Just $ H.fromList ["l" .= ("library" :: Text), "n" .= ("aeson" :: Text)])
idLens <- getId <$> submit conn addV (Just $ H.fromList ["l" .= ("library" :: Text), "n" .= ("lens" :: Text)])
-- add (library -written-> language) edge
    mapM (\lib -> submit conn "g.V(from).next().addEdge('written', g.V(to).next())" (Just $ H.fromList ["from" .= lib, "to" .= idHaskell])) [idYesod, idAeson, idLens] >>= print
-- query
submit conn "g.V().has('name', 'haskell').in('written').values()" Nothing >>= print
where getId = (^? _Right . element 0 . key "id" . _Integer)
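        -- 'getId' above digs the numeric "id" field out of the first element
        -- of a successful 'submit' response, using lens-aeson traversals.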
-- Source: nakaji-dayo/gremlin-haskell | examples/Main.hs | bsd-3-clause
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RecordWildCards #-}
module Dang.TypeCheck.Monad (
TC(), runTC,
Subst.Unify, unify,
Subst.Zonk, zonk, ftvs,
) where
import Dang.Monad (Dang)
import Dang.TypeCheck.AST (TVar)
import qualified Dang.TypeCheck.Subst as Subst
import Control.Applicative (Alternative(..))
import Control.Monad (MonadPlus(..))
import qualified Data.Set as Set
import MonadLib (BaseM(..),StateT,get,set,runM)
data RW = RW { rwSubst :: !Subst.Subst
}
newtype TC a = TC { unTC :: StateT RW Dang a
} deriving (Functor,Applicative,Monad)
runTC :: TC a -> Dang a
runTC (TC m) = fst `fmap` runM m RW { rwSubst = Subst.emptySubst }
instance BaseM TC Dang where
inBase m = TC (inBase m)
{-# INLINE inBase #-}
instance Alternative TC where
empty = TC empty
a <|> b = TC (unTC a <|> unTC b)
{-# INLINE empty #-}
{-# INLINE (<|>) #-}
instance MonadPlus TC where
mzero = TC mzero
mplus a b = TC (unTC a `mplus` unTC b)
{-# INLINE mzero #-}
{-# INLINE mplus #-}
-- | Unify two things that have types, and update the internal state.
unify :: Subst.Unify a => a -> a -> TC ()
unify a b = TC $
do RW { .. } <- get
su' <- Subst.unify rwSubst a b
set $! RW { rwSubst = su' }
-- | Remove type variables from a thing that has types.
--
-- NOTE: this will fail if the type given is infinite.
zonk :: Subst.Zonk a => a -> TC a
zonk a = TC $
do RW { .. } <- get
Subst.zonk rwSubst a
-- | Calculate the free variables of a thing that has types.
--
-- NOTE: this will fail if the type given is infinite.
ftvs :: Subst.Zonk a => a -> TC (Set.Set TVar)
ftvs a = TC $
do RW { .. } <- get
Subst.ftvs rwSubst a
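-- A usage sketch (not part of this module): unify an expected and an actual
-- type, then zonk a third value so that the resulting substitution is applied
-- to it.
--
-- @
-- checkAgainst :: (Subst.Unify ty, Subst.Zonk ty) => ty -> ty -> ty -> TC ty
-- checkAgainst expected actual t = do unify expected actual
--                                     zonk t
-- @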
-- Source: elliottt/dang | src/Dang/TypeCheck/Monad.hs | bsd-3-clause
{-# LANGUAGE ImplicitParams #-}
module Internal.TranSpec(Goal(..),
FairRegion(..),
Transition(..),
TranSpec(..)) where
import Text.PrettyPrint
import PP
import Internal.IExpr
import Internal.CFA
data Transition = Transition { tranFrom :: Loc
, tranTo :: Loc
, tranCFA :: CFA
}
instance PP Transition where
pp (Transition from to cfa) = text "transition" <+>
(braces' $ text "init:" <+> pp from
$+$
text "final:" <+> pp to
$+$
pp cfa)
instance Show Transition where
show = render . pp
data Goal = Goal { goalName :: String
, goalCond :: Transition
}
instance PP Goal where
pp (Goal n c) = text "goal" <+> pp n <+> char '=' <+> pp c
data FairRegion = FairRegion { fairName :: String
, fairCond :: Expr
}
instance PP FairRegion where
pp (FairRegion n c) = text "fair" <+> pp n <+> char '=' <+> pp c
data TranSpec = TranSpec { tsCTran :: [Transition]
, tsUTran :: [Transition]
, tsInit :: (Transition, Expr) -- initial state constraint (constraint_on_spec_variables,constraints_on_aux_variables)
, tsGoal :: [Goal]
, tsFair :: [FairRegion] -- sets of states f s.t. GF(-f)
}
instance PP TranSpec where
pp s = (vcat $ map (($+$ text "") . pp) (tsCTran s))
$+$
(vcat $ map (($+$ text "") . pp) (tsUTran s))
$+$
(text "init: " <+> (pp $ fst $ tsInit s))
$+$
(text "aux_init: " <+> (pp $ snd $ tsInit s))
$+$
(vcat $ map (($+$ text "") . pp) (tsGoal s))
$+$
(vcat $ map (($+$ text "") . pp) (tsFair s))
-- Source: termite2/tsl | Internal/TranSpec.hs | bsd-3-clause
--file : FieldD.hs
--date : 17/01/18
--author : mi-na
--rationale :
--TODO: restrict the kind of the member 'special'
--export module
module FieldD ( Node(..)
, NodeInfo
, getMap
, getNextNode
, getNodeInfo
) where
--implementation(Node)
--data type
--public data (its methods are called from outside modules)
data Node = Node { myId :: Int
, nexts :: [Int]
, special :: [String]
} deriving (Show)
--XXX: whether two nodes are the same or not depends only on their id
instance Eq Node where
a == b
| myId a == myId b = True
| otherwise = False
--synonym
type InitializeInfo = ([Int], [String])
type NodeInfo = (Int, [Int], [String])
getMap :: [InitializeInfo] -> [Node]
getMap initInfo = generateMap 1 initInfo []
getNextNode :: Int -> [Node] -> Node
getNextNode distNum (x:xs)
| distNum == myId x = x
| otherwise = getNextNode distNum xs
getNodeInfo :: Node -> NodeInfo
getNodeInfo n = (myId n, nexts n, special n)
--implementation(helper)
generate :: Int -> [Int] -> [String] -> Node
generate idVal nextNodes specialInfo = Node { myId=idVal
, nexts=nextNodes
, special=specialInfo }
generateMap :: Int -> [InitializeInfo] -> [Node] -> [Node]
generateMap _ [] nList = nList
generateMap idVal initInfo nList = generateMap (idVal+1) rest $ nList ++ [newNode]
where rest = tail initInfo
initData = head initInfo
newNode = generate idVal (fst initData) (snd initData)
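--usage sketch (illustrative; the 'InitializeInfo' values are made up):
--
-- demoMap :: [Node]
-- demoMap = getMap [([2], []), ([3], ["goal"]), ([1], [])]
--
-- demoNext :: Node -- the node whose myId is 2, with nexts [3]
-- demoNext = getNextNode 2 demoMap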
-- Source: mi-na/scotland_yard | src/FieldD.hs | bsd-3-clause
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Generics.Sum.Internal.Constructors
-- Copyright : (C) 2020 Csongor Kiss
-- License : BSD3
-- Maintainer : Csongor Kiss <[email protected]>
-- Stability : experimental
-- Portability : non-portable
--
-- Derive constructor-name-based prisms generically.
--
-----------------------------------------------------------------------------
module Data.Generics.Sum.Internal.Constructors
( GAsConstructor (..)
, GAsConstructor'
, Context'
, Context
, Context_
, Context0
, derived0
) where
import Data.Generics.Internal.Families
import Data.Generics.Internal.Errors
import Data.Generics.Product.Internal.HList
import GHC.Generics
import GHC.TypeLits (Symbol)
import Data.Kind (Constraint, Type)
import Data.Generics.Internal.Profunctor.Iso
import Data.Generics.Internal.Profunctor.Prism
import GHC.TypeLits (TypeError, ErrorMessage (..))
derived0 :: forall ctor s t a b. Context0 ctor s t a b => Prism s t a b
derived0 = repIso . _GCtor @ctor
{-# INLINE derived0 #-}
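-- A usage sketch (illustrative; user code normally goes through the public
-- wrappers in "Data.Generics.Sum.Constructors" rather than this internal
-- module):
--
-- > data Animal = Dog Int | Cat String deriving Generic
-- >
-- > _Dog :: Prism Animal Animal Int Int
-- > _Dog = derived0 @"Dog"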
type Context' ctor s a
= ( Context0 ctor s s a a
, ErrorUnless ctor s (HasCtorP ctor (Rep s)))
class Context (ctor :: Symbol) s t a b | ctor s -> a, ctor t -> b
instance
( ErrorUnless ctor s (HasCtorP ctor (Rep s))
, GAsConstructor' ctor (Rep s) a -- TODO: add a test similar to #62 for prisms
, GAsConstructor' ctor (Rep (Indexed s)) a'
, GAsConstructor ctor (Rep s) (Rep t) a b
, t ~ Infer s a' b
, GAsConstructor' ctor (Rep (Indexed t)) b'
, s ~ Infer t b' a
) => Context ctor s t a b
class Context_ (ctor :: Symbol) s t a b
instance
( ErrorUnless ctor s (HasCtorP ctor (Rep s))
, GAsConstructor' ctor (Rep s) a -- TODO: add a test similar to #62 for prisms
, GAsConstructor' ctor (Rep (Indexed s)) a'
, GAsConstructor ctor (Rep s) (Rep t) a b
, GAsConstructor' ctor (Rep (Indexed t)) b'
, UnifyHead s t
, UnifyHead t s
) => Context_ ctor s t a b
type Context0 ctor s t a b
= ( Generic s
, Generic t
, GAsConstructor ctor (Rep s) (Rep t) a b
, Defined (Rep s)
(NoGeneric s '[ 'Text "arising from a generic prism focusing on the "
':<>: QuoteType ctor ':<>: 'Text " constructor of type " ':<>: QuoteType a
, 'Text "in " ':<>: QuoteType s])
(() :: Constraint)
)
type family ErrorUnless (ctor :: Symbol) (s :: Type) (contains :: Bool) :: Constraint where
ErrorUnless ctor s 'False
= TypeError
( 'Text "The type "
':<>: 'ShowType s
':<>: 'Text " does not contain a constructor named "
':<>: 'ShowType ctor
)
ErrorUnless _ _ 'True
= ()
--------------------------------------------------------------------------------
-- |As 'AsConstructor' but over generic representations as defined by
-- "GHC.Generics".
class GAsConstructor (ctor :: Symbol) s t a b | ctor s -> a, ctor t -> b where
_GCtor :: Prism (s x) (t x) a b
type GAsConstructor' ctor s a = GAsConstructor ctor s s a a
instance
( GIsList f g as bs
, ListTuple a b as bs
) => GAsConstructor ctor (M1 C ('MetaCons ctor fixity fields) f) (M1 C ('MetaCons ctor fixity fields) g) a b where
_GCtor = mIso . glist . tupled
{-# INLINE _GCtor #-}
instance GSumAsConstructor ctor (HasCtorP ctor l) l r l' r' a b => GAsConstructor ctor (l :+: r) (l' :+: r') a b where
_GCtor = _GSumCtor @ctor @(HasCtorP ctor l)
{-# INLINE _GCtor #-}
instance GAsConstructor ctor f f' a b => GAsConstructor ctor (M1 D meta f) (M1 D meta f') a b where
_GCtor = mIso . _GCtor @ctor
{-# INLINE _GCtor #-}
class GSumAsConstructor (ctor :: Symbol) (contains :: Bool) l r l' r' a b | ctor l r -> a, ctor l' r' -> b where
_GSumCtor :: Prism ((l :+: r) x) ((l' :+: r') x) a b
instance GAsConstructor ctor l l' a b => GSumAsConstructor ctor 'True l r l' r a b where
_GSumCtor = left . _GCtor @ctor
{-# INLINE _GSumCtor #-}
instance GAsConstructor ctor r r' a b => GSumAsConstructor ctor 'False l r l r' a b where
_GSumCtor = right . _GCtor @ctor
{-# INLINE _GSumCtor #-}
-- Source: kcsongor/generic-lens | generic-lens-core/src/Data/Generics/Sum/Internal/Constructors.hs | bsd-3-clause
-- modified from https://github.com/Daniel-Diaz/matrix for my specific use.
module Common.Matrix.Matrix (
Matrix,
rows, cols,
fmap,
(!), (!.), getElem, safeGet, unsafeGet,
getRow, getCol,
fromList, toList, fromLists, toLists,
create,
zero, identity, scalar,
add, subtract, multiply,
power
) where
import Prelude hiding (subtract, fmap)
import Data.Bits (Bits, shiftR, (.&.))
import Data.Maybe (fromMaybe)
import qualified Data.Vector.Unboxed as V
import qualified Data.Vector as RV
data V.Unbox a => Matrix a = Matrix {
rows :: {-# UNPACK #-} !Int,
cols :: {-# UNPACK #-} !Int,
vect :: V.Vector a
} deriving (Eq, Show)
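-- 'encode' converts 1-based (row, column) coordinates into a 0-based
-- row-major index into the backing vector, where @m@ is the column count.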
encode :: Int -> (Int, Int) -> Int
{-# INLINE encode #-}
encode m (i, j) = (i - 1) * m + j - 1
fmap :: V.Unbox a => (a -> a) -> Matrix a -> Matrix a
{-# INLINE fmap #-}
fmap f (Matrix r c v) = Matrix r c $ V.map f v
getElem :: V.Unbox a => Int -> Int -> Matrix a -> a
{-# INLINE getElem #-}
getElem i j m = fromMaybe (error "getElem: out of bound.") (safeGet i j m)
(!) :: V.Unbox a => Matrix a -> (Int, Int) -> a
{-# INLINE (!) #-}
m ! (i, j) = getElem i j m
(!.) :: V.Unbox a => Matrix a -> (Int, Int) -> a
{-# INLINE (!.) #-}
m !. (i, j) = unsafeGet i j m
safeGet :: V.Unbox a => Int -> Int -> Matrix a -> Maybe a
{-# INLINE safeGet #-}
safeGet i j m@(Matrix r c _)
| i < 1 || j < 1 || i > r || j > c = Nothing
| otherwise = Just $ unsafeGet i j m
unsafeGet :: V.Unbox a => Int -> Int -> Matrix a -> a
{-# INLINE unsafeGet #-}
unsafeGet i j (Matrix _ c v) = V.unsafeIndex v $ encode c (i, j)
getRow :: V.Unbox a => Int -> Matrix a -> V.Vector a
{-# INLINE getRow #-}
getRow i (Matrix _ m v) = V.slice (m * (i - 1)) m v
getCol :: V.Unbox a => Int -> Matrix a -> V.Vector a
{-# INLINE getCol #-}
getCol j (Matrix n m v) = V.generate n $ \i -> v V.! encode m (i + 1, j)
create :: V.Unbox a => Int -> Int -> ((Int, Int) -> a) -> Matrix a
{-# INLINE create #-}
create n m f = Matrix n m $ V.fromList [ f (i, j) | i <- [1 .. n], j <- [1 .. m] ]
fromList :: V.Unbox a => Int -> Int -> [a] -> Matrix a
{-# INLINE fromList #-}
fromList n m = Matrix n m . V.fromListN (n * m)
fromLists :: V.Unbox a => [[a]] -> Matrix a
{-# INLINE fromLists #-}
fromLists [] = error "fromLists: empty list."
fromLists (xs:xss) = fromList n m $ concat $ xs : map (take m) xss where
n = 1 + length xss
m = length xs
toList :: V.Unbox a => Matrix a -> [a]
{-# INLINE toList #-}
toList m@(Matrix r c _) = [ unsafeGet i j m | i <- [1 .. r] , j <- [1 .. c] ]
toLists :: V.Unbox a => Matrix a -> [[a]]
{-# INLINE toLists #-}
toLists m@(Matrix r c _) = [ [ unsafeGet i j m | j <- [1 .. c] ] | i <- [1 .. r] ]
zero :: (V.Unbox a, Num a) => Int -> Int -> Matrix a
{-# INLINE zero #-}
zero n m = Matrix n m $ V.replicate (n * m) 0
scalar :: (V.Unbox a, Num a) => Int -> a -> Matrix a
{-# INLINE scalar #-}
scalar n x = create n n $ \(i, j) -> if i == j then x else 0
identity :: (V.Unbox a, Num a) => Int -> Matrix a
{-# INLINE identity #-}
identity n = scalar n 1
add :: (V.Unbox a, Num a) => Matrix a -> Matrix a -> Matrix a
{-# INLINE add #-}
add (Matrix r1 c1 v1) (Matrix r2 c2 v2)
| r1 == r2 && c1 == c2 = Matrix r1 c1 $ V.zipWith (+) v1 v2
| otherwise = error "add: matrix size not match."
subtract :: (V.Unbox a, Num a) => Matrix a -> Matrix a -> Matrix a
{-# INLINE subtract #-}
subtract (Matrix r1 c1 v1) (Matrix r2 c2 v2)
| r1 == r2 && c1 == c2 = Matrix r1 c1 $ V.zipWith (-) v1 v2
| otherwise = error "subtract: matrix size not match."
multiply :: (V.Unbox a, Num a) => Matrix a -> Matrix a -> Matrix a
{-# INLINE multiply #-}
multiply m1@(Matrix _ c _) m2@(Matrix r _ _)
| c == r = multiply' m1 m2
| otherwise = error "multiply: matrix size not match."
multiply' :: (V.Unbox a, Num a) => Matrix a -> Matrix a -> Matrix a
{-# INLINE multiply' #-}
multiply' m1@(Matrix r _ _) m2@(Matrix _ c _) = create r c $ \(i, j) -> dotProduct (RV.unsafeIndex avs $ i - 1) (RV.unsafeIndex bvs $ j - 1) where
avs = RV.generate r $ \i -> getRow (i + 1) m1
bvs = RV.generate c $ \i -> getCol (i + 1) m2
dotProduct v1 v2 = V.foldl' (+) 0 $ V.zipWith (*) v1 v2
power :: (Integral a, Bits a, V.Unbox b, Num b) => Matrix b -> a -> Matrix b
{-# INLINE power #-}
power m@(Matrix r c _) p
| r == c = helper m p $ identity r
| otherwise = error "power: matrix not squared."
where
helper _ 0 ret = ret
helper a x ret = if (x .&. 1) == 1
then helper a' x' (multiply' ret a)
else helper a' x' ret where
a' = multiply' a a
x' = x `shiftR` 1
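-- Usage sketch (illustrative, not part of the module): fast Fibonacci via
-- matrix exponentiation, using 'fromLists', 'power' and '(!)'.
--
-- > fib :: Int -> Int
-- > fib n = power (fromLists [[1, 1], [1, 0]] :: Matrix Int) n ! (1, 2)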
-- Source: foreverbell/project-euler-solutions | lib/Common/Matrix/Matrix.hs | bsd-3-clause
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- |
-- Module : Data.Array.Nikola.Language.Optimize
-- Copyright : (c) Geoffrey Mainland 2012
-- License : BSD-style
--
-- Maintainer : Geoffrey Mainland <[email protected]>
-- Stability : experimental
-- Portability : non-portable
module Data.Array.Nikola.Language.Optimize
( optimizeHostProgram
, liftHostProgram
, mergeParfor
, whenE
, bind
, binds
, subst
) where
import Prelude hiding (mapM)
import Control.Applicative (Applicative, (<$>), (<*>), pure)
import Control.Monad ((>=>))
import Control.Monad.State (StateT(..), evalStateT,
MonadState(..), gets, modify)
import Control.Monad.Trans (MonadIO(..))
import Data.Foldable
import Data.Int
import Data.Monoid
import Data.List (foldl1')
import qualified Data.Map as Map
import Data.Map (Map)
import qualified Data.Set as Set
import Data.Set (Set)
import Data.Traversable
import Text.PrettyPrint.Mainland
import Data.Array.Nikola.Backend.Flags
import Data.Array.Nikola.Exp hiding (fromIntegral, Var, Exp)
import qualified Data.Array.Nikola.Exp as E
import Data.Array.Nikola.Language.Check
import Data.Array.Nikola.Language.Generic
import Data.Array.Nikola.Language.Monad
import Data.Array.Nikola.Language.Optimize.CSE
import Data.Array.Nikola.Language.Optimize.Inliner
import Data.Array.Nikola.Language.Optimize.Monad
import Data.Array.Nikola.Language.Optimize.Occ
import Data.Array.Nikola.Language.Optimize.Simpl
import Data.Array.Nikola.Language.Optimize.Subst
import Data.Array.Nikola.Language.Syntax
-- Host program optimization. Some passes rely on normalized monadic structure,
-- so we perform an initial normalization pass and then do another final
-- normalization pass after other optimizations have had a chance to run.
optimizeHostProgram :: Exp -> R r Exp
optimizeHostProgram =
return
>=> pprStage "Start"
>=> oPass (norm ExpA)
-- >=> whenDialect CUDA (oPass (mergeParfor ExpA))
>=> oPass (shareBindings ExpA)
>=> aPass (mergeBounds ExpA)
>=> oPass (norm ExpA)
>=> pprStage "Pre-simplify pass 1"
>=> oPass (simpl ExpA)
>=> pprStage "Simplify pass 1"
>=> cse ExpA
>=> pprStage "CSE pass 1"
>=> oPass (occ ExpA)
>=> pprStage "Occ pass 1"
>=> oPass (inliner ExpA)
>=> pprStage "Inliner pass 1"
>=> oPass (norm ExpA)
>=> pprStage "Pre-simplify pass 2"
>=> oPass (simpl ExpA)
>=> pprStage "Simplify pass 2"
>=> cse ExpA
>=> pprStage "CSE pass 2"
>=> oPass (occ ExpA)
>=> oPass (inliner ExpA)
>=> pprStage "Inliner pass 2"
>=> whenDialect CUDA (constructKernels ExpA)
>=> oPass (lambdaLift ExpA)
liftHostProgram :: Exp -> R r Exp
liftHostProgram =
return
>=> oPass (lambdaLift ExpA)
whenDialect :: Dialect
-> (Exp -> R r Exp)
-> Exp
-> R r Exp
whenDialect dialect f p = do
flags <- getFlags
if (fromLJust fDialect flags == dialect)
then f p
else return p
oPass :: (Exp -> O Exp) -> Exp -> R r Exp
oPass f = liftIO . evalO . f
aPass :: (Exp -> A Exp) -> Exp -> R r Exp
aPass f = liftIO . evalA . f
pprIO :: MonadIO m => Doc -> m ()
pprIO = liftIO . putStrLn . pretty 200
pprStage :: MonadIO m => String -> Exp -> m Exp
pprStage desc e =
if False
then do pprIO $ nest 4 $ text desc </> line </> ppr e
return e
else return e
-- Deciding whether or not things must be equal ("must be equal" implies
-- "equal", but "equal" does not imply "must be equal")
infix 4 ==!
class MustEq a where
(==!) :: a -> a -> Bool
instance MustEq a => MustEq [a] where
[] ==! [] = True
(x:xs) ==! (y:ys) = x ==! y && xs ==! ys
_ ==! _ = False
instance MustEq Exp where
ConstE c1 ==! ConstE c2 = c1 == c2
VarE v1 ==! VarE v2 = v1 == v2
UnitE ==! UnitE = True
DimE i1 n1 e1 ==! DimE i2 n2 e2 = i1 == i2 && n1 == n2 && e1 == e2
LetE v1 tau1 _ e1a e1b ==! LetE v2 tau2 _ e2a e2b = v1 == v2 && tau1 == tau2 && e1a ==! e2a && e1b ==! e2b
LamE vtaus1 e1 ==! LamE vtaus2 e2 = vtaus1 == vtaus2 && e1 ==! e2
AppE e1 es1 ==! AppE e2 es2 = e1 ==! e2 && es1 ==! es2
UnopE op1 e1 ==! UnopE op2 e2 = op1 == op2 && e1 ==! e2
BinopE op1 e1a e1b ==! BinopE op2 e2a e2b = op1 == op2 && e1a ==! e2a && e1b ==! e2b
IfThenElseE e1a e1b e1c ==! IfThenElseE e2a e2b e2c = e1a ==! e2a && e1b ==! e2b && e1c ==! e2c
IndexE v1 idx1 ==! IndexE v2 idx2 = v1 == v2 && idx1 ==! idx2
_ ==! _ = False
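-- For example (illustrative): @VarE v ==! VarE v@ holds, but
-- @BinopE MaxO e1 e2 ==! BinopE MaxO e2 e1@ is 'False' for distinct e1 and e2,
-- even though the two expressions are semantically equal.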
-- Kernel construction
constructKernels :: AST a -> a -> R r a
constructKernels = go
where
go :: AST a -> a -> R r a
go ExpA e@(ForE forloop _ _) | isParFor forloop = do
return (CallE (LamE [] e) [])
go w a = checkTraverseFam go w a
-- Lift shared bindings out of branches
shareBindings :: forall a m . (MonadSubst Var Var m, MonadSubst Var Exp m) => AST a -> a -> m a
shareBindings VarA v = lookupSubst VarA v VarA (return v)
shareBindings ExpA (VarE v) = lookupSubst VarA v ExpA (VarE <$> shareBindings VarA v)
shareBindings ExpA (IfThenElseE test (LetE v1 tau1 occ1 e1a e1b) (LetE v2 tau2 occ2 e2a e2b)) = do
e1a' <- shareBindings ExpA e1a
e2a' <- shareBindings ExpA e2a
if e1a' == e2a'
then do insertSubst VarA v2 VarA v1
LetE v1 tau1 (occJoin occ1 occ2) e1a' <$> shareBindings ExpA (IfThenElseE test e1b e2b)
else do e1 <- LetE <$> shareBindings VarA v1
<*> shareBindings TypeA tau1
<*> pure occ1
<*> pure e1a'
<*> shareBindings ExpA e1b
e2 <- LetE <$> shareBindings VarA v2
<*> shareBindings TypeA tau2
<*> pure occ2
<*> pure e2a'
<*> shareBindings ExpA e2b
IfThenElseE <$> shareBindings ExpA test <*> pure e1 <*> pure e2
shareBindings w a = traverseFam shareBindings w a
-- Merge parallel for loops
mergeParfor :: forall a m . (MonadSubst Var Var m, MonadSubst Var Exp m) => AST a -> a -> m a
mergeParfor VarA v = lookupSubst VarA v VarA (return v)
mergeParfor ExpA (VarE v) = lookupSubst VarA v ExpA (VarE <$> mergeParfor VarA v)
mergeParfor ExpA (LamE vtaus p) = do
LamE vtaus <$> (fromMnf <$> go (toMnf p))
where
go :: Mnf Exp -> m (Mnf Exp)
go [] =
return []
go ((seq1, ForE ParFor [(v1, e1)] p1) : (seq2, ForE ParFor [(v2, e2)] p2) : ms) = do
insertSubst VarA v2 VarA v1
let p1' = whenE ((E . VarE) v1 <* (E e1 :: E.Exp t Int32)) p1
let p2' = whenE ((E . VarE) v1 <* (E e2 :: E.Exp t Int32)) p2
go ((seq1, ForE ParFor [(v1, BinopE MaxO e1 e2)] (sync p1' p2')) : ms)
where
sync = case seq2 of
SeqM -> syncE
ParM -> seqE
_ -> error "mergeParfor: saw bind between parallel fors"
go ((s,m) : ms) = do
m' <- mergeParfor ExpA m
ms' <- go ms
return $ (s,m') : ms'
mergeParfor w a = traverseFam mergeParfor w a
whenE :: E.Exp t a -> Exp -> Exp
whenE e p = IfThenElseE (unE e) p (ReturnE UnitE)
-- Convert monadic actions to a normal form
data SeqM = LetM Var Type
| SeqM
| ParM
| BindM Var Type
instance Pretty SeqM where
ppr (LetM v tau) = text "let" <+> ppr v <+> text "::" <+> ppr tau <+>
text "="
ppr SeqM = text ">>"
ppr ParM = text "||"
ppr (BindM v tau) = text ">>=" <+> text "\\" <+> ppr v <+> text "::" <+> ppr tau <+> text "->"
type Mnf a = [(SeqM, a)]
instance Pretty a => Pretty (Mnf a) where
ppr [] = empty
ppr ((LetM v tau, e):ms) = text "let" <+> ppr v <+> text "::" <+> ppr tau <+>
text "=" <+> ppr e </> ppr ms
ppr ((s, m):ms) = ppr m </> ppr s </> ppr ms
toMnf :: Exp -> Mnf Exp
toMnf (LetE v tau _ e m) = (LetM v tau, e) : toMnf m
toMnf (SeqE (ReturnE {}) m) = toMnf m
toMnf (SeqE (SeqE m1 m2) m3) = toMnf (SeqE m1 (SeqE m2 m3))
toMnf (SeqE (ParE m1 m2) m3) = toMnf (SeqE m1 (ParE m2 m3))
toMnf (SeqE (BindE v tau m1 m2) m3) = toMnf (BindE v tau m1 (SeqE m2 m3))
toMnf (SeqE m1 m2) = (SeqM, m1) : toMnf m2
toMnf (ParE (ReturnE {}) m) = toMnf m
toMnf (ParE (SeqE m1 m2) m3) = toMnf (ParE m1 (SeqE m2 m3))
toMnf (ParE (ParE m1 m2) m3) = toMnf (ParE m1 (ParE m2 m3))
toMnf (ParE (BindE v tau m1 m2) m3) = toMnf (BindE v tau m1 (ParE m2 m3))
toMnf (ParE m1 m2) = (ParM, m1) : toMnf m2
toMnf (BindE v tau (SeqE m1 m2) m3) = toMnf (SeqE m1 (BindE v tau m2 m3))
toMnf (BindE v tau (ParE m1 m2) m3) = toMnf (ParE m1 (BindE v tau m2 m3))
toMnf (BindE v2 tau2
(BindE v1 tau1 m1 m2) m3) = toMnf (BindE v1 tau1 m1 (BindE v2 tau2 m2 m3))
toMnf (BindE v tau m1 m2) = (BindM v tau, m1) : toMnf m2
toMnf m = [(SeqM, m)]
fromMnf :: Mnf Exp -> Exp
fromMnf [] = error "fromMnf: empty list"
fromMnf [(LetM {}, _)] = error "fromMnf: last action is a let"
fromMnf [(SeqM, m)] = m
fromMnf [(ParM, _)] = error "fromMnf: last action is a par"
fromMnf [(BindM {}, _)] = error "fromMnf: last action is a bind"
fromMnf ((LetM v tau, e) :ms) = LetE v tau Many e $ fromMnf ms
fromMnf ((SeqM, m) :ms) = SeqE m $ fromMnf ms
fromMnf ((ParM, m) :ms) = ParE m $ fromMnf ms
fromMnf ((BindM v tau, m):ms) = BindE v tau m $ fromMnf ms
-- Normalize monadic actions
norm :: (MonadCheck m, MonadSubst Var Var m, MonadSubst Var Exp m) => AST a -> a -> m a
norm VarA v = lookupSubst VarA v VarA (return v)
norm ExpA (VarE v) = lookupSubst VarA v ExpA (VarE <$> norm VarA v)
norm ExpA (SeqE (ReturnE {}) m) = norm ExpA m
norm ExpA (SeqE (SeqE m1 m2) m3) = norm ExpA (SeqE m1 (SeqE m2 m3))
norm ExpA (ParE (ParE m1 m2) m3) = norm ExpA (ParE m1 (ParE m2 m3))
norm ExpA (SeqE (BindE v tau m1 m2) m3) = norm ExpA (BindE v tau m1 (SeqE m2 m3))
norm ExpA (SeqE m1 (ReturnE UnitE)) = do m1' <- norm ExpA m1
tau <- inferExp m1' >>= checkMT
if tau == unitT
then norm ExpA m1
else norm ExpA $ SeqE m1' (ReturnE UnitE)
norm ExpA (BindE v _ (ReturnE e) m) = do insertSubst VarA v ExpA e
norm ExpA m
norm ExpA (BindE v tau (SeqE m1 m2) m3) = norm ExpA (SeqE m1 (BindE v tau m2 m3))
norm ExpA (BindE v2 tau2 (BindE v1 tau1 m1 m2) m3) = norm ExpA (BindE v1 tau1 m1 (BindE v2 tau2 m2 m3))
norm ExpA (CallE (LamE [] (ReturnE e)) []) = do m <- ReturnE <$> norm ExpA e
norm ExpA m
norm w a = checkTraverseFam norm w a
-- Lambda-lift kernels
lambdaLift :: forall m a . MonadCheck m => AST a -> a -> m a
lambdaLift ExpA (CallE (LamE vtaus m) es) = do
let vs' = Set.toList (fvs ExpA m)
taus' <- mapM lookupVarType vs'
return $ CallE (LamE (vtaus ++ vs' `zip` taus') m)
(es ++ map VarE vs')
lambdaLift w a =
checkTraverseFam lambdaLift w a
-- Free variables
fvs :: AST a -> a -> Set Var
fvs w a = fst (vars w a Set.empty)
vars :: Fold AST (Set Var -> (Set Var, Set Var))
vars = go
where
go :: Fold AST (Set Var -> (Set Var, Set Var))
go VarA v = useVar v
go ExpA (LetE v _ _ e1 e2) = go ExpA e1 `mappend`
bindVar v (go ExpA e2)
go ExpA (LamE vtaus e) = bindVars (map fst vtaus) (go ExpA e)
go ExpA (BindE v _ p1 p2) = go ExpA p1 `mappend`
bindVar v (go ExpA p2)
go ExpA (ForE _ loopvs p) = foldMap (go ExpA) es `mappend`
bindVars vs (go ExpA p)
where
(vs, es) = unzip loopvs
go w a = foldFam go w a
useVar :: Var -> (Set Var -> (Set Var, Set Var))
useVar v = \bound -> (if v `Set.member` bound then mempty else Set.singleton v, mempty)
bindVar :: Var
-> (Set Var -> (Set Var, Set Var))
-> (Set Var -> (Set Var, Set Var))
bindVar v m = \bound -> m (Set.insert v bound)
bindVars :: [Var]
-> (Set Var -> (Set Var, Set Var))
-> (Set Var -> (Set Var, Set Var))
bindVars vs m = \bound -> m (bound `Set.union` Set.fromList vs)
bind :: MonadSubst a b m
=> AST a
-> AST b
-> a
-> (a -> m c)
-> m c
bind wa wb v kont = do
old_phi <- getsTheta wa wb phi
old_theta <- getsTheta wa wb theta
a <- if v `Set.member` old_phi
then do let v' = uniqBinder v old_phi
modifyTheta wa wb $ \s -> s { phi = Set.insert v' old_phi
, theta = Map.insert v (binderOcc v') old_theta
}
kont v'
else do modifyTheta wa wb $ \s -> s { phi = Set.insert v (phi s)
, theta = Map.delete v old_theta
}
kont v
modifyTheta wa wb $ \s -> s { phi = old_phi
, theta = old_theta
}
return a
binds :: forall m a b c . MonadSubst a b m
=> AST a
-> AST b
-> [a]
-> ([a] -> m c)
-> m c
binds _ _ [] kont = kont []
binds wa wb (v:vs) kont = do
bind wa wb v $ \v' -> do
binds wa wb vs $ \vs' -> do
kont (v':vs')
-- Substitution
subst :: forall m a b . MonadSubst a b m => AST a -> AST b -> forall c . AST c -> c -> m c
subst = go
where
go :: AST a -> AST b -> AST c -> c -> m c
go VarA VarA VarA v = lookupSubst VarA v VarA (return v)
go VarA ExpA ExpA e@(VarE v) = lookupSubst VarA v ExpA (return e)
go VarA w ExpA (LetE v tau occ e1 e2) = do e1' <- go VarA w ExpA e1
bind VarA w v $ \v'-> do
LetE v' tau occ e1' <$> go VarA w ExpA e2
go VarA w ExpA (LamE vtaus e) = do let (vs, taus) = unzip vtaus
binds VarA w vs $ \vs' -> do
LamE (vs' `zip` taus) <$> go VarA w ExpA e
go VarA w ExpA (BindE v tau p1 p2) = do p1' <- go VarA w ExpA p1
bind VarA w v $ \v' -> do
BindE v' tau p1' <$> go VarA w ExpA p2
go VarA w ExpA (ForE floop loopvs p) = do es' <- traverse (go VarA w ExpA) es
binds VarA w vs $ \vs' -> do
ForE floop (vs' `zip` es') <$> go VarA w ExpA p
where
(vs, es) = unzip loopvs
go w1 w2 w a = traverseFam (go w1 w2) w a
-- The StateL monad
newtype StateL s a = StateL { runStateL :: s -> (s, a) }
instance Functor (StateL s) where
fmap f (StateL k) = StateL $ \s ->
let (s', v) = k s
in
(s', f v)
instance Applicative (StateL s) where
pure x = StateL $ \s -> (s, x)
StateL kf <*> StateL kv = StateL $ \s ->
let (s', f) = kf s
(s'', v) = kv s'
in (s'', f v)
instance Monad (StateL s) where
m >>= f = StateL $ \s -> let (s', a) = runStateL m s
in
runStateL (f a) s'
m1 >> m2 = StateL $ \s -> let (s', _) = runStateL m1 s
in
runStateL m2 s'
return x = StateL $ \s -> (s, x)
instance MonadState s (StateL s) where
get = StateL $ \s -> (s, s)
put s = StateL $ \_ -> (s, ())
-- A substitution monad
type Subst a b = StateL (Theta a b)
instance BinderOcc a b => MonadSubst a b (Subst a b) where
getTheta _ _ = get
putTheta s = put s
{-
subst1 :: (MonadCheck m, MonadState REnv m, BinderOcc Var a) => Var -> AST a -> a -> AST b -> b -> m b
subst1 v w1 e w a = do
phi <- gets (Map.keysSet . rVarTypes)
let (_, x) = runStateL (subst VarA w1 w a) (Theta { theta = Map.fromList [(v, e)]
, phi = phi
})
return x
-}
-- Abstract interpretation
data I = I (Maybe Integer) (Set Exp)
deriving (Eq, Ord)
instance Pretty I where
ppr (I Nothing es) =
(parens . commasep . map ppr . Set.toList) es
ppr (I (Just c) es) =
(parens . commasep) (ppr c : (map ppr . Set.toList) es)
iToExp :: I -> Exp
iToExp (I (Just c) vs) =
foldl1' (BinopE MaxO) (ConstE (Int32C (fromIntegral c)) : Set.toList vs)
iToExp (I Nothing vs) =
foldl1' (BinopE MaxO) (Set.toList vs)
-- | A 'Range' represents a range (loop bound) [0,n). Either we know the upper
-- bound precisely, and it is of the form max(c,e1,...,en) where c is a constant
-- and e1...en are expressions, or we don't know the bound, in which case it is
-- T (top).
data Range = Range I
| T
deriving (Eq, Ord)
instance Pretty Range where
ppr (Range i) = text "[0," <+> text "max" <+> ppr i <> char ')'
  ppr T         = text "[0,+infty)"
-- | Calculate a range from an expression that specifies the limit of a loop
-- bound.
rangeE :: MonadInterp m Range => Exp -> m Range
rangeE (VarE v) = do
maybe_d <- lookupVar v
case maybe_d of
Just d -> return d
Nothing -> return $ Range $ I Nothing (Set.singleton (VarE v))
rangeE (ConstE (Int32C i)) =
return $ Range $ I (Just (fromIntegral i)) Set.empty
rangeE (BinopE MaxO e1 e2) = do
d1 <- rangeE e1
d2 <- rangeE e2
return $ joinRanges d1 d2
rangeE _ = return $ T
joinRanges :: Range -> Range -> Range
joinRanges (Range (I c1 vs1)) (Range (I c2 vs2)) =
Range $ I (up c1 c2) (vs1 `Set.union` vs2)
where
up Nothing Nothing = Nothing
up (Just c) Nothing = Just c
up Nothing (Just c) = Just c
up (Just c1) (Just c2) = Just (Prelude.max c1 c2)
joinRanges _ _ = T
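-- For example (illustrative, assuming @n@ is not already bound in the
-- environment): @rangeE (BinopE MaxO (ConstE (Int32C 8)) (VarE n))@ evaluates
-- to @Range (I (Just 8) {VarE n})@, i.e. the loop bound is max(8, n).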
class (Applicative m, Monad m, MonadIO m) => MonadInterp m a where
lookupVar :: Var -> m (Maybe a)
extendVars :: [(Var, a)] -> m b -> m b
mergeBounds :: forall m . (MonadInterp m Range) => Traversal AST m
mergeBounds ExpA (ForE ParFor loopvs p) = do
(ds, es') <- unzip <$> mapM simplBound loopvs
extendVars (vs `zip` ds) $ do
ForE ParFor (vs `zip` es') <$> (fromMnf <$> go (toMnf p))
where
vs = map fst loopvs
simplBound :: (Var, Exp) -> m (Range, Exp)
simplBound (_, e) = do
e' <- mergeBounds ExpA e
d <- rangeE e'
case d of
Range i -> return (d, iToExp i)
T -> return (d, e')
go :: Mnf Exp -> m (Mnf Exp)
go [] =
return []
go ((seq1, IfThenElseE e1 p1a p1b):(seq2, IfThenElseE e2 p2a p2b):ms)
| e1 ==! e2 && not (isLetM seq1) && not (isLetM seq2) =
go ((seq2, IfThenElseE e1 (p1a `seq'` p2a) (p1b `seq'` p2b)):ms)
where
seq' = case seq1 of
ParM -> parE
_ -> seqE
go ((seq1, IfThenElseE e1 p1a p1b):(_, SyncE):(seq3, IfThenElseE e2 p2a p2b):ms)
| e1 ==! e2 && not (isLetM seq1) && not (isLetM seq3) =
go ((seq3, IfThenElseE e1 (p1a `syncE` p2a) (p1b `syncE` p2b)):ms)
go ((seq1, m@(IfThenElseE (BinopE LtO (VarE v) e) p1 _)):ms)
| not (isLetM seq1) = do
d1 <- lookupVar v
d2 <- Just <$> rangeE e
if d1 == d2
then go ((seq1,p1):ms)
else do m' <- mergeBounds ExpA m
ms' <- go ms
return $ (seq1,m') : ms'
go ((seq1,m) : ms) = do
m' <- mergeBounds ExpA m
ms' <- go ms
return $ (seq1,m') : ms'
isLetM :: SeqM -> Bool
isLetM (LetM {}) = True
isLetM _ = False
mergeBounds w a = traverseFam mergeBounds w a
-- The optimization monad
data AEnv = AEnv
{ aVars:: Map Var Range }
defaultAEnv :: AEnv
defaultAEnv = AEnv { aVars = Map.empty }
newtype A a = A { unA :: StateT AEnv IO a }
deriving (Monad, Functor, Applicative, MonadState AEnv, MonadIO)
evalA :: A a -> IO a
evalA m = evalStateT (unA m) defaultAEnv
instance MonadInterp A Range where
lookupVar v = gets $ \s -> Map.lookup v (aVars s)
extendVars vds act = do
old_vars <- gets aVars
modify $ \s -> s { aVars = foldl' insert (aVars s) vds }
x <- act
modify $ \s -> s { aVars = old_vars }
return x
where
insert m (k, v) = Map.insert k v m
-- Source: mainland/nikola | src/Data/Array/Nikola/Language/Optimize.hs | bsd-3-clause
-----------------------------------------------------------------------------
-- |
-- Module : System.Win32.Com
-- Copyright : (c) Sigbjorn Finne, [email protected] 1999
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- Support library for interacting with base Microsoft COM services and API.
--
-----------------------------------------------------------------------------
module System.Win32.Com
(
-- base COM interface, IUnknown:
IUnknown_ -- abstract, instance of: Eq, Show.
, IUnknown
, iidIUnknown -- :: IID (IUnknown ())
, interfaceNULL, isNullInterface, iidNULL
-- its methods:
, queryInterface -- :: IID (IUnknown b) -> IUnknown a -> IO (IUnknown b)
, addRef -- :: IUnknown a -> IO Word32
, release -- :: IUnknown a -> IO Word32
, withQueryInterface -- :: IID (IUnknown b) -> IUnknown a -> (IUnknown b -> IO c) -> IO c
-- helpful operators:
, ( # ) -- :: a -> (a -> IO b) -> IO b
, ( ## ) -- :: IO a -> (a -> IO b) -> IO b
-- setting up and shutting down.
, coRun -- :: IO a -> IO a
, coRunEx
, coPerformIO -- :: IO a -> IO a
, coUnsafePerformIO -- :: IO a -> a
, coInitialize -- :: IO ()
, coInitializeEx -- :: IO ()
, coUnInitialize -- :: IO ()
, coUnInitializeEx -- IO ()
-- GUID API:
, GUID -- abstract, instance of: Eq, Show
, mkGUID -- :: String -> GUID
, newGUID -- :: IO GUID
, stringToGUID -- :: String -> IO GUID
, guidToString -- :: GUID -> String
, nullGUID -- :: GUID
-- IID API:
, IID -- abstract, instance of: Eq, Show
, mkIID -- :: String -> IID a
, stringToIID -- :: String -> IO (IID a)
, guidToIID -- :: GUID -> IID a
, iidToGUID -- :: IID a -> GUID
, castIID -- :: IID a -> IID b
-- CLSID API:
, CLSID -- abstract, instance of: Eq, Show
, mkCLSID -- :: String -> CLSID
, stringToCLSID -- :: String -> IO CLSID
, guidToCLSID -- :: GUID -> CLSID
, clsidToGUID -- :: CLSID -> GUID
, clsidToDisplayName -- :: CLSID -> String
-- LIBID
, LIBID -- (a guid)
, mkLIBID -- :: String -> LIBID
-- HRESULT API:
, HRESULT
, s_FALSE -- :: HRESULT
, s_OK -- :: HRESULT
, succeeded -- :: HRESULT -> Bool
, failed -- :: HRESULT -> Bool
, checkHR -- :: HRESULT -> IO ()
, checkBool -- :: Int32 -> IO ()
, returnHR -- :: IO () -> IO HRESULT
, coFailHR -- :: HRESULT -> IO a
, coFailWithHR -- :: HRESULT -> String -> IO
, coAssert -- :: Bool -> String -> IO ()
, coOnFail -- :: IO a -> String -> IO a
, coFail -- :: String -> IO a
, isCoError -- :: IOError -> Bool
, coGetErrorHR -- :: IOError -> HRESULT
, coGetErrorString -- :: IOError -> String
, hresultToString -- :: HRESULT -> IO String
, ComException(..)
, catchComException
, throwIOComException
, throwComException
-- component creation:
, coCreateInstance -- :: CLSID -> Maybe (IUnknown b) -> CLSCTX
-- -> IID (IUnknown a) -> IO (IUnknown a)
, coCreateObject
, coGetObject
, coGetActiveObject
, coGetFileObject
, coCreateInstanceEx
, COSERVERINFO(..)
, COAUTHIDENTITY(..)
, COAUTHINFO(..)
, withObject -- :: IUnknown a -> [IUnknown a -> IO b] -> IO [b]
, withObject_ -- :: IUnknown a -> [IUnknown a -> IO b] -> IO ()
, withMethod -- :: (a -> IUnknown b -> IO c) -> [a] -> IUnknown b -> IO [c]
, withMethod_ -- :: (a -> IUnknown b -> IO c) -> [a] -> IUnknown b -> IO ()
, CLSCTX(..)
, ProgID
, progIDFromCLSID -- :: CLSID -> IO ProgID
, clsidFromProgID -- :: ProgID -> IO CLSID
, printMessage
, putMessage
, messageBox
, outputDebugString
, OSVersionInfo(..)
, isWindowsNT -- :: OSVersionInfo -> Bool
, isWindows95 -- :: OSVersionInfo -> Bool
, isWindows98 -- :: OSVersionInfo -> Bool
, versionInfo -- :: OSVersionInfo
, ifaceToAddr
-- IEnum* methods.
, enumNext
, enumNextOne
, enumClone
, enumReset
, enumSkip
, BSTR
, marshallBSTR
, unmarshallBSTR
, readBSTR
, writeBSTR
, freeBSTR
, LPSTR
, coFree
, coAlloc
, marshallIUnknown
, unmarshallIUnknown
, readIUnknown
, writeIUnknown
, unmarshallIUnknownFO
, castIface
-- Re-export WideStrings
, WideString
, marshallWideString
, unmarshallWideString
, writeWideString
, readWideString
, sizeofWideString
, freeWideString
-- marshallers
, marshallGUID -- :: GUID -> IO (ForeignPtr GUID)
, unmarshallGUID -- :: Bool -> Ptr GUID -> IO GUID
, writeGUID
, readGUID
, copyGUID
, sizeofGUID
-- marshallers
, marshallIID -- :: GUID -> IO (ForeignPtr GUID)
, unmarshallIID -- :: Bool -> Ptr GUID -> IO GUID
, writeIID
, readIID
, sizeofIID
, copyIID
-- marshallers
, marshallCLSID -- :: CLSID -> IO (ForeignPtr CLSID)
, unmarshallCLSID -- :: Bool -> Ptr CLSID -> IO GUID
, writeCLSID
, readCLSID
, sizeofCLSID
, copyCLSID
, invokeAndCheck
, invokeIt
, loadTypeLib
, loadTypeLibEx
, loadRegTypeLib
, queryPathOfRegTypeLib
, createTypeLib
, LCID
, messagePump
, postQuitMsg
) where
import System.Win32.Com.Exception
import System.Win32.Com.Base
hiding ( coCreateInstance, loadTypeLib, messageBox,
loadTypeLibEx, loadRegTypeLib, coCreateInstanceEx
)
import qualified System.Win32.Com.Base as Base
( coCreateInstance, loadTypeLib, messageBox,
loadTypeLibEx, loadRegTypeLib, coCreateInstanceEx
)
import System.Win32.Com.HDirect.HDirect
import System.Win32.Com.HDirect.Pointer hiding ( freeBSTR )
import qualified System.Win32.Com.HDirect.Pointer as P ( freeBSTR )
import System.Win32.Com.HDirect.WideString
import System.IO.Unsafe ( unsafePerformIO )
import Control.Monad ( when )
import Foreign.StablePtr ( deRefStablePtr )
import Foreign.Ptr
import Foreign.ForeignPtr
import Foreign.Storable
import Foreign.Marshal.Alloc ( allocaBytes )
import Data.Bits
import Control.Exception ( bracket )
infixl 1 #
infixl 0 ##
--Operators to provide OO-looking invocation of interface methods, e.g.,
--
-- ip # meth1 args
-- | The @#@ operator permits /OO-style/ method application with @do@ syntax:
--
-- @
-- obj # method arg1 arg2
-- @
--
-- is equivalent to @method arg1 arg2 obj@, so this assumes that the COM method
-- wrappers take the /this/ pointer as their last argument, which is what the
-- /HDirect/-generated wrappers and the base methods provided by this COM+Automation library do.
( # ) :: a -> (a -> IO b) -> IO b
obj # method = method obj
-- | A variation on @(#)@ where the /this/ pointer is an action returning an object reference
-- rather than the reference itself. Sometimes useful when you create one-off objects
-- and call methods on them:
--
-- @
-- (createObject arg1) ## startUp arg2
-- @
--
-- instead of the unwieldier,
--
-- @
-- obj <- createObject arg1
-- obj # startUp arg2 or createObject arg1 >>= (startUp arg2)
-- @
--
( ## ) :: IO a -> (a -> IO b) -> IO b
mObj ## method = mObj >>= method
--IPersistFile - doesn't really belong here..
data PersistFile a = PersistFile
type IPersistFile a = IUnknown (PersistFile a)
iidIPersistFile :: IID (IPersistFile ())
iidIPersistFile = mkIID "{0000010B-0000-0000-C000-000000000046}"
-- | @coCreateInstance@ is the basic COM way of creating components. It takes
-- a 'CLSID', an interface to aggregate on, a process context and an IID to
-- create an object:
--
-- @
-- coCreateInstance clsidAgentServer interfaceNULL LocalProcess iidIAgent
-- @
--
coCreateInstance :: CLSID
-> Maybe (IUnknown b)
-> CLSCTX
-> IID (IUnknown a)
-> IO (IUnknown a)
coCreateInstance clsid inner context iid = do
ppvObject <- allocOutPtr
clsid <- marshallCLSID clsid
inner <- marshallInner inner
let ctxt = fromEnum context
iid <- marshallIID iid
Base.coCreateInstance (castForeignPtr clsid) inner (fromIntegral ctxt)
(castForeignPtr iid) ppvObject
doThenFree free (readIUnknown False{-finalise only-}) ppvObject
coCreateInstanceEx :: CLSID
-> Maybe (IUnknown b)
-> CLSCTX
-> Maybe COSERVERINFO
-> IID (IUnknown a)
-> IO (IUnknown a)
coCreateInstanceEx clsid pUnkOuter context mbServ iid = do
clsid <- marshallCLSID clsid
pUnkOuter <- marshallInner pUnkOuter
let ctxt = fromEnum context
iid <- copyGUID (iidToGUID iid)
let mqi = [ MULTI_QI iid nullPtr 0 ]
r <- Base.coCreateInstanceEx (castForeignPtr clsid) pUnkOuter (fromIntegral ctxt) mbServ mqi
case r of
(MULTI_QI iid pItf hr:_) -> do
coFree iid
checkHR hr
unmarshallIUnknown True{-finalise it-} pItf
_ -> coFailHR e_FAIL
marshallInner :: Maybe (IUnknown a) -> IO (ForeignPtr b)
marshallInner Nothing = return nullFO
marshallInner (Just v) = marshallIUnknown v
-- | @createObject@ creates an object from its progID: @createObject "Agent.Server"@.
-- @getObject@ creates an object from its progID and initializes it with a given file:
-- @getObject "spreadsheet.exl" "Excel.Application"@. If the filename is empty,
-- @getObject@ calls @getActiveObject@.
-- @getActiveObject@ tries to connect to an already running instance of the component:
-- @getActiveObject "Word.Application"@.
-- @getFileObject@ opens a file or url and loads the associated or persistent object in it:
-- @getFileObject "spreadsheet.spd"@.
-- @coCreateInstance@ is the basic COM way of creating components. It takes
-- a CLSID, process context and IID to create an object:
-- @coCreateInstance clsidAgentServer Nothing LocalProcess iidIAgent@.
--
-- NOTE: the @co@-prefixed variants let you specify the initial IID explicitly;
-- the unprefixed variants use @iidIDispatch@ (see 'System.Win32.Com.Automation' for more).
--
coCreateObject :: ProgID -> IID (IUnknown a) -> IO (IUnknown a)
coCreateObject progid iid = do
clsid <- clsidFromProgID progid
coCreateInstance clsid Nothing AnyProcess iid
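-- A small usage sketch: create a COM object from a ProgID and ask for its
-- bare IUnknown interface. The "Word.Application" ProgID is purely
-- illustrative and assumed to be registered on the machine.
exampleCreateWord :: IO (IUnknown ())
exampleCreateWord = coCreateObject "Word.Application" iidIUnknown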
-- | Get Object from File and ProgID.
coGetFileObject :: String -> ProgID -> IID (IUnknown a) -> IO (IUnknown a)
coGetFileObject "" progid iid = coGetActiveObject progid iid
coGetFileObject fname progid iid = do
pf <- coCreateObject progid iidIPersistFile
stackWideString fname $ \pfname -> do
persistfileLoad pf pfname 0
pf # queryInterface iid
-- | Look up and activate the given active/running object.
coGetActiveObject :: ProgID -> IID (IUnknown a) -> IO (IUnknown a)
coGetActiveObject progid iid = do
clsid <- clsidFromProgID progid
iface <- primGetActiveObject clsid
`coOnFail` ("Could not connect to component '" ++ progid ++ "'")
iface # queryInterface iid
primGetActiveObject :: CLSID -> IO (IUnknown a)
primGetActiveObject clsid = do
clsid <- marshallCLSID clsid
ppvObject <- allocOutPtr
hr <- getActiveObject (castForeignPtr clsid) nullPtr ppvObject
doThenFree free (readIUnknown False{-finalise only-}) ppvObject
-- | Bind to an object via its /moniker string/ or display name.
coGetObject :: String -> IID (IUnknown a) -> IO (IUnknown a)
coGetObject fname iid = do
stackWideString fname $ \pfname -> do
iid <- marshallIID iid
ppv <- bindObject pfname (castForeignPtr iid)
doThenFree free (readIUnknown False{-finalise only-}) ppv
--COM initialize/uninitialize:
-- | @coRun act@ is the toplevel action combinator to wrap up your
-- COM actions in. Takes care of calling 'coInitialize' (and un-initialize)
-- for you.
coRun :: IO a -> IO a
coRun io = do
coInitialize
v <-
catchComException io
(\ err -> do
when (isCoError err) (putMessage $ coGetErrorString err)
coUnInitialize
throwIOComException err)
coUnInitialize
return v
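-- A sketch of the intended top-level structure (illustrative only; the
-- "Word.Application" ProgID is an assumption): initialise COM, run the
-- action, report the result, and let 'coRun' take care of uninitialising.
exampleCoRun :: IO ()
exampleCoRun = coRun $ do
  clsid <- clsidFromProgID "Word.Application"
  printMessage clsid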
coRunEx :: IO a -> IO a
coRunEx io = do
coInitializeEx
v <-
catchComException io
(\ err -> do
when (isCoError err) (putMessage $ coGetErrorString err)
coUnInitializeEx
throwIOComException err)
coUnInitializeEx
return v
-- | @coPerformIO act@ runs @act@ within an exception handler that
-- catches and displays any COM API errors in a message box. For debugging
-- purposes, mostly.
coPerformIO :: IO a -> IO a
coPerformIO io =
catchComException io
( \ err -> do
putMessage (coGetErrorString err)
throwIOComException err
)
coUnsafePerformIO :: IO a -> a
coUnsafePerformIO = unsafePerformIO . coPerformIO
-- | @printMessage val@ /show/s @val@ in a message box.
printMessage :: Show a => a -> IO ()
printMessage x = putMessage (show x)
-- | @putMessage str@ displays @str@ in an informational message box containing an OK button.
putMessage :: String -> IO ()
putMessage msg =
stackString msg $ \ _ m ->
stackString "Haskell message" $ \ _ t ->
Base.messageBox m t 0x40040
{- To mere mortals, that's MB_OK | MB_ICONINFORMATION | MB_TOPMOST :-) -}
-- | @messageBox msg title flg@ displays a message box with the given title and content.
-- The @flg@ parameter is the bit pattern that makes up the @MB_*@ settings you want
-- to use (cf. underlying Win32 API documentation for @MessageBox@.)
messageBox :: String -> String -> Word32 -> IO ()
messageBox msg title flg =
stackString msg $ \ _ m ->
stackString title $ \ _ t ->
Base.messageBox m t flg
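-- For instance, an OK/Cancel box with an exclamation icon could be shown as
-- below; the flag values (MB_OKCANCEL = 0x1, MB_ICONEXCLAMATION = 0x30) are
-- quoted from the Win32 headers and should be double-checked there.
exampleWarnBox :: IO ()
exampleWarnBox = messageBox "Disk almost full" "Haskell warning" (0x1 .|. 0x30)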
-- | @outputDebugString str@ emits @str@ on the debug output stream, prefixed with @"haskell-com: "@.
outputDebugString :: String -> IO ()
outputDebugString msg = primOutputDebugString ("haskell-com: " ++ msg ++ "\n")
{-
Really belongs elsewhere...getting info of what kind of
platform we're on.
-}
data OSVersionInfo
= OSVersionInfo Word32 Word32 Word32
isWindowsNT :: OSVersionInfo -> Bool
isWindowsNT (OSVersionInfo _ _ 2{-VER_PLATFORM_WIN32_NT-}) = True
isWindowsNT _ = False
isWindows95 :: OSVersionInfo -> Bool
isWindows95 (OSVersionInfo _ 0 1{-VER_PLATFORM_WIN32_WINDOWS-}) = True
isWindows95 _ = False
isWindows98 :: OSVersionInfo -> Bool
isWindows98 (OSVersionInfo _ x 1{-VER_PLATFORM_WIN32_WINDOWS-}) = x /= 0
isWindows98 _ = False
versionInfo :: OSVersionInfo
versionInfo = unsafePerformIO $ do
(j,n,d) <- primGetVersionInfo
return (OSVersionInfo j n d)
-- | The @CLSCTX@ enumeration is used by @coCreateInstance@ to specify
-- the execution context in which we'd like the component to be created
-- (just use @AnyProcess@ if you're not too fussed..)
--
data CLSCTX
= CLSCTX_INPROC_SERVER
| CLSCTX_INPROC_HANDLER
| CLSCTX_LOCAL_SERVER
| CLSCTX_INPROC_SERVER16
| CLSCTX_REMOTE_SERVER
| CLSCTX_INPROC_HANDLER16
| CLSCTX_INPROC_SERVERX86
| CLSCTX_INPROC_HANDLERX86
| LocalProcess
| InProcess
| ServerProcess
| AnyProcess
deriving (Show)
instance Enum CLSCTX where
fromEnum ctx =
case ctx of
CLSCTX_INPROC_SERVER -> 1
CLSCTX_INPROC_HANDLER -> 2
CLSCTX_LOCAL_SERVER -> 4
CLSCTX_INPROC_SERVER16 -> 8
CLSCTX_REMOTE_SERVER -> 16
CLSCTX_INPROC_HANDLER16 -> 32
CLSCTX_INPROC_SERVERX86 -> 64
CLSCTX_INPROC_HANDLERX86 -> 128
LocalProcess -> localProcess
InProcess -> inProcess
ServerProcess -> serverProcess
AnyProcess -> anyProcess
toEnum x =
case x of
1 -> CLSCTX_INPROC_SERVER
2 -> CLSCTX_INPROC_HANDLER
8 -> CLSCTX_INPROC_SERVER16
16 -> CLSCTX_REMOTE_SERVER
32 -> CLSCTX_INPROC_HANDLER16
64 -> CLSCTX_INPROC_SERVERX86
128 -> CLSCTX_INPROC_HANDLERX86
0x04 -> LocalProcess
0x0b -> InProcess
0x0d -> ServerProcess
4 -> CLSCTX_LOCAL_SERVER
_ -> AnyProcess
localProcess :: Int
localProcess = 0x04
inProcess :: Int
inProcess = 0x0b
serverProcess :: Int
serverProcess = 0x0d
anyProcess :: Int
anyProcess = 0x0f
--VTable method invocation wrappers:
invokeAndCheck :: (Ptr any -> Ptr b -> IO HRESULT) -> Int -> IUnknown a -> IO ()
invokeAndCheck meth offset iptr = do
hr <- primInvokeItFO meth offset (marshallIUnknown iptr)
checkHR hr
-- | @invokeIt (\ methPtr ip -> action) offset obj@ sets up a vtbl-indexed
-- COM call, unmarshalling and indexing @obj@ before passing it along to the
-- method argument. The first argument is typically an FFI wrapped call to
-- a function pointer (@methPtr@ here).
--
--
invokeIt :: (Ptr any -> Ptr c -> IO a) -> Int -> IUnknown b -> IO a
invokeIt meth offset iptr = primInvokeItFO meth offset (marshallIUnknown iptr)
{-
Library provided stubs for IEnum* interfaces - the HaskellDirect compiler
knows how to generate code for these:
-}
enumNext :: Word32 -> (Ptr any -> IO a) -> Word32 -> IUnknown b -> IO [a]
enumNext szof read_elt celt iptr = do
ptr <- allocBytes (fromIntegral (celt * szof))
po <- allocBytes (fromIntegral sizeofWord32)
invokeIt (\ methPtr ip -> primEnumNext methPtr ip celt ptr po) 3 iptr
elts_read <- readWord32 (castPtr po)
-- v <- peek ((castPtr ptr) :: Ptr (Ptr a))
unmarshalllist szof 0 elts_read read_elt ptr
enumNextOne :: Word32 -> (Ptr any -> IO a) -> IUnknown b -> IO (Maybe a)
enumNextOne szof read_elt iptr =
allocaBytes (fromIntegral sizeofWord32) $ \ po -> do
ptr <- allocBytes (fromIntegral szof)
invokeIt (\ methPtr ip -> primEnumNext methPtr ip 1 ptr po) 3 iptr
elts_read <- readWord32 (castPtr po)
if elts_read <= 0
then return Nothing
else do
x <- read_elt (castPtr ptr)
return (Just x)
enumSkip :: Word32 -> IUnknown a -> IO ()
enumSkip count iptr =
invokeIt (\ methPtr ip -> primEnumSkip methPtr ip count) 4 iptr
enumReset :: IUnknown a -> IO ()
enumReset iptr =
invokeIt (\ methPtr ip -> primEnumReset methPtr ip) 5 iptr
enumClone :: IUnknown a -> IO (IUnknown b)
enumClone iptr = do
ppv <- allocOutPtr
invokeIt (\ methPtr ip -> primEnumClone methPtr ip ppv) 6 iptr
doThenFree free (readIUnknown False{-finalise only-}) ppv
{-
BSTRs were introduced by Automation, but they're now used in non-Automation
contexts.
-}
data BSTR = BSTR
writeBSTR :: Ptr String -> String -> IO ()
writeBSTR ptr str =
stackString str $ \_ pstr -> do
o_stringToBSTR <- prim_System_Win32_Com_Base_stringToBSTR (castPtr pstr) ptr
checkHR o_stringToBSTR
--readBSTR :: Ptr BSTR -> IO String
readBSTR :: Ptr (Ptr String) -> IO String
readBSTR ptr = do
ptr' <- peek ptr
unmarshallBSTR ptr'
unmarshallBSTR :: Ptr String -> IO String
unmarshallBSTR bstr
| bstr == nullPtr = return ""
| len == 0 = return ""
| otherwise = do
stackStringLen (4 + fromIntegral len) "" $ \ pstr -> do
bstrToStringLen (castPtr bstr) len (castPtr pstr)
unmarshallString pstr
where
len = bstrLen (castPtr bstr)
marshallBSTR :: String -> IO (Ptr String)
marshallBSTR s =
stackString s $ \ _ pstr -> do
ptr <- stringToBSTR (castPtr pstr)
x <- peek (castPtr ptr)
free ptr
return x
freeBSTR x
| x == nullPtr = return ()
| otherwise = P.freeBSTR x
-- This type sometimes appears in IDL and type libraries (tlbs), so
-- to avoid having to depend on wtypes for it, let's
-- simply define it here.
type LPSTR = String
-- | @coFree ptr@ releases storage that has been allocated via the COM task allocator;
-- explicitly via @coAlloc@ or indirectly via the APIs that handed the pointer to your
-- code.
coFree :: Ptr a -> IO ()
coFree p = freeMemory p
-- | @coAlloc sz@ allocates @sz@ bytes from the COM task allocator, returning a pointer.
-- The onus is on the caller to constrain the type of that pointer to capture what the
-- allocated memory points to.
coAlloc :: Word32 -> IO (Ptr a)
coAlloc sz = allocMemory sz
-- | @ProgID@s are represented in Haskell as mere strings
type ProgID = String
-- | @clsidFromProgID progid@ looks up a @ProgID@ and tries to translate it into
-- its registered @CLSID@. Raises an IO exception if the @ProgID@ isn't known.
clsidFromProgID :: ProgID -> IO CLSID
clsidFromProgID progid =
stackString progid $ \ _ pprogid -> do
pclsid <- coAlloc sizeofCLSID
coOnFail (primCLSIDFromProgID pprogid (castPtr pclsid))
("Component '" ++ progid ++ "' is unknown")
unmarshallCLSID True pclsid
-- | @progIDFromCLSID cid@ is the dual of @clsidFromProgID@, attempting to translate
-- in the other direction.
progIDFromCLSID :: CLSID -> IO ProgID
progIDFromCLSID clsid = do
pclsid <- marshallCLSID clsid
pwide <- primProgIDFromCLSID (castForeignPtr pclsid)
(pstr,hr) <- wideToString (castPtr pwide)
checkHR hr
str <- unmarshallString (castPtr pstr)
coFree pstr
coFree pwide
return str
-- | Type libraries are identified by a GUID, the @LIBID@.
type LIBID = GUID
mkLIBID :: String -> LIBID
mkLIBID = mkGUID
type LCID = Word32
-- | Representing interface pointers via @IUnknown a@, where @a@ is
-- the particular @IUnknown@-extended interface, e.g., @IUnknown IDispatch_@.
-- If the interface pointer is /just/ @IUnknown@, use @IUnknown ()@.
--
-- Extend this to @IID@s and parameterize them over the interface they
-- represent.
iidIUnknown :: IID (IUnknown ())
iidIUnknown = mkIID "{00000000-0000-0000-C000-000000000046}"
-- | Equality of interface pointers is defined by the COM spec
-- as equality of their underlying @IUnknown@ pointers.
instance Eq (IUnknown_ a) where
iface1 == iface2 = coEqual (castIface iface1) (castIface iface2)
-- | @castIface obj@ performs a type cast of the interface pointer, changing only its phantom interface parameter; the underlying pointer is left untouched.
castIface :: IUnknown a -> IUnknown b
castIface (Unknown o) = Unknown o
-- | @interfaceNULL@ is the @NULL@ interface pointer.
interfaceNULL :: IUnknown a
interfaceNULL = unsafePerformIO (unmarshallIUnknown False nullPtr)
-- | @isNullInterface iptr@ returns @True@ iff @iptr@ is the NULL pointer.
isNullInterface :: IUnknown a -> Bool
isNullInterface (Unknown ip) = foreignPtrToPtr ip == nullPtr
-- | The null interface identifier.
iidNULL :: IID ()
iidNULL = mkIID "{00000000-0000-0000-0000-000000000000}"
instance Show (IUnknown_ a) where
showsPrec _ iface =
shows "<interface pointer = " .
shows (ifaceToAddr iface) .
shows ">"
-- | @queryInterface iid iunk@ asks @iunk@ whether it supports the @iid@
-- interface, returning the corresponding interface pointer. Notice that
-- the type parameter of the @IID@ matches up with that of the resulting
-- interface pointer, giving you type safety - i.e., you can only use the
-- interface pointer with methods supported by that interface.
queryInterface :: IID (IUnknown b) -> IUnknown a -> IO (IUnknown b)
queryInterface riid iptr = do
ppvObject <- allocOutPtr
priid <- marshallIID riid
invokeIt (\ methPtr ip -> primQI methPtr ip (castForeignPtr priid) ppvObject) 0 iptr
doThenFree free (readIUnknown False{-finalise only-}) ppvObject
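-- For instance, narrowing an object to the IPersistFile interface defined
-- above (sketch only):
--
-- > pf <- obj # queryInterface iidIPersistFile
--
-- gives @pf :: IPersistFile ()@, so only IPersistFile methods apply to it.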
-- | @addRef iptr@ increases the reference count of the interface pointer @iptr@.
-- Notice that interface pointers are managed and finalised while on the Haskell heap,
-- so manual reference counting is not required (and not encouraged, as it could prevent
-- the underlying object from being properly released should you forget to decrement
-- the ref count with @release@.)
addRef :: IUnknown a -> IO Word32
addRef iptr = invokeIt (\ methPtr ip -> primAddRef methPtr ip) 1 iptr
-- | @release iptr@ decrements the reference count of the interface pointer @iptr@.
release :: IUnknown a -> IO Word32
release iptr = invokeIt (\ methPtr ip -> primRelease methPtr ip) 2 iptr
withQueryInterface :: IID (IUnknown b)
-> IUnknown a
-> (IUnknown b -> IO c)
-> IO c
withQueryInterface iid unk action
= bracket (queryInterface iid unk) release action
--HDirect generated stub needed by @coGetObject@:
persistfileLoad :: IPersistFile a -> Ptr Wchar_t -> Word32 -> IO ()
persistfileLoad iptr pszFileName dwMode =
invokeIt (\ methPtr ip -> primPersistLoad methPtr ip pszFileName dwMode) 5 iptr
-- | @GUID@ is the Haskell representation for COM GUIDs.
newtype GUID = GUID (ForeignPtr ()) --(Pointer Guid)
data Guid = Guid
mkGUID :: String -> GUID
mkGUID str = unsafePerformIO (stringToGUID str)
-- | @newGUID@ generates a new unique GUID.
newGUID :: IO GUID
newGUID = do
pg <- coAlloc sizeofGUID
ng <- makeFO pg (castPtrToFunPtr finalFreeMemory)
primNewGUID ng
return (GUID ng)
nullGUID :: GUID
nullGUID = unsafePerformIO $ do
x <- primNullIID
p <- makeFO x (castPtrToFunPtr finalNoFree) --primNoFree
return (GUID p)
marshallGUID :: GUID -> IO (ForeignPtr GUID)
marshallGUID (GUID ptr) = return (castForeignPtr ptr)
-- | A version of the GUID marshaller which copies the GUID rather
-- than handing back a pointer to the (immutable) original.
copyGUID :: GUID -> IO (Ptr ())
copyGUID (GUID ptr) = do
pg <- coAlloc sizeofGUID
primCopyGUID ptr pg
return pg
-- | @unmarshallGUID finalize ptr@ unpacks a pointer to an incoming
-- GUID, wrapping it up as a Haskell 'GUID'. If @finalize@ is @True@,
-- the GUID is assumed allocated via the COM task allocator and will
-- be freed/finalized when the 'GUID' becomes garbage.
unmarshallGUID :: Bool -> Ptr GUID -> IO GUID
unmarshallGUID finaliseMe ptr = do
-- ToDo: verify that HDirect *never ever* allocates and
-- stores a GUID in malloc()-space, but consistently
-- uses the COM task allocator. (Why? because the
-- finalizer below will tell the COM task allocator
-- to free the GUID once done with it)
f <- makeFO ptr (castPtrToFunPtr $ if finaliseMe then finalFreeMemory else finalNoFree)
return (GUID f)
-- | @writeGUID ptr g@ packs the Haskell 'GUID' into the pointer; that is,
-- it writes a pointer to the GUID value to @ptr@ -- no copying of underlying
-- structure.
writeGUID :: Ptr GUID -> GUID -> IO ()
writeGUID ptr (GUID g) = poke (castPtr ptr) (foreignPtrToPtr g)
readGUID :: Bool -> Ptr GUID -> IO GUID
readGUID finaliseMe ptr = do
-- ptr <- peek ptr
unmarshallGUID finaliseMe ptr
sizeofGUID :: Word32
sizeofGUID = 16
-- | @stringToGUID "{00000000-0000-0000-C000-0000 0000 0046}"@ translates the
-- COM string representation for GUIDs into an actual 'GUID' value.
stringToGUID :: String -> IO GUID
stringToGUID str =
stackWideString str $ \xstr -> do
pg <- coAlloc sizeofGUID
primStringToGUID xstr (castPtr pg)
unmarshallGUID True pg
-- | @stringFromGUID g@ converts the 'GUID' @g@ into the COM string representation
-- @{aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee}@
stringFromGUID :: GUID -> IO String
stringFromGUID guid = do
pguid <- marshallGUID guid
pwide <- primGUIDToString (castForeignPtr pguid)
(pstr,hr) <- wideToString (castPtr pwide)
checkHR hr
str <- unmarshallString (castPtr pstr)
coFree pstr
coFree pwide
return str
guidToString :: GUID -> String
guidToString ptr = unsafePerformIO (stringFromGUID ptr)
{-
-}
-- | Representation of @IID@s: Give the interface identifier
-- a type parameter, so that when we come to define the Haskell
-- type of @IUnknown.QueryInterface()@ we can rely on the type checker
-- to ensure that the @IID@ passed to @QueryInterface@ agrees with
-- the interface at which we're using the interface pointer that's
-- returned
newtype IID a = IID GUID deriving ( Eq )
newtype CLSID = CLSID GUID deriving ( Eq )
mkIID :: String -> IID a
mkIID str = IID (mkGUID str)
mkCLSID :: String -> CLSID
mkCLSID str = CLSID (mkGUID str)
-- no need to provide marshallers for these, the IDL compiler
-- knows that they're both represented by GUIDs.
stringToIID :: String -> IID a
stringToIID str = mkIID str
stringToCLSID :: String -> CLSID
stringToCLSID str = mkCLSID str
iidToString :: IID a -> String
iidToString (IID i) = guidToString i
clsidToString :: CLSID -> String
clsidToString (CLSID clsid) = guidToString clsid
iidToGUID :: IID a -> GUID
iidToGUID (IID g) = g
castIID :: IID a -> IID b
castIID (IID i) = IID i
clsidToGUID :: CLSID -> GUID
clsidToGUID (CLSID g) = g
clsidToDisplayName :: CLSID -> String
clsidToDisplayName (CLSID g) = "clsid:" ++ tail (init (show g))
guidToIID :: GUID -> IID a
guidToIID g = IID g
guidToCLSID :: GUID -> CLSID
guidToCLSID g = CLSID g
instance Show (IID a) where
showsPrec _ (IID i) = showString (guidToString i)
instance Show CLSID where
showsPrec _ (CLSID c) = showString (guidToString c)
instance Show GUID where
showsPrec _ guid = showString (guidToString guid)
instance Eq GUID where
(GUID x) == (GUID y) = unsafePerformIO $ do
return (isEqualGUID x y)
marshallIID :: IID a -> IO (ForeignPtr (IID a))
marshallIID (IID x) = marshallGUID x >>= return.castForeignPtr
unmarshallIID :: Bool -> Ptr (IID a) -> IO (IID a)
unmarshallIID finaliseMe x = do
i <- unmarshallGUID finaliseMe (castPtr x)
return (IID i)
copyIID (IID x) = copyGUID x
readIID :: Bool -> Ptr (Ptr (IID a)) -> IO (IID a)
readIID finaliseMe ptr = do
a <- peek ptr
unmarshallIID finaliseMe (castPtr a)
writeIID :: Ptr (IID a) -> IID a -> IO ()
writeIID ptr (IID i) = writeGUID (castPtr ptr) i
--------
marshallCLSID (CLSID x) = marshallGUID x
unmarshallCLSID :: Bool -> Ptr CLSID -> IO CLSID
unmarshallCLSID finaliseMe x = do
i <- unmarshallGUID finaliseMe (castPtr x)
return (CLSID i)
copyCLSID (CLSID x) = copyGUID x
readCLSID :: Bool -> Ptr (Ptr CLSID) -> IO CLSID
readCLSID finaliseMe ptr = do
a <- peek ptr
unmarshallCLSID finaliseMe (castPtr a)
writeCLSID :: Ptr CLSID -> CLSID -> IO ()
writeCLSID ptr (CLSID i) = writeGUID (castPtr ptr) i
sizeofCLSID = sizeofGUID
coInitialize :: IO ()
coInitialize = comInitialize
coUnInitialize :: IO ()
coUnInitialize = comUnInitialize
coUnInitializeEx :: IO ()
coUnInitializeEx = comUnInitializeEx
coInitializeEx :: IO ()
coInitializeEx = comInitializeEx
sizeofIID = sizeofGUID
coEqual :: IUnknown a -> IUnknown b -> Bool
coEqual ip1 ip2 = unsafePerformIO $ primComEqual (castIface ip1) (castIface ip2)
--Interface pointer marshallers:
-- marshallIUnknown is in Base.idl
unmarshallIUnknown :: Bool -> Ptr b -> IO (IUnknown a)
unmarshallIUnknown finaliseMe x = do
ip <- addrToIPointer finaliseMe x
case finaliseMe of
True | x /= nullPtr -> ip # addRef >> return ip
_ -> return ip
unmarshallIUnknownFO :: ForeignPtr b -> IO (IUnknown a)
unmarshallIUnknownFO i = return (Unknown (castForeignPtr i))
-- ToDo: I believe it is correct never to do an AddRef()
-- here, but double-check the spec.
{-
addRefMe == True => attach finaliser (which calls Release()), and
call addRef on i-pointer before returning.
== False => attach finaliser (which calls Release()) only.
The former case is used when you receive an i-pointer from the outside
world and want to copy a reference to it into the Haskell heap. This
does not include i-pointers you receive via [out] pointers when calling
a COM component method from Haskell, where it is the obligation of the
server filling in the [out] pointer to call addRef() for you.
-}
readIUnknown :: Bool -> Ptr b -> IO (IUnknown a)
readIUnknown addRefMe x = do
ptr <- peek (castPtr x)
ip <- addrToIPointer True ptr
case addRefMe of
True | x /= nullPtr -> ip # addRef >> return ip
_ -> return ip
writeIUnknown :: Bool -> Ptr (Ptr (IUnknown b)) -> IUnknown a -> IO ()
writeIUnknown addRefMe x v = do
let a = ifaceToAddr v
when (addRefMe && a /= nullPtr)
(v # addRef >> return ())
writePtr x a
{-
@withObject@ applies every method in a list to an
object: @withObject genie [showUp, speak "hi", hide]@.
@withMethod@ applies every argument in a list to a
method: @genie # withMethod speak ["hello", "world"]@.
-}
withObject_ :: IUnknown a -> [IUnknown a -> IO b] -> IO ()
withObject_ obj = sequence_ . map ( obj # )
withMethod_ :: (a -> IUnknown b -> IO c) -> [a] -> IUnknown b -> IO ()
withMethod_ method args obj = sequence_ $ map (\x -> obj # method x) args
withObject :: IUnknown a -> [IUnknown a -> IO b] -> IO [b]
withObject obj = sequence . map ( obj # )
withMethod :: (a -> IUnknown b -> IO c) -> [a] -> IUnknown b -> IO [c]
withMethod method args obj = sequence $ map (\x -> obj # method x) args
loadTypeLib :: String -> IO (IUnknown a)
loadTypeLib fname = do
ptr <- allocOutPtr
stackWideString fname $ \pfname -> do
Base.loadTypeLib pfname ptr
doThenFree free (readIUnknown False{-finalise only-}) ptr
loadRegTypeLib :: GUID -> Int -> Int -> Int -> IO (IUnknown a)
loadRegTypeLib guid maj min lcid = do
ptr <- allocOutPtr
p_guid <- marshallGUID guid
Base.loadRegTypeLib (castForeignPtr p_guid)
(fromIntegral maj) (fromIntegral min)
(fromIntegral lcid) ptr
doThenFree free (readIUnknown False{-finalise only-}) ptr
queryPathOfRegTypeLib :: GUID
-> Word16
-> Word16
-> IO String
queryPathOfRegTypeLib gd maj min = do
pgd <- marshallGUID gd
pbstr <- primQueryPathOfRegTypeLib (castForeignPtr pgd) maj min
if nullPtr == pbstr then
return ""
else do
str <- unmarshallBSTR (castPtr pbstr)
freeBSTR pbstr
return str
createTypeLib :: String -> IO (IUnknown a) --(ICreateTypeLib a)
createTypeLib nm = do
wstr <- stringToWide nm
pptr <- primCreateTypeLib 1{-SYS_WIN32-} wstr
doThenFree free (readIUnknown False{-finalise only-}) pptr
loadTypeLibEx :: String -> Bool -> IO (IUnknown a)
loadTypeLibEx path reg_tlb = do
let
{-
This Int is used to map onto the following enum
typedef enum tagREGKIND { REGKIND_DEFAULT, REGKIND_REGISTER, REGKIND_NONE };
-}
rkind :: Int
rkind
| reg_tlb = 1
| otherwise = 2
out_ptr <- allocOutPtr
stackWideString path $ \pfname -> do
Base.loadTypeLibEx pfname (fromIntegral rkind) out_ptr
doThenFree free (readIUnknown False{-finalise only-}) out_ptr
| jjinkou2/ComForGHC7.4 | System/Win32/Com.hs | bsd-3-clause | 34,074 | 410 | 15 | 7,830 | 7,359 | 3,923 | 3,436 | -1 | -1 |
module Oracle.Oracle
( module Oracle.Internal
, module Oracle.NoOracle
, module Oracle.NoDupBranches
, module Oracle.DiffOracle
, module Oracle.DisjointOracle
) where
import Oracle.Internal
import Oracle.NoOracle
import Oracle.NoDupBranches
import Oracle.DiffOracle
import Oracle.DisjointOracle
| nazrhom/vcs-clojure | src/Oracle/Oracle.hs | bsd-3-clause | 308 | 0 | 5 | 42 | 63 | 40 | 23 | 11 | 0 |
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
module Data.TypeLevel.List
( (:::)
, E
, Reverse
) where
infixr 0 :::
-- | A type-level encoding of a list node.
data head ::: tail
-- | A type-level encoding of the empty list.
data E
-- | Reverses list /x/ to produce list /y/.
class Reverse x y | x -> y
instance R x E y => Reverse x y
-- | Reverses list /x/ to produce list /y/ using accumulator /a/.
class R x a y | x a -> y
instance R E a a
instance R x (p:::a) z => R (p:::x) a z
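-- A worked example of how the instances resolve (comments only, purely
-- illustrative):
--
--   Reverse (Int ::: Bool ::: E) y
--   ==> R (Int ::: Bool ::: E) E y
--   ==> R (Bool ::: E) (Int ::: E) y
--   ==> R E (Bool ::: Int ::: E) y
--   ==> y ~ (Bool ::: Int ::: E)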
| jonathanknowles/hs-type-level-prime-product | Data/TypeLevel/List.hs | bsd-3-clause | 749 | 4 | 8 | 222 | 147 | 85 | 62 | -1 | -1 |
-- | A module dealing with pandoc file extensions and associated file types
--
module Hakyll.Web.Pandoc.FileType
( FileType (..)
, fileType
, getFileType
) where
import System.FilePath (takeExtension)
import Control.Arrow ((>>^))
import Hakyll.Core.Identifier
import Hakyll.Core.Compiler
-- | Datatype to represent the different file types Hakyll can deal with by
-- default
--
data FileType
= Binary
| Css
| Html
| LaTeX
| LiterateHaskell FileType
| Markdown
| OrgMode
| PlainText
| Rst
| Textile
deriving (Eq, Ord, Show, Read)
-- | Get the file type for a certain file. The type is determined by extension.
--
fileType :: FilePath -> FileType
fileType = fileType' . takeExtension
where
fileType' ".css" = Css
fileType' ".htm" = Html
fileType' ".html" = Html
fileType' ".lhs" = LiterateHaskell Markdown
fileType' ".markdown" = Markdown
fileType' ".md" = Markdown
fileType' ".mdn" = Markdown
fileType' ".mdown" = Markdown
fileType' ".mdwn" = Markdown
fileType' ".mkd" = Markdown
fileType' ".mkdwn" = Markdown
fileType' ".org" = OrgMode
fileType' ".page" = Markdown
fileType' ".rst" = Rst
fileType' ".tex" = LaTeX
fileType' ".text" = PlainText
fileType' ".textile" = Textile
fileType' ".txt" = PlainText
fileType' _ = Binary -- Treat unknown files as binary
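-- For example:
--
-- > fileType "posts/hello.markdown" == Markdown
-- > fileType "code/Tutorial.lhs" == LiterateHaskell Markdown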
-- | Get the file type for the current file
--
getFileType :: Compiler a FileType
getFileType = getIdentifier >>^ fileType . toFilePath
| sol/hakyll | src/Hakyll/Web/Pandoc/FileType.hs | bsd-3-clause | 1,621 | 0 | 7 | 442 | 320 | 181 | 139 | -1 | -1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Ordinal.AR.Corpus
( corpus
) where
import Data.String
import Prelude
import Duckling.Locale
import Duckling.Ordinal.Types
import Duckling.Resolve
import Duckling.Testing.Types
corpus :: Corpus
corpus = (testContext {locale = makeLocale AR Nothing}, testOptions, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (OrdinalData 1)
[ "الاول"
, "الأول"
, "اول"
]
, examples (OrdinalData 2)
[ "الثاني"
, "الثان"
, "ثاني"
]
, examples (OrdinalData 3)
[ "الثالث"
, "ثالث"
]
, examples (OrdinalData 4)
[ "الرابع"
, "رابع"
]
, examples (OrdinalData 8)
[ "الثامن"
, "ثامن"
]
, examples (OrdinalData 11)
[ "الأحد عشر"
, "الإحدى عشرة"
, "الحادي عشرة"
]
, examples (OrdinalData 12)
[ "الثاني عشرة"
, "الثان عشر"
, "الاثنى عشر"
]
, examples (OrdinalData 13)
[ "الثالث عشر"
, "الثالثة عشرة"
]
, examples (OrdinalData 21)
[ "الحادي والعشرين"
, "الواحد و العشرون"
]
, examples (OrdinalData 25)
[ "الخامس والعشرين"
, "الخامس و العشرون"
]
, examples (OrdinalData 31)
[ "الواحد والثلاثون"
, "الواحد والثلاثين"
]
, examples (OrdinalData 72)
[ "الثان والسبعون"
, "الثاني والسبعين"
]
, examples (OrdinalData 90)
[ "التسعون"
, "التسعين"
]
]
| facebookincubator/duckling | Duckling/Ordinal/AR/Corpus.hs | bsd-3-clause | 2,250 | 0 | 9 | 818 | 369 | 213 | 156 | 56 | 1 |
{-# LANGUAGE TupleSections, OverloadedStrings, QuasiQuotes, TemplateHaskell, TypeFamilies, RecordWildCards,
DeriveGeneric ,MultiParamTypeClasses ,FlexibleInstances #-}
module Protocol.ROC.PointTypes.PointType12 where
import GHC.Generics
import Data.Word
import Data.Binary
import Protocol.ROC.Utils
data PointType12 = PointType12 {
pointType12Seconds :: !PointType12Seconds
,pointType12Minutes :: !PointType12Minutes
,pointType12Hours :: !PointType12Hours
,pointType12Day :: !PointType12Day
,pointType12Month :: !PointType12Month
,pointType12Year :: !PointType12Year
,pointType12LeapYear :: !PointType12LeapYear
,pointType12DayofWeek :: !PointType12DayofWeek
,pointType12TimeSMHDMY :: !PointType12TimeSMHDMY
,pointType12Century :: !PointType12Century
,pointType12EnableDaySavTime :: !PointType12EnableDaySavTime
} deriving (Read,Eq, Show, Generic)
type PointType12Seconds = Word8
type PointType12Minutes = Word8
type PointType12Hours = Word8
type PointType12Day = Word8
type PointType12Month = Word8
type PointType12Year = Word8
type PointType12LeapYear = Word8
type PointType12DayofWeek = Word8
type PointType12TimeSMHDMY = [Word8]
type PointType12Century = Word8
type PointType12EnableDaySavTime = Bool
pointType12Parser :: Get PointType12
pointType12Parser = do
seconds <- getWord8
minutes <- getWord8
hours <- getWord8
day <- getWord8
month <- getWord8
year <- getWord8
leapYear <- getWord8
dayOfWeek <- getWord8
timeSMHDMY <- getTime
century <- getWord8
enableDaySavTime <- anyButNull
return $ PointType12 seconds minutes hours day month year leapYear dayOfWeek timeSMHDMY century enableDaySavTime
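-- A sketch of how the parser would typically be driven, using
-- Data.Binary.Get's 'runGet' on a lazy ByteString holding the raw
-- point-type bytes (the import and payload are assumptions, not part of
-- this module):
--
-- > import qualified Data.ByteString.Lazy as BL
-- > decodePT12 :: BL.ByteString -> PointType12
-- > decodePT12 = runGet pointType12Parser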
| jqpeterson/roc-translator | src/Protocol/ROC/PointTypes/PointType12.hs | bsd-3-clause | 2,391 | 0 | 9 | 952 | 324 | 179 | 145 | 67 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
-------------------------------------------------------------------
-- |
-- Module : Irreverent.Bitbucket.Http.Repositories.Pipelines.DeleteEnvironmentVariable
-- Copyright : (C) 2017 - 2018 Irreverent Pixel Feats
-- License : BSD-style (see the file /LICENSE.md)
-- Maintainer : Dom De Re
--
-------------------------------------------------------------------
module Irreverent.Bitbucket.Http.Repositories.Pipelines.DeleteEnvironmentVariable (
-- * Functions
deletePipelinesEnvironmentVariable
) where
import Irreverent.Bitbucket.Http.Common
import Irreverent.Bitbucket.Http.Error
import Irreverent.Bitbucket.Http.Methods
import Irreverent.Bitbucket.Core.Control (BitbucketT(..))
import Irreverent.Bitbucket.Core.Data.Common (
Username(..)
, RepoName(..)
, Uuid(..)
)
import Ultra.Control.Monad.Catch (MonadCatch(..))
import Ultra.Control.Monad.Trans.Either (EitherT)
import qualified Ultra.Data.Text as T
import qualified Network.Wreq.Session as S
import Preamble
deletePipelinesEnvironmentVariable
:: (MonadCatch m, MonadIO m)
=> S.Session
-> Username
-> RepoName
-> Uuid
-> EitherT BitbucketAPIError (BitbucketT m) ()
deletePipelinesEnvironmentVariable sess owner reponame uuid =
let
endpoint :: T.Text
endpoint = T.concat [
baseV2URL
, "/repositories/"
, getUsername owner
, "/"
, getRepoName reponame
, "/pipelines_config/variables/"
, getUuid uuid
]
in bitbucketDelete sess endpoint
| irreverent-pixel-feats/bitbucket | bitbucket-http-client/src/Irreverent/Bitbucket/Http/Repositories/Pipelines/DeleteEnvironmentVariable.hs | bsd-3-clause | 1,574 | 0 | 12 | 255 | 269 | 170 | 99 | 36 | 1 |
module ThreadsLang.ParserSuite
( tests
) where
import Test.HUnit.Base
import Text.Megaparsec
import Text.Megaparsec.String
import ThreadsLang.Data
import ThreadsLang.Parser
tests :: Test
tests = TestList
[ TestLabel "Test const expression" testConstExpr
, TestLabel "Test binary-operator expression" testBinOpExpr
, TestLabel "Test unary-operator expression" testUnaryOpExpr
, TestLabel "Test null-operator expression" testNullOpExpr
, TestLabel "Test condition expression" testCondExpr
, TestLabel "Test var expression" testVarExpr
, TestLabel "Test let expression" testLetExpr
, TestLabel "Test expression" testExpression
, TestLabel "Test parse program" testParseProgram
, TestLabel "Test parse proc expression" testParseProc
]
constNum = ConstExpr . ExprNum
constBool = ConstExpr . ExprBool
parserEqCase :: (Eq a, Show a) => Parser a -> String -> a -> String -> Test
parserEqCase parser msg expect input =
TestCase $ assertEqual msg (Right expect) runP
where runP = runParser parser "Test equal case" input
parserFailCase :: (Eq a, Show a) => Parser a -> String -> String -> Test
parserFailCase parser msg input =
TestCase $ assertBool msg (isLeft runP)
where
isLeft (Left _) = True
isLeft (Right _) = False
runP = runParser parser "Test fail case" input
testEq :: String -> Expression -> String -> Test
testEq = parserEqCase expression
testFail :: String -> String -> Test
testFail = parserFailCase expression
testConstExpr :: Test
testConstExpr = TestList
[ testEq "Parse single number" (constNum 5) "5"
, testEq "Parse multi-numbers" (constNum 123) "123"
, testEq "Parse negative number" (constNum (-233)) "-233"
]
testBinOpExpr :: Test
testBinOpExpr = TestList
[ testEq "Parse '-' expression (no space)"
(BinOpExpr Sub (constNum 3) (constNum 4))
"-(3,4)"
, testEq "Parse '*' expression (with spaces)"
(BinOpExpr Mul (constNum 10) (constNum 24))
"* ( 10 , 24 )"
, testEq "Parse binary num-to-bool expression"
(BinOpExpr Gt (constNum 1) (constNum 2))
"greater?(1, 2)"
]
testUnaryOpExpr :: Test
testUnaryOpExpr = TestList
[ testEq "Parse isZero expression (no space)"
(UnaryOpExpr IsZero (constNum 1)) "zero?(1)"
, testEq "Parse isZero expression (with space)"
(UnaryOpExpr IsZero (constNum 3)) "zero? ( 3 )"
, testEq "Parse minus expression"
(UnaryOpExpr Minus (constNum 1)) "minus(1)"
]
testNullOpExpr :: Test
testNullOpExpr = TestList
[ testEq "Parse mutex expression" (NullOpExpr Mut) "mutex()"
, testEq "Parse yield expression" (NullOpExpr Yield) "yield()"
]
testCondExpr :: Test
testCondExpr = TestList
[ testEq "Parse if expression"
(IfExpr (UnaryOpExpr IsZero (constNum 3)) (constNum 4) (constNum 5))
"if zero?(3) then 4 else 5"
]
testVarExpr :: Test
testVarExpr = TestList
[ testEq "Parse var expression" (VarExpr "foo") "foo"
, testFail "Parse reserved word should fail" "then"
]
testLetExpr :: Test
testLetExpr = TestList
[ testEq "Parse let expression with 0 binding"
(LetExpr [] (VarExpr "bar"))
"let in bar"
, testEq "Parse let expression with 1 binding"
(LetExpr [("bar", constNum 1)] (VarExpr "bar"))
"let bar = 1 in bar"
, testEq "Parse let expression with multi bindings"
(LetExpr [("x", constNum 1), ("y", constNum 2), ("z", constNum 3)]
(VarExpr "bar"))
"let x = 1 y = 2 z = 3 in bar"
, testEq "Parse let recursive expression"
(LetRecExpr
[("double", ["xxx"], BinOpExpr Mul (constNum 2) (VarExpr "xxx"))]
(CallExpr (VarExpr "double") [constNum 5]))
"letrec double(xxx) = *(2, xxx) in (double 5)"
]
testExpression :: Test
testExpression = TestList
[ testEq "Parse complex expression"
(LetExpr [("bar", constNum 1)]
(IfExpr
(UnaryOpExpr IsZero (VarExpr "bar"))
(constNum 3)
(VarExpr "zero")))
"let bar = 1 in if zero? (bar) then 3 else zero"
]
testParseProgram :: Test
testParseProgram = TestList
[ testEq "Parse program (with spaces)"
(Prog (LetExpr [("x", constNum 3)]
(VarExpr "x")))
"let x = 3 in x"
]
where
testEq msg expect prog = TestCase $
assertEqual msg (Right expect) (parseProgram prog)
testParseProc :: Test
testParseProc = TestList
[ testEq "Parse proc expression"
(ProcExpr ["x"] (VarExpr "x"))
"proc (x) x"
, testEq "Parse call expression"
(CallExpr
(ProcExpr
["f"]
(CallExpr (VarExpr "f") [CallExpr (VarExpr "f") [constNum 77]]))
[ProcExpr ["x"] (BinOpExpr Sub (VarExpr "x") (constNum 11))])
"(proc (f) (f (f 77)) proc (x) -(x,11))"
]
| li-zhirui/EoplLangs | test/ThreadsLang/ParserSuite.hs | bsd-3-clause | 4,976 | 0 | 17 | 1,333 | 1,298 | 671 | 627 | 118 | 2 |
{-+
This module defines the representation of types, qualified types, type schemes,
substitution and various auxiliary types.
-}
module TiTypes(module TiTypes,HsTypeI(..),HsIdentI(..),Kind) where
import Syntax(HsTypeI(..),TI(..),HsIdentI(..),HsFunDeps,
hsTyFun,hsTyApp,hsTyTuple,hsTyCon,hsTyVar,
kstar,base,mapTI,accT)
import List(nub,(\\))
import Maybe(fromMaybe)
import MUtils(collectByFst)
--import TiEnv(Env,range)
import TiKinds(Kind)
import TiNames
import Debug.Trace(trace) -- for debugging
type Type i = HsTypeI i
type Pred i = Type i
data Qual i t = [Pred i] :=> t deriving (Eq,Show,Read)
type QType i = Qual i (Type i)
data Scheme v = Forall [Kinded v] [Kinded v] (QType v)
deriving (Eq,Show,Read) -- Eq??
unQual t = []:=>t
forall' = Forall []
mono t = forall' [] (unQual t)
--uscheme qt = fakeForall (tv qt) qt -- temporary hack!!!
-- where fakeForall vs qt = Forall (map (:>:kstar) vs) qt -- temporary hack!!!
--upscheme t = uscheme (unQual t) -- temporary hack!!!
kuscheme ks qt = forall' (kinded ks (tv qt)) qt
kupscheme ks t = kuscheme ks (unQual t)
funT ts = foldr1 hsTyFun ts -- :: Type
appT ts = foldl1 hsTyApp ts -- :: Type
tupleT ts = hsTyTuple ts -- :: Type
tyvar v = hsTyVar v -- :: Tyvar -> Type
ty (HsVar v) = tyvar v
ty (HsCon c) = hsTyCon c -- :: Type
isVarT (Typ (HsTyVar v)) = Just v
isVarT _ = Nothing
isFunT (Typ (HsTyFun t1 t2)) = Just (t1,t2)
isFunT _ = Nothing
flatAppT t = flat t []
where
flat (Typ (HsTyApp t1 t2)) ts = flat t1 (t2:ts)
flat t ts = (t,ts)
flatConAppT ty =
case flatAppT ty of
(Typ (HsTyCon c),ts) -> Just (HsCon c,ts)
_ -> Nothing
--instName (SrcLoc f l c) = HsVar (Qual (Module "i") (show l++"_"++show c))
--dictName n = HsVar (Qual (Module "d") n) :: QId
--vvar = unqual :: String->VarId
infix 1 :>:
data Typing x t = x :>: t deriving (Eq,Show,Read)
type Assump i = Typing (HsIdentI i) (Scheme i)
type Typed i x = Typing x (Type i)
emap f (e :>: t) = f e:>:t
tdom xts = [x|x:>:_<-xts]
unzipTyped :: [Typing x t] -> Typing [x] [t]
unzipTyped ets = uncurry (:>:) $ unzip [(e,t)|e:>:t<-ets]
zipTyped :: Typing [x] [t] -> [Typing x t]
zipTyped (xs:>:ts) = zipWith (:>:) xs ts
--collectTyped :: Ord x => [Typing x t] -> [Typing x [t]]
collectTyped xts = map (uncurry (:>:)) $ collectByFst [(x,t)|x:>:t<-xts]
type Kinded x = Typing x Kind
kinded1 ks v = v:>:head' [k|HsVar v':>:k<-ks,v'==v] -- not very efficient...
where head' [] = trace ("Bug in TiTypes.kinded1: missing kind info for "++show v) kstar
head' (k:_) = k
kinded ks = map (kinded1 ks)
--type KAssump = Typing Id Kind
type KInfo i = [Typing (HsIdentI i) (TypeInfo i)]
data TypeInfo i
= Data
| Newtype
| Class [Pred i] -- superclasses
[Kinded i] -- parameters
(HsFunDeps Int) -- fun deps (0-based parameter positions)
[Assump i] -- methods
| Synonym [i] (Type i)
| Tyvar
deriving ({-Eq,-}Show,Read)
newtype Subst i = S [(i,Type i)] deriving (Show)
idS = S []
infix 5 +->
v+-> t = S [(v,t)]
extS v t s = compS (v+->t) s
compS s1@(S s1') s2 = S (s1'++s2')
where S s2' = apply s1 s2
domS (S s) = map fst s
varSubst s@(S s') v = fromMaybe (tyvar v) (lookup v s')
applySubst s@(S s') ty@(Typ t) =
case t of
HsTyVar v -> fromMaybe ty (lookup v s')
_ -> base $ mapTI id (applySubst s) t
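-- Intended reading of composition (stated as a comment, not checked here):
-- when the domains of s1 and s2 do not overlap, applying @compS s1 s2@ is
-- the same as applying s2 first and then s1, i.e.
--
-- > apply (compS s1 s2) t == apply s1 (apply s2 t)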
class TypeVar v => Types v t | t->v where
tmap :: (Type v->Type v) -> t -> t
apply :: Subst v -> t -> t
tv :: t -> Set v
apply = tmap . applySubst
type Set a = [a]
occurs v t = v `elem` tv t
instance Types v t => Types v [t] where
tmap = map . tmap
tv = nub . concatMap tv
instance TypeVar v => Types v (Subst v) where
tmap f (S s') = S [(v,tmap f t)|(v,t)<-s'] -- hmm
tv (S s') = tv (map snd s') -- hmm
instance (Types v t1,Types v t2) => Types v (t1,t2) where
tmap f (t1,t2) = (tmap f t1,tmap f t2)
tv (t1,t2) = nub (tv t1++tv t2)
instance (Types v t1,Types v t2,Types v t3) => Types v (t1,t2,t3) where
tmap f (t1,t2,t3) = (tmap f t1,tmap f t2,tmap f t3)
tv (t1,t2,t3) = nub (tv t1++tv t2++tv t3)
instance Types v t => Types v (Typing x t) where
tmap f (x:>:t) = x:>:tmap f t
tv (x:>:t) = tv t
instance Functor (Typing x) where
fmap f (x:>:t) = x:>:f t -- hmm
instance TypeVar v => Types v (Type v) where
tmap = id
tv (Typ t) =
case t of
HsTyVar v -> [v]
HsTyForall vs ps t -> tv (ps,t) \\ vs
_ -> nub $ accT ((++) . tv) t []
instance TypeVar v => Types v (Scheme v) where
apply s (Forall ags gs qt) = Forall ags gs (apply (restrict s (tdom (ags++gs))) qt)
tmap f (Forall ags gs qt) = Forall ags gs (tmap f qt) -- hmm
tv (Forall ags gs qt) = tv qt \\ tdom (ags++gs)
restrict (S s) gs = S [s1|s1@(v,_)<-s,v `notElem` gs]
instance Types v t => Types v (Qual v t) where
tmap f (ps:=>t) = tmap f ps:=>tmap f t
tv (ps:=>t) = tv (ps,t)
{-
instance Types v info => Types v (Env key info) where
tmap = fmap . tmap
tv = tv . range
-}
| forste/haReFork | tools/base/TI/TiTypes.hs | bsd-3-clause | 4,969 | 24 | 14 | 1,176 | 2,199 | 1,216 | 983 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module PFDS.Sec3.Ex5 where
class Ord e => BinomialHeap h e where
empty :: [h e]
isEmpty :: [h e] -> Bool
rank :: h e -> Int
root :: h e -> e
link :: h e -> h e -> h e
insTree :: h e -> [h e] -> [h e]
insert :: e -> [h e] -> [h e]
merge :: [h e] -> [h e] -> [h e]
removeMinTree :: [h e] -> (h e, [h e])
findMin :: [h e] -> e
deleteMin :: [h e] -> [h e]
data Tree e = Node Int e [Tree e] deriving (Show)
instance Ord e => BinomialHeap Tree e where
empty = []
isEmpty = null
rank (Node r _ _) = r
root (Node _ x _) = x
link t1@(Node r x1 c1) t2@(Node _ x2 c2) =
if x1 <= x2
then Node (r+1) x1 (t2:c1)
else Node (r+1) x2 (t1:c2)
insTree t [] = [t]
insTree t ts@(t':ts') =
if rank t < rank t'
then t:ts
else insTree (link t t') ts'
insert x ts = insTree (Node 0 x []) ts
merge ts1 [] = ts1
merge [] ts2 = ts2
merge ts1@(t1:ts1') ts2@(t2:ts2')
| rank t1 < rank t2 = t1:merge ts1' ts2
| rank t2 < rank t1 = t2:merge ts1 ts2'
| otherwise = insTree (link t1 t2) (merge ts1' ts2')
removeMinTree [] = undefined
removeMinTree [t] = (t, [])
removeMinTree (t:ts) = let
(t', ts') = removeMinTree ts
in if root t <= root t'
then (t, ts)
else (t', t:ts')
-- 3.5
findMin = minimum . map root
deleteMin ts =
let (Node _ _ ts1, ts2) = removeMinTree ts
in merge (reverse ts1) ts2
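-- A small usage sketch: inserting a few keys into the empty binomial heap
-- and reading back the minimum.
exampleFindMin :: Int
exampleFindMin = findMin (foldr insert (empty :: [Tree Int]) [5, 3, 8, 1])
-- exampleFindMin == 1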
| matonix/pfds | src/PFDS/Sec3/Ex5.hs | bsd-3-clause | 1,534 | 0 | 11 | 498 | 824 | 421 | 403 | 48 | 0 |
module Renzu.Prism where
----------------------------------------------------------------
import Control.Monad
import Text.Read
import Renzu.Optic
import Renzu.Profunctor
----------------------------------------------------------------
type Prism s t a b = forall p. Choice p => Optic p s t a b
type Prism' s a = Prism s s a a
----------------------------------------------------------------
prism :: (b -> t) -> (s -> Either t a) -> Prism s t a b
-- (b -> t) -> (s -> Either t a) -> (forall p. Choice p => p a b -> p s t)
prism to from = dimap from (either id to) . right
{-# INLINE prism #-}
prism' :: (b -> s) -> (s -> Maybe a) -> Prism s s a b
prism' to from = prism to (\s -> maybe (Left s) Right $ from s)
{-# INLINE prism' #-}
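-- Intended prism laws for @prism to from@ (stated as comments, not checked
-- here): @from (to b) == Right b@, and whenever @from s == Right a@,
-- rebuilding with @to a@ should give back (an equivalent of) @s@.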
----------------------------------------------------------------
_Left :: Prism (Either a c) (Either b c) a b
_Left = left
{-# INLINE _Left #-}
_Right :: Prism (Either c a) (Either c b) a b
_Right = right
{-# INLINE _Right #-}
----------------------------------------------------------------
_Just :: Prism (Maybe a) (Maybe b) a b
_Just = prism Just $ maybe (Left Nothing) Right
{-# INLINE _Just #-}
_Nothing :: Prism (Maybe a) (Maybe a) () a
_Nothing = prism Just $ maybe (Right ()) (Left . Just)
{-# INLINE _Nothing #-}
----------------------------------------------------------------
_Show :: (Read a, Show a) => Prism' String a
_Show = prism' show readMaybe
{-# INLINE _Show #-}
----------------------------------------------------------------
only :: Eq a => a -> Prism' a ()
only a = nearly a (== a)
{-# INLINE only #-}
nearly :: a -> (a -> Bool) -> Prism' a ()
nearly a p = prism' (const a) (guard . p)
{-# INLINE nearly #-}
| acple/renzu | src/Renzu/Prism.hs | bsd-3-clause | 1,690 | 0 | 11 | 307 | 550 | 292 | 258 | -1 | -1 |
{-# LANGUAGE
MultiParamTypeClasses
, FunctionalDependencies
#-}
module Data.Trie.Class where
import Prelude hiding (lookup)
-- import qualified Data.Trie as BT
import qualified Data.ByteString as BS
import Data.Maybe (isJust)
import Data.Foldable as F
import Data.Functor.Identity (Identity (..))
-- | Class representing tries with single-threaded insertion, deletion, and lookup.
-- @forall ts ps a. isJust $ lookupPath ps (insertPath ps a ts)@
-- @forall ts ps. isNothing $ lookupPath ps (deletePath ps ts)@
class Trie p s t | t -> p where
lookup :: p s -> t s a -> Maybe a
insert :: p s -> a -> t s a -> t s a
delete :: p s -> t s a -> t s a
member :: Trie p s t => p s -> t s a -> Bool
member t = isJust . lookup t
notMember :: Trie p s t => p s -> t s a -> Bool
notMember t = not . member t
-- * Conversion
fromFoldable :: (Foldable f, Monoid (t s a), Trie p s t) => f (p s, a) -> t s a
fromFoldable = F.foldr (uncurry insert) mempty
-- -- * ByteString-Trie
-- -- | Embeds an empty ByteString passed around for type inference.
-- newtype BSTrie q a = BSTrie {unBSTrie :: (q, BT.Trie a)}
-- makeBSTrie :: BT.Trie a -> BSTrie BS.ByteString a
-- makeBSTrie x = BSTrie (mempty,x)
-- getBSTrie :: BSTrie BS.ByteString a -> BT.Trie a
-- getBSTrie (BSTrie (_,x)) = x
-- instance Trie Identity BS.ByteString BSTrie where
-- lookup (Identity ps) (BSTrie (_,xs)) = BT.lookup ps xs
-- insert (Identity ps) x (BSTrie (q,xs)) = BSTrie (q, BT.insert ps x xs)
-- delete (Identity ps) (BSTrie (q,xs)) = BSTrie (q, BT.delete ps xs)
| athanclark/tries | src/Data/Trie/Class.hs | bsd-3-clause | 1,558 | 0 | 10 | 330 | 343 | 185 | 158 | 19 | 1 |
-- | This internal module is exposed only for testing and benchmarking. You
-- don't need to import it.
module Text.Regex.Applicative.StateQueue
( StateQueue
, empty
, insert
, insertUnique
, getElements
) where
import Prelude hiding (read, lookup, replicate)
import qualified Data.IntSet as IntSet
import Data.Foldable as F
-- | 'StateQueue' is a data structure that can efficiently insert elements
-- (preserving their order)
-- and check whether an element with the given 'Int' key is already in the queue.
data StateQueue a = StateQueue
{ elements :: [a]
, ids :: !IntSet.IntSet
}
deriving (Eq,Show)
instance Foldable StateQueue where
foldr f a = F.foldr f a . getElements
-- | Get the list of all elements
getElements :: StateQueue a -> [a]
getElements = reverse . elements
{-# INLINE empty #-}
-- | The empty state queue
empty :: StateQueue a
empty = StateQueue
{ elements = []
, ids = IntSet.empty
}
{-# INLINE insert #-}
-- | Insert an element in the state queue, unless there is already an element with the same key
insertUnique
:: Int -- ^ key
-> a
-> StateQueue a
-> StateQueue a
insertUnique i v sq@StateQueue { ids = ids, elements = elements } =
if i `IntSet.member` ids
then sq
else sq { elements = v : elements
, ids = IntSet.insert i ids
}
-- | Insert an element in the state queue without a key.
--
-- Since 'insert' doesn't take a key, it won't affect any 'insertUnique'.
insert
:: a
-> StateQueue a
-> StateQueue a
insert v sq =
sq { elements = v : elements sq }
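-- A small usage sketch: the second keyed insert with key 1 is dropped,
-- while the unkeyed insert always goes through, so the elements come back
-- in insertion order as "ac".
exampleElements :: String
exampleElements =
    getElements (insert 'c' (insertUnique 1 'b' (insertUnique 1 'a' empty)))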
| mitchellwrosen/regex-applicative | Text/Regex/Applicative/StateQueue.hs | mit | 1,627 | 0 | 10 | 417 | 330 | 194 | 136 | 41 | 2 |
-- The parser from the NoFib benchmark suite
-- TODO: implement and use getContents instead of readFile "big_big_test.hs"
-- TODO: fix the internal error
-- ==========================================================--
-- === Raw lexical analysis (tokenisation) of source ===--
-- === Lexer.hs ===--
-- ==========================================================--
module Parser where
import Char -- 1.3
----------------------------------------------------------
-- Lexemes --
----------------------------------------------------------
type Token = (Int, Int, Lex, String) -- (line, column, lexeme type, value)
data Lex = Lcon -- constructor used as prefix:
-- normal prefix constructor,
-- or bracketed infix constructor
| Lconop -- constructor used as infix:
-- normal prefix constructor in backquotes,
-- or infix constructor (starting with ":")
| Lvar -- variable used as prefix:
-- normal prefix variable,
-- or bracketed infix var (operator)
| Lvarop -- variable used as infix:
-- normal prefix variable in backquotes,
-- or infix variable (operator)
-- | Ltycon -- constructor starting with A-Z
-- subcase of Lcon
-- | Ltyvar -- variable starting with a-z
-- subcase of Lvar
| Lintlit -- integer literal
| Lcharlit -- character literal
| Lstringlit -- string literal
| Llbrace -- {
| Lrbrace -- }
| Lsemi -- ;
| Lequals -- =
| Lbar -- |
| Larrow -- ->
| Llparen -- (
| Lrparen -- )
| Lcomma -- ,
| Llbrack -- [
| Lrbrack -- ]
| Lunder -- _
| Lminus -- -
| Lslash -- \
| Lmodule
| Linfixl
| Linfixr
| Linfix
| Lext
| Ldata
| Lif
| Lthen
| Lelse
| Llet
| Lin
| Lcase
| Lof
| Lwhere
| Leof deriving (Eq, Show{-was:Text-})
{-
Lexing rules:
case (
if next is \, -> Llparen
if next is symbol, take symbols and expect closing ) -> Lvar
if next is :, take tail-ident-chars, expect closing ) -> Lcon
otherwise -> Llparen
case `
if next A-Z, take tail-ident-chars, expect ` -> Lconop
if next a-z, take tail-ident-chars, expect ` -> Lvarop
otherwise -> error
case A-Z
take tail-ident-chars -> Lcon
case a-z
take tail-ident-chars -> Lvar
case 0-9
take 0-9s -> Lintlit
case '
expect a lit-char, then ' -> charlit
case "
expect lit-chars, then " -> stringlit
case {
case - -> run_comment
otherwise -> Llbrace
case } -> Lrbrace
case ) -> Lrparen
case [ -> Llbrack
case ] -> Lrbrack
case ; -> Lsemi
case , -> Lcomma
case _ -> Lunder
case -
case - -> line_comment
case > -> Larrow
otherwise -> Lminus
case # in column 1: this is a preprocessor line
case :!#$%&*+./<=>?@\^|~
take symbols, then case resulting
"=" -> Lequals
"|" -> Lbar
"\" -> Lslash
otherwise
if starts with : -> Lconop
else -> lvarop
-}
-- ==========================================================--
--
leLex :: Int -> Int -> String -> [Token]
leLex l n []
= repeat (99997, 99997, Leof, "")
leLex l n ('(':[])
   = [(l, n, Llparen, "(")]
leLex l n ('(':c:cs)
| c == ':'
= case leChunk (n+1) leIsTailChar cs of
(restSym, nn, restInput) -> case restInput of
[] -> leFail l nn " ) expected"
(')':as) -> (l, n, Lvar, c:restSym) : leLex l (nn+1) as
(_:_) -> leFail l nn " ) expected"
| c == '\\'
= (l, n, Llparen, "(") : leLex l (n+1) (c:cs)
| leIsSymbol c
= case leChunk (n+1) leIsSymbol cs of
(restSym, nn, restInput) -> case restInput of
[] -> leFail l nn " ) expected"
(')':as) -> (l, n, Lvar, c:restSym) : leLex l (nn+1) as
(_:_) -> leFail l nn " ) expected"
| otherwise
= (l, n, Llparen, "(") : leLex l (n+1) (c:cs)
leLex l n ('`':c:cs)
| isAlpha c
= case leChunk (n+1) isAlpha cs of
(restSym, nn, restInput) -> case restInput of
[] -> leFail l nn " ` expected"
('`':as) -> (l, n, if isUpper c then Lconop else Lvarop, c:restSym)
: leLex l (nn+1) as
(_:_) -> leFail l nn " ` expected"
| otherwise
= leFail l n "Bad infix operator"
leLex l n ('"':cs)
= case leTakeLitChars True l (n+1) cs of
(restSym, nn, restInput) -> case restInput of
[] -> leFail l nn " \" expected"
('"':as) -> (l, n, Lstringlit, restSym) : leLex l (nn+1) as
(_:_) -> leFail l nn " \" expected"
leLex l n ('\'':cs)
= case leTakeLitChars False l (n+1) cs of
(restSym, nn, restInput) -> case restInput of
[] -> leFail l nn " ' expected"
('\'':as) -> case restSym of
[_] -> (l, n, Lcharlit, restSym) : leLex l (nn+1) as
_ -> leFail l (n+1) "Bad character literal"
(_:_) -> leFail l nn " ' expected"
leLex l n ('}':cs)
= (l, n, Lrbrace, "}") : leLex l (n+1) cs
leLex l n (')':cs)
= (l, n, Lrparen, ")") : leLex l (n+1) cs
leLex l n ('[':cs)
= (l, n, Llbrack, "[") : leLex l (n+1) cs
leLex l n (']':cs)
= (l, n, Lrbrack, "]") : leLex l (n+1) cs
leLex l n (';':cs)
= (l, n, Lsemi, ";") : leLex l (n+1) cs
leLex l n (',':cs)
= (l, n, Lcomma, ",") : leLex l (n+1) cs
leLex l n ('_':cs)
= (l, n, Lunder, "_") : leLex l (n+1) cs
leLex l n ('{':cs)
= case cs of
        []        -> [(l, n, Llbrace, "{")]
        ('-':cs2) -> leLexRComment l (n+2) cs2
        (_:_)     -> (l, n, Llbrace, "{") : leLex l (n+1) cs
leLex l n ('-':cs)
= case cs of
[] -> [(l, n, Lminus, "-")]
('-':cs2) -> leLexLComment l (n+2) cs2
('>':cs3) -> (l, n, Larrow, "->") : leLex l (n+2) cs3
('}':cs3) -> leFail l n "Misplaced -}"
(_:_) -> (l, n, Lminus, "-") : leLex l (n+1) cs
leLex l n (' ':cs)
= leLex l (n+1) cs
leLex l n ('\n':cs)
= leLex (l+1) 1 cs
leLex l n ('\t':cs)
= leLex l (n - (n `mod` 8) + 9) cs
leLex l n (c:cs)
= if c == '#'
then if n == 1
then
{- This is a CPP line number thingy -}
let lineNoText = takeWhile isDigit (tail cs)
lineNo = leStringToInt lineNoText
nextLine = drop 1 (dropWhile ((/=) '\n') cs)
in
leLex lineNo 1 nextLine
else
{- it's a symbol starting with # -}
case leChunk (n+1) leIsSymbol cs of
(restSym, nn, restText) -> (l, n, Lvarop, c:restSym) :
leLex l nn restText
else
if isAlpha c
then case leChunk (n+1) leIsTailChar cs of
(restSym, nn, restText) -> (l, n, if isUpper c
then Lcon
else Lvar, c:restSym) :
leLex l nn restText
else
if isDigit c
then case leChunk (n+1) isDigit cs of
(restSym, nn, restText) -> (l, n, Lintlit, c:restSym) :
leLex l nn restText
else
if leIsSymbol c
then case leChunk (n+1) leIsSymbol cs of
(restSym, nn, restText) -> (l, n, if c == ':'
then Lconop
else Lvarop, c:restSym) :
leLex l nn restText
else
leFail l n ("Illegal character " ++ [c])
-- ==========================================================--
--
leChunk :: Int -> (Char -> Bool) -> String -> (String, Int, String)
leChunk n proper []
= ([], n, [])
leChunk n proper (c:cs)
| proper c
= case leChunk (n+1) proper cs of
(restId, col, restInput) -> (c:restId, col, restInput)
| otherwise
= ([], n, c:cs)
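-- For example (the column advances by one per character consumed):
--
--   leChunk 5 isDigit "123abc" == ("123", 8, "abc")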
-- ==========================================================--
--
leTakeLitChars :: Bool -> Int -> Int -> String -> (String, Int, String)
leTakeLitChars d l n []
= leFail l n "End of file inside literal"
leTakeLitChars d l n ('\\':'\\':cs)
= case leTakeLitChars d l (n+2) cs of
(rest, col, left) -> ('\\':rest, col, left)
leTakeLitChars d l n ('\\':'n':cs)
= case leTakeLitChars d l (n+2) cs of
(rest, col, left) -> ('\n':rest, col, left)
leTakeLitChars d l n ('\\':'t':cs)
= case leTakeLitChars d l (n+2) cs of
(rest, col, left) -> ('\t':rest, col, left)
leTakeLitChars d l n ('\\':'"':cs)
= case leTakeLitChars d l (n+2) cs of
(rest, col, left) -> ('"':rest, col, left)
leTakeLitChars d l n ('\\':'\'':cs)
= case leTakeLitChars d l (n+2) cs of
(rest, col, left) -> ('\'':rest, col, left)
leTakeLitChars d l n ('"':cs)
| d = ([], n, ('"':cs))
| not d = case leTakeLitChars d l (n+1) cs of
(rest, col, left) -> ('"':rest, col, left)
leTakeLitChars d l n ('\'':cs)
| not d = ([], n, ('\'':cs))
| d = case leTakeLitChars d l (n+1) cs of
(rest, col, left) -> ('\'':rest, col, left)
leTakeLitChars d l n ('\n':cs)
= leFail l n "Literal exceeds line"
leTakeLitChars d l n ('\t':cs)
= leFail l n "Literal contains tab"
leTakeLitChars d l n (c:cs)
= case leTakeLitChars d l (n+1) cs of
(rest, col, left) -> (c:rest, col, left)
-- ==========================================================--
--
leLexLComment :: Int -> Int -> String -> [Token]
leLexLComment l n cs
= leLex (l+1) 1 (drop 1 (dropWhile ((/=) '\n') cs))
-- ==========================================================--
--
leLexRComment :: Int -> Int -> String -> [Token]
leLexRComment l n []
= leFail l n "End of file inside {- ... -} comment"
leLexRComment l n ('-':'}':cs)
= leLex l (n+2) cs
leLexRComment l n ('\n':cs)
= leLexRComment (l+1) 1 cs
leLexRComment l n ('\t':cs)
= leLexRComment l (n - (n `mod` 8) + 9) cs
leLexRComment l n (c:cs)
= leLexRComment l (n+1) cs
-- ==========================================================--
--
leIsSymbol :: Char -> Bool
leIsSymbol c = c `elem` leSymbols
leSymbols = ":!#$%&*+./<=>?\\@^|~"
-- ==========================================================--
--
leIsTailChar :: Char -> Bool
leIsTailChar c
   = isLower c ||
     isUpper c ||
     isDigit c ||
     c == '\'' ||
     c == '_'
-- ==========================================================--
--
leIsLitChar :: Char -> Bool
leIsLitChar c
= c /= '\n' &&
c /= '\t' &&
c /= '\'' &&
c /= '"'
-- ==========================================================--
--
leStringToInt :: String -> Int
leStringToInt
= let s2i [] = 0
s2i (d:ds) = (fromEnum d - fromEnum '0') + 10 *s2i ds
in s2i . reverse
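-- For example, leStringToInt "123" reverses the digits and evaluates
-- 3 + 10*(2 + 10*(1 + 10*0)) = 123.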
-- ==========================================================--
--
leFail l n m
= faiL ("Lexical error, line " ++ show l ++ ", col " ++ show n ++
":\n " ++ m )
faiL m = error ( "\n\n" ++ m ++ "\n" )
-- ==========================================================--
-- === end Lexer.hs ===--
-- ==========================================================--
-- ==========================================================--
-- === Keyword spotting, and offside rule implementation ===--
-- === Layout.hs ===--
-- ==========================================================--
--module Layout
-- ==========================================================--
--
laKeyword :: Token -> Token
laKeyword (l, n, what, text)
= let
f Lvarop "=" = Lequals
f Lvarop "|" = Lbar
f Lvarop "\\" = Lslash
f Lvar "module" = Lmodule
f Lvar "infix" = Linfix
f Lvar "infixl" = Linfixl
f Lvar "infixr" = Linfixr
f Lvar "ext" = Lext
f Lvar "data" = Ldata
f Lvar "if" = Lif
f Lvar "then" = Lthen
f Lvar "else" = Lelse
f Lvar "let" = Llet
f Lvar "in" = Lin
f Lvar "case" = Lcase
f Lvar "of" = Lof
f Lvar "where" = Lwhere
f item words = item
in
(l, n, f what text, text)
-- ==========================================================--
--
laLayout :: Int -> [Int] -> [Token] -> [Token]
laLayout l s []
= laRbrace (length s - 1) 99999 99999
laLayout l s (t1:[])
= t1 : laRbrace (length s - 1) 99998 99998
laLayout l (s:ss) (t1@(l1, n1, w1, c1) :
t2@(l2, n2, w2, c2) : ts)
| w1 `elem` [Lof, Llet, Lwhere] && w2 /= Llbrace
= t1 :
(l1, n1, Llbrace, "{") :
t2 :
laLayout l2 (n2:s:ss) ts
| l1 == l
= t1 :
laLayout l (s:ss) (t2:ts)
| n1 > s
= t1 :
laLayout l1 (s:ss) (t2:ts)
| n1 == s
= (l1, n1, Lsemi, ";") :
t1 :
laLayout l1 (s:ss) (t2:ts)
| n1 < s
= (l1, n1, Lrbrace, "}") :
laLayout l ss (t1:t2:ts)
-- ==========================================================--
--
laRbrace c l n
= take c (repeat (l, n, Lrbrace, "}"))
-- ==========================================================--
--
laMain :: String -> [Token]
laMain
= laLayout 1 [0] . map laKeyword . leLex 1 1
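-- An illustrative sketch of the pipeline: leLex produces raw tokens,
-- map laKeyword re-tags keywords and special operators (e.g. an Lvarop "="
-- becomes Lequals), and laLayout applies the offside rule, inserting
-- Llbrace, Lsemi and Lrbrace tokens around blocks opened by let, of and where.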
-- ==========================================================--
-- === end Layout.hs ===--
-- ==========================================================--
-- ==========================================================--
-- === Abstract syntax for modules ===--
-- === AbsSyntax.hs ===--
-- ==========================================================--
--module AbsSyntax where
--1.3:data Maybe a = Nothing
-- | Just a
type AList a b = [(a, b)]
type Id = String
data Module
= MkModule Id [TopDecl]
deriving (Show{-was:Text-})
data FixityDecl
= MkFixDecl Id (Fixity, Int)
deriving (Show{-was:Text-})
data DataDecl
= MkDataDecl Id ([Id], [ConstrAltDecl])
deriving (Show{-was:Text-})
data TopDecl
= MkTopF FixityDecl
| MkTopD DataDecl
| MkTopV ValBind
deriving (Show{-was:Text-})
data Fixity
= InfixL
| InfixR
| InfixN
deriving (Eq,Show{-was:Text-})
type ConstrAltDecl
= (Id, [TypeExpr])
data TypeExpr = TypeVar Id
| TypeArr TypeExpr TypeExpr
| TypeCon Id [TypeExpr]
| TypeList TypeExpr
| TypeTuple [TypeExpr]
deriving (Show{-was:Text-})
data ValBind
= MkValBind Int Lhs Expr
deriving (Show{-was:Text-})
data Lhs
= LhsPat Pat
| LhsVar Id [Pat]
deriving (Show{-was:Text-})
data Pat
= PatVar Id
| PatCon Id [Pat]
| PatWild
| PatList [Pat]
| PatTuple [Pat]
deriving (Show{-was:Text-})
data Expr
= ExprVar Id
| ExprCon Id
| ExprApp Expr Expr
| ExprLam [Pat] Expr
| ExprCase Expr [ExprCaseAlt]
| ExprLetrec [ValBind] Expr
| ExprWhere Expr [ValBind]
| ExprGuards [(Expr, Expr)]
| ExprLiteral Literal
| ExprList [Expr]
| ExprTuple [Expr]
| ExprIf Expr Expr Expr
| ExprBar
| ExprFail
deriving (Show{-was:Text-})
data ExprCaseAlt
= MkExprCaseAlt Pat Expr
deriving (Show{-was:Text-})
data Literal
= LiteralInt Int
| LiteralChar Char
| LiteralString String
deriving (Show{-was:Text-})
-- ==========================================================--
-- === end AbsSyntax.hs ===--
-- ==========================================================--
-- ==========================================================--
-- === Parser generics ===--
-- === ParserGeneric.hs ===--
-- ==========================================================--
--module ParserGeneric
type PEnv = AList String (Fixity, Int)
data PResult a = POk PEnv [Token] a
| PFail Token
type Parser a = PEnv -> [Token] -> PResult a
type PEntry = (Bool, Expr, Id)
-- ==========================================================--
--
pgItem :: Lex -> Parser String
pgItem x env [] = PFail pgEOF
pgItem x env ((l, n, w, t):toks)
| x == w = POk env toks t
| otherwise = PFail (l, n, w, t)
-- ==========================================================--
--
pgAlts :: [Parser a] -> Parser a
pgAlts ps env toks
= let
useAlts [] bestErrTok
= PFail bestErrTok
useAlts (p:ps) bestErrTok
= case p env toks of
PFail someErrTok -> useAlts ps (further someErrTok bestErrTok)
successful_parse -> successful_parse
further x1@(l1, n1, w1, t1) x2@(l2, n2, w2, t2)
= if l2 > l1 then x2
else if l1 > l2 then x1
else if n1 > n2 then x1
else x2
in
useAlts ps (head (toks ++ [pgEOF]))
-- ==========================================================--
--
pgThen2 :: (a -> b -> c) ->
Parser a ->
Parser b ->
Parser c
pgThen2 combine p1 p2 env toks
= case p1 env toks of
{
PFail tok1
-> PFail tok1 ;
POk env1 toks1 item1
-> case p2 env1 toks1 of
{
PFail tok2
-> PFail tok2 ;
POk env2 toks2 item2
-> POk env2 toks2 (combine item1 item2)
}
}
-- ==========================================================--
--
pgThen3 :: (a -> b -> c -> d) ->
Parser a ->
Parser b ->
Parser c ->
Parser d
pgThen3 combine p1 p2 p3 env toks
= case p1 env toks of
{
PFail tok1
-> PFail tok1 ;
POk env1 toks1 item1
-> case p2 env1 toks1 of
{
PFail tok2
-> PFail tok2 ;
POk env2 toks2 item2
-> case p3 env2 toks2 of
{
PFail tok3
-> PFail tok3 ;
POk env3 toks3 item3
-> POk env3 toks3 (combine item1 item2 item3)
}
}
}
-- ==========================================================--
--
pgThen4 :: (a -> b -> c -> d -> e) ->
Parser a ->
Parser b ->
Parser c ->
Parser d ->
Parser e
pgThen4 combine p1 p2 p3 p4 env toks
= case p1 env toks of
{
PFail tok1
-> PFail tok1 ;
POk env1 toks1 item1
-> case p2 env1 toks1 of
{
PFail tok2
-> PFail tok2 ;
POk env2 toks2 item2
-> case p3 env2 toks2 of
{
PFail tok3
-> PFail tok3 ;
POk env3 toks3 item3
-> case p4 env3 toks3 of
{
PFail tok4
-> PFail tok4 ;
POk env4 toks4 item4
-> POk env4 toks4 (combine item1 item2 item3 item4)
}
}
}
}
-- ==========================================================--
--
pgZeroOrMore :: Parser a -> Parser [a]
pgZeroOrMore p env toks
= case p env toks of
{
PFail tok1
-> POk env toks [] ;
POk env1 toks1 item1
-> case pgZeroOrMore p env1 toks1 of
{
PFail tok2
-> POk env1 toks1 [item1] ;
POk env2 toks2 item2_list
-> POk env2 toks2 (item1 : item2_list)
}
}
-- ==========================================================--
--
pgOneOrMore :: Parser a -> Parser [a]
pgOneOrMore p
= pgThen2 (:) p (pgZeroOrMore p)
-- ==========================================================--
--
pgApply :: (a -> b) -> Parser a -> Parser b
pgApply f p env toks
= case p env toks of
{
PFail tok1
-> PFail tok1 ;
POk env1 toks1 item1
-> POk env1 toks1 (f item1)
}
-- ==========================================================--
--
pgTwoOrMoreWithSep :: Parser a -> Parser b -> Parser [a]
pgTwoOrMoreWithSep p psep
= pgThen4
(\i1 s1 i2 rest -> i1:i2:rest)
p
psep
p
(pgZeroOrMore (pgThen2 (\sep x -> x) psep p))
-- ==========================================================--
--
pgOneOrMoreWithSep :: Parser a -> Parser b -> Parser [a]
pgOneOrMoreWithSep p psep
= pgThen2 (:) p (pgZeroOrMore (pgThen2 (\sep x -> x) psep p))
-- ==========================================================--
--
pgZeroOrMoreWithSep :: Parser a -> Parser b -> Parser [a]
pgZeroOrMoreWithSep p psep
= pgAlts
[
pgOneOrMoreWithSep p psep,
pgApply (\x -> x:[]) p,
pgEmpty []
]
-- ==========================================================--
--
pgOptional :: Parser a -> Parser (Maybe a)
pgOptional p env toks
= case p env toks of
{
PFail tok1
-> POk env toks Nothing ;
POk env2 toks2 item2
-> POk env2 toks2 (Just item2)
}
-- ==========================================================--
--
pgGetLineNumber :: Parser a -> Parser (Int, a)
pgGetLineNumber p env toks
= let
lineNo = case (head (toks ++ [pgEOF])) of (l, n, w, t) -> l
in
case p env toks of
{
PFail tok1
-> PFail tok1 ;
POk env2 toks2 item2
-> POk env2 toks2 (lineNo, item2)
}
-- ==========================================================--
--
pgEmpty :: a -> Parser a
pgEmpty item env toks
= POk env toks item
-- ==========================================================--
--
pgEOF :: Token
pgEOF = (88888, 88888, Lvar, "*** Unexpected end of source! ***")
-- ============================================================--
-- === Some kludgey stuff for implementing the offside rule ===--
-- ============================================================--
-- ==========================================================--
--
pgEatEnd :: Parser ()
pgEatEnd env []
= POk env [] ()
pgEatEnd env (tok@(l, n, w, t):toks)
| w == Lsemi || w == Lrbrace = POk env toks ()
| otherwise = POk env (tok:toks) ()
-- ==========================================================--
--
pgDeclList :: Parser a -> Parser [a]
pgDeclList p
= pgThen3 (\a b c -> b) (pgItem Llbrace)
(pgOneOrMoreWithSep p (pgItem Lsemi))
pgEatEnd
-- ==========================================================--
-- === end ParserGeneric.hs ===--
-- ==========================================================--
-- ==========================================================--
-- === The parser. ===--
-- === Parser.hs ===--
-- ==========================================================--
--module Parser where
{- FIX THESE UP -}
utLookupDef env k def
= head ( [ vv | (kk,vv) <- env, kk == k] ++ [def] )
panic = error
{- END FIXUPS -}
paLiteral :: Parser Literal
paLiteral
= pgAlts
[
pgApply (LiteralInt . leStringToInt) (pgItem Lintlit),
pgApply (LiteralChar . head) (pgItem Lcharlit),
pgApply LiteralString (pgItem Lstringlit)
]
paExpr
= pgAlts
[
paCaseExpr,
paLetExpr,
paLamExpr,
paIfExpr,
paUnaryMinusExpr,
hsDoExpr []
]
paUnaryMinusExpr
= pgThen2
(\minus (_, aexpr, _) ->
ExprApp (ExprApp (ExprVar "-") (ExprLiteral (LiteralInt 0))) aexpr)
paMinus
paAExpr
paCaseExpr
= pgThen4
(\casee expr off alts -> ExprCase expr alts)
(pgItem Lcase)
paExpr
(pgItem Lof)
(pgDeclList paAlt)
paAlt
= pgAlts
[
pgThen4
(\pat arrow expr wheres
-> MkExprCaseAlt pat (pa_MakeWhereExpr expr wheres))
paPat
(pgItem Larrow)
paExpr
(pgOptional paWhereClause),
pgThen3
(\pat agrdrhss wheres
-> MkExprCaseAlt pat
(pa_MakeWhereExpr (ExprGuards agrdrhss) wheres))
paPat
(pgOneOrMore paGalt)
(pgOptional paWhereClause)
]
paGalt
= pgThen4
(\bar guard arrow expr -> (guard, expr))
(pgItem Lbar)
paExpr
(pgItem Larrow)
paExpr
paLamExpr
= pgThen4
(\lam patterns arrow rhs -> ExprLam patterns rhs)
(pgItem Lslash)
(pgZeroOrMore paAPat)
(pgItem Larrow)
paExpr
paLetExpr
= pgThen4
(\lett decls inn rhs -> ExprLetrec decls rhs)
(pgItem Llet)
paValdefs
(pgItem Lin)
paExpr
paValdefs
= pgApply pa_MergeValdefs (pgDeclList paValdef)
pa_MergeValdefs
= id
paLhs
= pgAlts
[
pgThen2 (\v ps -> LhsVar v ps) paVar (pgOneOrMore paPat),
pgApply LhsPat paPat
]
paValdef
= pgAlts
[
pgThen4
(\(line, lhs) eq rhs wheres
-> MkValBind line lhs (pa_MakeWhereExpr rhs wheres))
(pgGetLineNumber paLhs)
(pgItem Lequals)
paExpr
(pgOptional paWhereClause),
pgThen3
(\(line, lhs) grdrhss wheres
-> MkValBind line lhs
(pa_MakeWhereExpr (ExprGuards grdrhss) wheres))
(pgGetLineNumber paLhs)
(pgOneOrMore paGrhs)
(pgOptional paWhereClause)
]
pa_MakeWhereExpr expr Nothing
= expr
pa_MakeWhereExpr expr (Just whereClauses)
= ExprWhere expr whereClauses
paWhereClause
= pgThen2 (\x y -> y) (pgItem Lwhere) paValdefs
paGrhs
= pgThen4
(\bar guard equals expr -> (guard, expr))
(pgItem Lbar)
paExpr
(pgItem Lequals)
paExpr
paAPat
= pgAlts
[
pgApply PatVar paVar,
pgApply (\id -> PatCon id []) paCon,
pgApply (const PatWild) (pgItem Lunder),
pgApply PatTuple
(pgThen3 (\l es r -> es)
(pgItem Llparen)
(pgTwoOrMoreWithSep paPat (pgItem Lcomma))
(pgItem Lrparen)),
pgApply PatList
(pgThen3 (\l es r -> es)
(pgItem Llbrack)
(pgZeroOrMoreWithSep paPat (pgItem Lcomma))
(pgItem Lrbrack)),
pgThen3 (\l p r -> p)
(pgItem Llparen)
paPat
(pgItem Lrparen)
]
paPat
= pgAlts
[
pgThen2 (\c ps -> PatCon c ps)
paCon
(pgOneOrMore paAPat),
pgThen3 (\ap c pa -> PatCon c [ap,pa])
paAPat
paConop
paPat,
paAPat
]
paIfExpr
= pgThen4
(\iff c thenn (t,f) -> ExprIf c t f)
(pgItem Lif)
paExpr
(pgItem Lthen)
(pgThen3
(\t elsee f -> (t,f))
paExpr
(pgItem Lelse)
paExpr
)
paAExpr
= pgApply (\x -> (False, x, []))
(pgAlts
[
pgApply ExprVar paVar,
pgApply ExprCon paCon,
pgApply ExprLiteral paLiteral,
pgApply ExprList paListExpr,
pgApply ExprTuple paTupleExpr,
pgThen3 (\l e r -> e) (pgItem Llparen) paExpr (pgItem Lrparen)
]
)
paListExpr
= pgThen3 (\l es r -> es)
(pgItem Llbrack)
(pgZeroOrMoreWithSep paExpr (pgItem Lcomma))
(pgItem Lrbrack)
paTupleExpr
= pgThen3 (\l es r -> es)
(pgItem Llparen)
(pgTwoOrMoreWithSep paExpr (pgItem Lcomma))
(pgItem Lrparen)
paVar = pgItem Lvar
paCon = pgItem Lcon
paVarop = pgItem Lvarop
paConop = pgItem Lconop
paMinus = pgItem Lminus
paOp
= pgAlts [
pgApply (\x -> (True, ExprVar x, x)) paVarop,
pgApply (\x -> (True, ExprCon x, x)) paConop,
pgApply (\x -> (True, ExprVar x, x)) paMinus
]
paDataDecl
= pgThen2
(\dataa useful -> useful)
(pgItem Ldata)
paDataDecl_main
paDataDecl_main
= pgThen4
(\name params eq drhs -> MkDataDecl name (params, drhs))
paCon
(pgZeroOrMore paVar)
(pgItem Lequals)
(pgOneOrMoreWithSep paConstrs (pgItem Lbar))
paConstrs
= pgThen2
(\con texprs -> (con, texprs))
paCon
(pgZeroOrMore paAType)
paType
= pgAlts
[
pgThen3
(\atype arrow typee -> TypeArr atype typee)
paAType
(pgItem Larrow)
paType,
pgThen2
TypeCon
paCon
(pgOneOrMore paAType),
paAType
]
paAType
= pgAlts
[
pgApply TypeVar paVar,
pgApply (\tycon -> TypeCon tycon []) paCon,
pgThen3
(\l t r -> t)
(pgItem Llparen)
paType
(pgItem Lrparen),
pgThen3
(\l t r -> TypeList t)
(pgItem Llbrack)
paType
(pgItem Lrbrack),
pgThen3
(\l t r -> TypeTuple t)
(pgItem Llparen)
(pgTwoOrMoreWithSep paType (pgItem Lcomma))
(pgItem Lrparen)
]
paInfixDecl env toks
= let dump (ExprVar v) = v
dump (ExprCon c) = c
in
pa_UpdateFixityEnv
(pgThen3
(\assoc prio name -> MkFixDecl name (assoc, prio))
paInfixWord
(pgApply leStringToInt (pgItem Lintlit))
(pgApply (\(_, op, _) -> dump op) paOp)
env
toks
)
paInfixWord
= pgAlts
[
pgApply (const InfixL) (pgItem Linfixl),
pgApply (const InfixR) (pgItem Linfixr),
pgApply (const InfixN) (pgItem Linfix)
]
pa_UpdateFixityEnv (PFail tok)
= PFail tok
pa_UpdateFixityEnv (POk env toks (MkFixDecl name assoc_prio))
= let
new_env = (name, assoc_prio) : env
in
POk new_env toks (MkFixDecl name assoc_prio)
paTopDecl
= pgAlts
[
pgApply MkTopF paInfixDecl,
pgApply MkTopD paDataDecl,
pgApply MkTopV paValdef
]
paModule
= pgThen4
(\modyule name wheree topdecls -> MkModule name topdecls)
(pgItem Lmodule)
paCon
(pgItem Lwhere)
(pgDeclList paTopDecl)
parser_test toks
= let parser_to_test
= --paPat
--paExpr
--paValdef
--pgZeroOrMore paInfixDecl
--paDataDecl
--paType
paModule
--pgTwoOrMoreWithSep (pgItem Lsemi) (pgItem Lcomma)
in
parser_to_test hsPrecTable toks
-- ==============================================--
-- === The Operator-Precedence parser (yuck!) ===--
-- ==============================================--
--
-- ==========================================================--
--
hsAExprOrOp
= pgAlts [paAExpr, paOp]
hsDoExpr :: [PEntry] -> Parser Expr
-- [PEntry] is a stack of operators and atomic expressions
-- hsDoExpr uses a parser (hsAExprOrOp :: Parser PEntry) for atomic
-- expressions or operators
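-- An illustrative trace (using the hsPrecTable below, where "*" binds at 7
-- and "+" at 6): when parsing "a * b + c", after shifting a, * and b the
-- stack is [b, *, a] (top first); the incoming + has lower priority than *,
-- so the parser reduces the stack to [a * b] before shifting + and c.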
hsDoExpr stack env toks =
let
(validIn, restIn, parseIn, err)
= case hsAExprOrOp env toks of
POk env1 toks1 item1
-> (True, toks1, item1, panic "hsDoExpr(1)")
PFail err
-> (False, panic "hsDoExpr(2)", panic "hsDoExpr(3)", err)
(opIn, valueIn, nameIn)
= parseIn
(assocIn, priorIn)
= utLookupDef env nameIn (InfixL, 9)
shift
= hsDoExpr (parseIn:stack) env restIn
in
case stack of
s1:s2:s3:ss
| validIn && opS2 && opIn && priorS2 > priorIn
-> reduce
| validIn && opS2 && opIn && priorS2 == priorIn
-> if assocS2 == InfixL &&
assocIn == InfixL
then reduce
else
if assocS2 == InfixR &&
assocIn == InfixR
then shift
                         else PFail (head toks) -- Because of ambiguity
| not validIn && opS2
-> reduce
where
(opS1, valueS1, nameS1) = s1
(opS2, valueS2, nameS2) = s2
(opS3, valueS3, nameS3) = s3
(assocS2, priorS2) = utLookupDef env nameS2 (InfixL, 9)
reduce = hsDoExpr ((False, ExprApp (ExprApp valueS2 valueS3)
valueS1, [])
: ss) env toks
s1:s2:ss
| validIn && (opS1 || opS2) -> shift
| otherwise -> reduce
where
(opS1, valueS1, nameS1) = s1
(opS2, valueS2, nameS2) = s2
reduce = hsDoExpr ((False, ExprApp valueS2 valueS1, []) : ss)
env toks
(s1:[])
| validIn -> shift
| otherwise -> POk env toks valueS1
where
(opS1, valueS1, nameS1) = s1
[]
| validIn -> shift
| otherwise -> PFail err
-- ==========================================================--
-- === end Parser.hs ===--
-- ==========================================================--
hsPrecTable :: PEnv
hsPrecTable = [
("-", (InfixL, 6)),
("+", (InfixL, 6)),
("*", (InfixL, 7)),
("div", (InfixN, 7)),
("mod", (InfixN, 7)),
("<", (InfixN, 4)),
("<=", (InfixN, 4)),
("==", (InfixN, 4)),
("/=", (InfixN, 4)),
(">=", (InfixN, 4)),
(">", (InfixN, 4)),
("C:", (InfixR, 5)),
("++", (InfixR, 5)),
("\\", (InfixN, 5)),
("!!", (InfixL, 9)),
(".", (InfixR, 9)),
("^", (InfixR, 8)),
("elem", (InfixN, 4)),
("notElem", (InfixN, 4)),
("||", (InfixR, 2)),
("&&", (InfixR, 3))]
main = do
cs <- readFile "big_big_test.hs" -- TODO: cs <- getContents
let tokens = laMain cs
let parser_res = parser_test tokens
putStr (showx parser_res)
showx (PFail t)
= "\n\nFailed on token: " ++ show t ++ "\n\n"
showx (POk env toks result)
= "\n\nSucceeded, with:\n Size env = " ++ show (length env) ++
"\n Next token = " ++ show (head toks) ++
"\n\n Result = " ++ show result ++ "\n\n"
-- ==========================================================--
--
layn :: [[Char]] -> [Char]
layn x = f 1 x
where
f :: Int -> [[Char]] -> [Char]
f n [] = []
f n (a:x) = rjustify 4 (show n) ++") "++a++"\n"++f (n+1) x
-- ==========================================================--
--
rjustify :: Int -> [Char] -> [Char]
rjustify n s = spaces (n - length s)++s
where
spaces :: Int -> [Char]
spaces m = copy m ' '
copy :: Int -> a -> [a]
copy n x = take (max 0 n) xs where xs = x:xs
| roberth/uu-helium | test/benchmarks/Parser.hs | gpl-3.0 | 37,406 | 0 | 19 | 14,433 | 10,629 | 5,745 | 4,884 | 867 | 26 |
{-# LANGUAGE FlexibleContexts #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
-- |
-- Copyright : (c) 2012 Benedikt Schmidt
-- License : GPL v3 (see LICENSE)
--
-- Maintainer : Benedikt Schmidt <[email protected]>
--
-- Unit tests for the functions dealing with term algebra and related notions.
module Term.UnitTests -- (tests)
where
import Term.Substitution
import Term.Subsumption
import Term.Builtin.Convenience
import Term.Unification
import Term.Rewriting.Norm
import Term.Narrowing.Variants
import Term.Positions
import Text.PrettyPrint.Class
import Data.List
import Data.Maybe
import Prelude
import Test.HUnit
import Control.Monad.Reader
-- import Data.Monoid
testEqual :: (Eq a, Show a) => String -> a -> a -> Test
testEqual t a b = TestLabel t $ TestCase $ assertEqual t b a
testTrue :: String -> Bool -> Test
testTrue t a = TestLabel t $ TestCase $ assertBool t a
-- *****************************************************************************
-- Tests for Matching
-- *****************************************************************************
testsMatching :: MaudeHandle -> Test
testsMatching hnd = TestLabel "Tests for Matching" $
TestList
[ testTrue "a" (propMatchSound hnd f1 f2)
, testTrue "b" (propMatchSound hnd (pair(f1,inv(f2))) (pair(f1,inv(f2))))
, testTrue "c" (propMatchSound hnd t1 t2)
, testTrue "d" (propMatchSound hnd (x1 # f1) f1)
, testTrue "e" $ null (solveMatchLNTerm (pair(x1,x2) `matchWith` pair(x1,x1)) `runReader` hnd)
]
where
t1 = expo (inv(pair(f1,f2)), f2 # (inv f2) # f3 # f4 # f2)
t2 = expo (inv(pair(f1,f2)), f3 # (inv f2) # f2 # x1 # f5 # f2)
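-- The soundness property checked below: every substitution returned by
-- matching, when applied to both the term and the pattern, makes the two
-- terms equal.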
propMatchSound :: MaudeHandle -> LNTerm -> LNTerm -> Bool
propMatchSound mhnd t1 p = all (\s -> applyVTerm s t1 == applyVTerm s p) substs
where substs = solveMatchLNTerm (t1 `matchWith` p) `runReader` mhnd
-- *****************************************************************************
-- Tests for Unification
-- *****************************************************************************
testsUnify :: MaudeHandle -> Test
testsUnify mhnd = TestLabel "Tests for Unify" $
TestList
[ testTrue "a" (propUnifySound mhnd (pair(f1,inv(f2))) (pair(f1,inv(f2))))
, testTrue "b" (propUnifySound mhnd t1 t2)
, testTrue "c" (propUnifySound mhnd u1 u2)
, testTrue "d" (propUnifySound mhnd (sdec(x1,y1)) (sdec(senc(x2,x3), x4)))
, testTrue "e" (propUnifySound mhnd (fAppEMap (p2,x1)) (fAppEMap (p1,x2)))
]
where
t1 = expo (inv(pair(f1,f2)), f2 *: (inv f2) *: f3 *: f4 *: x2)
t2 = expo (inv(pair(f1,f2)), f3 *: (inv f2) *: f2 *: f4 *: f5 *: f2)
u1 = (f2 *: (inv f2) *: f3 *: f4 *: x2)
u2 = (f3 *: (inv f2) *: f2 *: f4 *: f5 *: f2)
propUnifySound :: MaudeHandle -> LNTerm -> LNTerm -> Bool
propUnifySound hnd t1 t2 = all (\s -> let s' = freshToFreeAvoiding s [t1,t2] in
applyVTerm s' t1 == applyVTerm s' t2) substs
&& not (null substs)
where
substs = unifyLNTerm [Equal t1 t2] `runReader` hnd
-- *****************************************************************************
-- Tests for Substitutions
-- *****************************************************************************
testsSubst :: Test
testsSubst = TestLabel "Tests for Substitution" $
TestList
[ -- introduce renaming for x3
testEqual "a" (substFromListVFresh [(lx1, p1), (lx2, x6), (lx3,x6), (lx5, p1)])
(composeVFresh (substFromListVFresh [(lx5, p1)])
(substFromList [(lx1, x5), (lx2, x3)]))
-- rename (fresh) x6 in s1b and do not mix up with x6 in s3f
, testEqual "b" s1b_o_s3f (composeVFresh s1b s3f)
-- drop x1 |-> p1 mapping from s1b, but apply to x2 |-> pair(x3,x1) in s3f
, testEqual "c" s1b_o_s4f (composeVFresh s1b s4f)
, testEqual "d" s4f_o_s3f (compose s4f s3f)
, testEqual "e" (substFromList [(lx1,f1), (lx2,f1)])
(mapRange (const f1) s4f)
, testTrue "f" (isRenaming (substFromListVFresh [(lx1,x3), (lx2,x2), (lx3,x1)]))
, testEqual "g" (substFromListVFresh [(lx1, f1)])
(extendWithRenaming [lx1] (substFromListVFresh [(lx1, f1)]))
, testEqual "h" (substFromListVFresh [(lx2, x1), (lx1, x2)])
(extendWithRenaming [lx1] (substFromListVFresh [(lx2, x1)]))
-- trivial, increase coverage
, testTrue "i" ((>0) . length $ show s1b)
, testTrue "j" ((>0) . length $ (render $ prettyLSubstVFresh s1b))
, testTrue "k" (not . null $ domVFresh s1b)
, testTrue "l" (not . null $ varsRangeVFresh s1b)
, testTrue "m" ((>0) . length $ show $ substToListOn [lx1] s4f)
, testTrue "n" ((<100) . size $ emptySubst)
, testTrue "o" ((<10000) . size $ s1b)
, testTrue "p" ((<100) . size $ emptySubstVFresh)
]
where
s1b = substFromListVFresh [(lx1, p1), (lx2, x6), (lx3, x6), (lx4, f1)]
s3f = substFromList [(lx8, x6), (lx2, pair(x2,x1))]
s1b_o_s3f = substFromListVFresh -- x2 not identified with x8
[(lx1, p1), (lx2, pair(x9, p1)), (lx3, x9), (lx4, f1), (lx6, x10), (lx8, x10)]
s4f = substFromList [(lx1, x6), (lx2, pair(x3,x1))]
s1b_o_s4f = substFromListVFresh
[(lx1, x8), (lx2, pair(x7, p1)), (lx3, x7), (lx4, f1), (lx6, x8)]
s4f_o_s3f = substFromList [(lx1, x6), (lx2, pair(pair(x3,x1),x6)), (lx8, x6)]
x15 = varTerm $ LVar "x" LSortMsg 15
x13 = varTerm $ LVar "x" LSortMsg 13
x20 = varTerm $ LVar "x" LSortMsg 20
x22 = varTerm $ LVar "x" LSortMsg 22
-- *****************************************************************************
-- Tests for Subsumption
-- *****************************************************************************
testsSubs :: MaudeHandle -> Test
testsSubs mhnd = TestLabel "Tests for Subsumption" $ TestList
[ tct Nothing f1 f2
, tct (Just EQ) x1 x2
, tct (Just LT) x1 (x1 *: x1)
, tct (Just GT) (x1 *: x1) x1
, tct (Just GT) (pair(f1 *: f2,f1)) (pair(f2 *: f1,x2))
, testEqual "a" [substFromList [(lx2, pair(x6,x7)), (lx3, p1)]]
(factorSubstVia [lx1]
(substFromList [(lx1,pair(pair(x6,x7),p1))])
(substFromList [(lx1,pair(x2,x3))]) `runReader` mhnd)
, testEqual "b" [substFromList [(lx2, pair(x6,x7)), (lx3, p1), (lx5, f1), (lx6,f2)]]
(factorSubstVia [lx1, lx5, lx6]
(substFromList [(lx1,pair(pair(x6,x7),p1)), (lx5,f1), (lx6,f2)])
(substFromList [(lx1,pair(x2,x3))]) `runReader` mhnd)
, testTrue "c" (eqTermSubs p1 p1 `runReader` mhnd)
]
where
tct res e1 e2 =
testEqual ("termCompareSubs "++ppLTerm e1++" "++ppLTerm e2) res (compareTermSubs e1 e2 `runReader` mhnd)
ppLTerm :: LNTerm -> String
ppLTerm = render . prettyNTerm
ppLSubst :: LNSubst -> String
ppLSubst = render . prettyLNSubst
-- *****************************************************************************
-- Tests for Norm
-- *****************************************************************************
testsNorm :: MaudeHandle -> Test
testsNorm hnd = TestLabel "Tests for normalization" $ TestList
[ tcn normBigTerm bigTerm
, tcn (expo(f3,f1 *: f4))
(expo(expo(f3,f4),f1 *: f1 *: f2 *: inv (inv (inv f1)) *: one *: expo(inv f2,one)))
, tcn (mult [f1, f1, f2]) (f1 *: (f1 *: f2))
, tcn (inv (f1 *: f2)) (inv f2 *: inv f1)
, tcn (f1 *: inv f2) (f1 *: inv f2)
, tcn (one::LNTerm) one
, tcn x6 (expo(expo(x6,inv x3),x3))
-- , testEqual "a" (normAC (p3 *: (p1 *: p2))) (mult [p1, p2, p3])
-- , testEqual "b" (normAC (p3 *: (p1 *: inv p3))) (mult [p1, p3, inv p3])
-- , testEqual "c" (normAC ((p1 *: p2) *: p3)) (mult [p1, p2, p3])
-- , testEqual "d" (normAC t1) (mult [p1, p2, p3, p4])
-- , testEqual "e" (normAC ((p1 # p2) *: p3)) (p3 *: (p1 # p2))
-- , testEqual "f" (normAC (p3 *: (p1 # p2))) (p3 *: (p1 # p2))
-- , testEqual "g" (normAC ((p3 *: p4) *: (p1 # p2))) (mult [p3, p4, p1 # p2])
]
where
tcn e1 e2 = testEqual ("norm "++ppLTerm e2) e1 (norm' e2 `runReader` hnd)
t1 = (p1 *: p2) *: (p3 *: p4)
-- *****************************************************************************
-- Tests for Term
-- *****************************************************************************
testsTerm :: Test
testsTerm = TestLabel "Tests for Terms" $ TestList
[ uncurry (testEqual "Terms: propSubtermReplace") (propSubtermReplace bigTerm [1,0]) ]
propSubtermReplace :: Ord a => Term a -> Position -> (Term a, Term a)
propSubtermReplace t p = (t,(t `replacePos` (t `atPos` p,p)))
bigTerm :: LNTerm
bigTerm = pair(pk(x1),
expo(expo (inv x3,
x2 *: x4 *: f1 *: one *: inv (f3 *: f4) *: f3 *: f4 *: inv one),
inv(expo(x2,one)) *: f2))
normBigTerm :: LNTerm
normBigTerm = pair(pk(x1),expo(inv x3,mult [f1, f2, x4]))
tcompare :: MaudeHandle -> Test
tcompare hnd =
TestLabel "Tests for variant order" $ TestList
[ testTrue "a" (run $ isNormalInstance t su1 su2)
, testTrue "b" $ not (run $ isNormalInstance t su1 su3)
, testTrue "c" $ (run $ leqSubstVariant t su5 su4)
, testTrue "d" $ not (run $ leqSubstVariant t su6 su4)
, testEqual "e" (run $ compareSubstVariant t su4 su4) (Just EQ)
, testEqual "f" (run $ compareSubstVariant t su5 su4) (Just LT)
, testEqual "g" (run $ compareSubstVariant t su4 su5) (Just GT)
, testEqual "h" (run $ compareSubstVariant t su6 su4) Nothing
]
where
run :: WithMaude a -> a
run m = runReader m hnd
t = pair(inv(x1) *: x2, inv(x3) *: x2)
su1 = substFromList [(lx1, x2)]
su2 = substFromList [(lx2, p1)]
su3 = substFromList [(lx3, x2)]
su4 = substFromListVFresh [(lx1, x4), (lx2, x4)]
su5 = substFromListVFresh [(lx1, p1), (lx2, p1)]
su6 = substFromListVFresh [(lx1, x4), (lx2, x4), (lx3, x4)]
testsVariant :: MaudeHandle -> Test
testsVariant hnd =
TestLabel "Tests for variant computation" $ TestList
[ testEqual "a" (computeVariantsCheck (sdec(x1, p1)) `runReader` hnd)
(toSubsts [ []
, [(lx1, senc(x2, p1))] ])
, testEqual "b" (computeVariantsCheck (x1 *: p1) `runReader` hnd)
(toSubsts [ []
, [(lx1, x2 *: inv(p1))]
, [(lx1, inv(p1))]
, [(lx1, one)]
, [(lx1, x2 *: inv(p1 *: x3))]
, [(lx1, inv(p1 *: x2))]
])
, testTrue "e" $ not (checkComplete (sdec(x1, p1)) (toSubsts [[]]) `runReader` hnd)
, testTrue "f" $ (checkComplete (sdec(x1, p1)) (toSubsts [[], [(lx1, senc(x1,p1))]])
`runReader` hnd)
]
where
toSubsts = map substFromListVFresh
testsSimple :: MaudeHandle -> Test
testsSimple _hnd =
TestLabel "Tests for simple functions" $ TestList
[ testTrue "" (size [bigTerm] > 0) ]
-- | All unification infrastructure unit tests.
tests :: FilePath -> IO Test
tests maudePath = do
mhnd <- startMaude maudePath allMaudeSig
return $ TestList [ testsVariant mhnd
, tcompare mhnd
, testsSubs mhnd
, testsTerm
, testsSubst
, testsNorm mhnd
, testsUnify mhnd
, testsSimple mhnd
, testsMatching mhnd
]
-- | Maude signatures with all builtin symbols.
allMaudeSig :: MaudeSig
allMaudeSig = mconcat
[ bpMaudeSig, msetMaudeSig
, pairMaudeSig, symEncMaudeSig, asymEncMaudeSig, signatureMaudeSig, revealSignatureMaudeSig, hashMaudeSig ]
-- testing in ghci
----------------------------------------------------------------------------------
te :: LNTerm
te = pair(inv(x1) *: x2, inv(x3) *: x2)
sub4, sub6 :: LNSubstVFresh
sub4 = substFromListVFresh [(lx1, x4), (lx2, x4)]
sub6 = substFromListVFresh [(lx1, x4), (lx2, x4), (lx3, x4)]
sub4', sub6' :: LNSubst
sub4' = freshToFreeAvoiding sub4 te
sub6' = freshToFreeAvoiding sub6 te
tevs :: [LVar]
tevs = frees te
runTest :: WithMaude a -> IO a
runTest m = do
hnd <- startMaude "maude" allMaudeSig
return $ m `runReader` hnd
{-
runTest $ matchLNTerm [ pair(xor [x5,x6],xor [x4,x5,x6]) `MatchWith` pair(x5,xor [x5,x4]) ]
should be matchable if next matchable also
runTest $ matchLNTerm [ pair(xor [x5,x6],xor [x4,x5,x6]) `MatchWith` pair(x5,xor [x5,x6]) ]
-}
-- convenience abbreviations
----------------------------------------------------------------------------------
pair, expo :: (Term a, Term a) -> Term a
expo = fAppExp
pair = fAppPair
inv :: Term a -> Term a
inv = fAppInv
union, mult :: Ord a => [Term a] -> Term a
union = fAppAC Union
mult = fAppAC Mult
one :: Term a
one = fAppOne
| kmilner/tamarin-prover | lib/term/src/Term/UnitTests.hs | gpl-3.0 | 13,105 | 0 | 19 | 3,348 | 4,493 | 2,478 | 2,015 | 214 | 1 |
-- Copyright 2016 TensorFlow authors.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TypeFamilies #-}
module TensorFlow.Build
( -- * Graph node types
ControlNode(..)
, Unique
-- * Ops
, explicitName
, implicitName
, opDef
, opDefWithName
, opName
, opType
, opAttr
, opInputs
, opControlInputs
-- * The Build monad
, GraphState
, renderedNodeDefs
, BuildT
, Build
, MonadBuild(..)
, addInitializer
, hoistBuildT
, evalBuildT
, runBuildT
, asGraphDef
, addGraphDef
, flushInitializers
, flushNodeBuffer
, summaries
-- * Creating and looking up Ops
, getOrAddOp
, addNewOp
, encodeOutput
, lookupNode
-- * Modifying all nodes in a Build action
, withStateLens
, withDevice
, withNameScope
, withNodeDependencies
) where
import Control.Monad.Catch (MonadThrow, MonadCatch, MonadMask)
import Control.Monad.Fix (MonadFix(..))
import Control.Monad.IO.Class (MonadIO(..))
import Control.Monad.Trans.Class (MonadTrans(..))
import Control.Monad.Trans.State.Strict(StateT(..), mapStateT, evalStateT)
import Data.Default (def)
import Data.Functor.Identity (Identity(..))
import qualified Data.Map.Strict as Map
import Data.Monoid ((<>))
import qualified Data.Set as Set
import Data.Set (Set)
import Data.String (IsString(..))
import Data.Text (Text)
import qualified Data.Text as Text
import Lens.Family2 (Lens', (.~), (^.), (&))
import Lens.Family2.State.Strict (MonadState, use, uses, (.=), (<>=), (%=))
import Lens.Family2.Unchecked (lens)
import Proto.Tensorflow.Core.Framework.Graph
( GraphDef
, node
)
import Proto.Tensorflow.Core.Framework.NodeDef
( NodeDef
, attr
, input
, device
, name
, op
)
import TensorFlow.Orphans ()
import TensorFlow.Output
newtype Unique = Unique Int
deriving (Eq, Ord, Enum)
--------------
implicitName :: PendingNodeName
implicitName = ImplicitName
explicitName :: Text -> PendingNodeName
explicitName = ExplicitName
newtype Scope = Scope {unScope :: Text}
deriving (Eq, Ord, IsString)
instance Show Scope where
show = show . unScope
opDef :: OpType -> OpDef
opDef = opDefWithName ImplicitName
opDefWithName :: PendingNodeName -> OpType -> OpDef
opDefWithName n t = OpDef
{ _opName = n
, _opType = t
, _opAttrs = Map.empty
, _opInputs = []
, _opControlInputs = []
}
data GraphState = GraphState
{ _renderedNodes :: !(Map.Map PendingNode NodeDef)
-- ^ Nodes which have been rendered. Keeps track of the unique ID we
-- assign each implicitly-named node. Also prevents us from adding the
-- same node (implicit or explicit) more than once to the nodeBuffer.
, _renderedNodeDefs :: !(Map.Map NodeName NodeDef)
-- ^ The NodeDefs of nodes which have been rendered. Used by the
-- Gradient module to inspect the node graph.
, _nodeBuffer :: [NodeDef]
-- ^ A list of nodes that should be passed to TensorFlow during
-- the next call to Session.extend (TF_ExtendGraph).
, _nextUnique :: !Unique
-- ^ Unique ID for the next node
-- TODO(judahjacobson): watch for clashes between auto and user names.
, _defaultDevice :: !(Maybe Device)
, _currentScope :: [Scope]
, _defaultControlInputs :: !(Set NodeName)
, _initializationNodes :: [NodeName]
-- ^ The nodes to run next time a TF.run is issued, typically
-- variable initializers.
, _summaries :: [Output]
-- ^ The tensors for summary (ByteString type)
}
-- | A node definition without its final name. Used as a key in the
-- "renderedNodes" map.
-- The NodeDef contained inside has an empty "name" field.
data PendingNode = PendingNode [Scope] !PendingNodeName !NodeDef
deriving (Eq, Ord)
-- Returns an _incomplete_ NodeDef. The name is fixed by addNewOpFromPending.
pendingNodeDef :: PendingNode -> NodeDef
pendingNodeDef (PendingNode _ _ n) = n
initGraphState :: GraphState
initGraphState =
GraphState Map.empty Map.empty [] (Unique 0) Nothing [] Set.empty [] []
renderedNodes :: Lens' GraphState (Map.Map PendingNode NodeDef)
renderedNodes = lens _renderedNodes (\g x -> g { _renderedNodes = x })
renderedNodeDefs :: Lens' GraphState (Map.Map NodeName NodeDef)
renderedNodeDefs = lens _renderedNodeDefs (\g x -> g { _renderedNodeDefs = x })
nodeBuffer :: Lens' GraphState [NodeDef]
nodeBuffer = lens _nodeBuffer (\g x -> g { _nodeBuffer = x })
nextUnique :: Lens' GraphState Unique
nextUnique = lens _nextUnique (\g x -> g { _nextUnique = x })
defaultDevice :: Lens' GraphState (Maybe Device)
defaultDevice = lens _defaultDevice (\g x -> g { _defaultDevice = x })
currentScope :: Lens' GraphState [Scope]
currentScope = lens _currentScope (\g x -> g { _currentScope = x })
defaultControlInputs :: Lens' GraphState (Set NodeName)
defaultControlInputs = lens _defaultControlInputs
(\g x -> g { _defaultControlInputs = x })
initializationNodes :: Lens' GraphState [NodeName]
initializationNodes = lens _initializationNodes (\g x -> g { _initializationNodes = x })
summaries :: Lens' GraphState [Output]
summaries = lens _summaries (\g x -> g { _summaries = x })
-- | An action for building nodes in a TensorFlow graph.
-- Used to manage build state internally as part of the @Session@ monad.
newtype BuildT m a = BuildT (StateT GraphState m a)
deriving (Functor, Applicative, Monad, MonadIO, MonadTrans,
MonadState GraphState, MonadThrow, MonadCatch, MonadMask,
MonadFix)
-- | An action for building nodes in a TensorFlow graph.
type Build = BuildT Identity
-- | This is Control.Monad.Morph.hoist sans the dependency.
hoistBuildT :: (forall a . m a -> n a) -> BuildT m b -> BuildT n b
hoistBuildT f (BuildT m) = BuildT $ mapStateT f m
runBuildT :: BuildT m a -> m (a, GraphState)
runBuildT (BuildT f) = runStateT f initGraphState
evalBuildT :: Monad m => BuildT m a -> m a
evalBuildT (BuildT f) = evalStateT f initGraphState
-- | Lift a 'Build' action into a monad, including any explicit op renderings.
class Monad m => MonadBuild m where
build :: Build a -> m a
instance Monad m => MonadBuild (BuildT m) where
build = hoistBuildT $ return . runIdentity
-- | Get all the NodeDefs that have accumulated so far, and clear that buffer.
flushNodeBuffer :: MonadBuild m => m [NodeDef]
flushNodeBuffer = build $ do
ns <- use nodeBuffer
nodeBuffer .= []
return ns
-- | Get all the initializers that have accumulated so far, and clear
-- that buffer.
flushInitializers :: Monad m => BuildT m [NodeName]
flushInitializers = do
ns <- use initializationNodes
initializationNodes .= []
return ns
-- | Registers the given node to be executed before the next
-- 'TensorFlow.Session.run'.
addInitializer :: MonadBuild m => ControlNode -> m ()
addInitializer (ControlNode i) = build $ initializationNodes %= (i:)
-- | Produce a GraphDef proto representation of the nodes that are rendered in
-- the given 'Build' action.
asGraphDef :: Build a -> GraphDef
asGraphDef b = def & node .~ gs ^. nodeBuffer
where
gs = snd $ runIdentity $ runBuildT b
-- TODO: check against existing nodes for conflicts?
addGraphDef :: MonadBuild m => GraphDef -> m ()
addGraphDef g = build $ nodeBuffer <>= g ^. node
-- | Render the given op if it hasn't been rendered already, and return its
-- name.
getOrAddOp :: OpDef -> Build NodeName
getOrAddOp o = do
pending <- getPendingNode o
uses renderedNodes (Map.lookup pending) >>= \case
Just n -> return $ NodeName $ n ^. name
Nothing -> addNewOpFromPending pending
lookupNode :: NodeName -> Build NodeDef
lookupNode n = uses renderedNodeDefs (Map.lookup n) >>= \case
Just n' -> return n'
Nothing -> error $ "lookupNode: unknown node name " ++ show n
-- | Add a new node for a given 'OpDef'. This is used for making "stateful" ops
-- which are not safe to dedup (e.g, "variable" and "assign").
addNewOp :: OpDef -> Build NodeName
addNewOp o = getPendingNode o >>= addNewOpFromPending
addNewOpFromPending :: PendingNode -> Build NodeName
addNewOpFromPending pending = do
nodeName <- renderPendingNode pending
let nodeDef = pendingNodeDef pending & name .~ unNodeName nodeName
nodeBuffer %= (nodeDef :)
renderedNodes %= Map.insert pending nodeDef
renderedNodeDefs %= Map.insert nodeName nodeDef
return nodeName
-- | Get the pending node corresponding to an OpDef, which may or may not have
-- been rendered before. Implicitly renders all of this node's inputs.
getPendingNode :: OpDef -> Build PendingNode
getPendingNode o = do
-- An empty string in the proto field means that no specific
-- device is specified.
dev <- maybe "" deviceName <$> use defaultDevice
scope <- use currentScope
controls <- use defaultControlInputs
let inputs = map encodeOutput (o ^. opInputs)
let controlInputs
= map makeDep (o ^. opControlInputs ++ Set.toList controls)
return $ PendingNode scope (o ^. opName)
$ def & op .~ (unOpType (o ^. opType) :: Text)
& attr .~ _opAttrs o
& input .~ (inputs ++ controlInputs)
& device .~ dev
where
makeDep = ("^" <>) . unNodeName
-- | Pick a name for a pending node. If it has an explicit name, just use that;
-- if the name is implicit, assign a new unique name based on the op type.
renderPendingNode :: PendingNode -> Build NodeName
renderPendingNode (PendingNode scope pendingName nodeDef)
= NodeName . (scopePrefix <>) <$> getName
where
scopePrefix = Text.concat $ fmap ((<> "/") . unScope) scope
getName = case pendingName of
ExplicitName n -> return n
ImplicitName -> do
u@(Unique k) <- use nextUnique
nextUnique .= succ u
return $ nodeDef ^. op <> "_" <> Text.pack (show k)
-- | Turn an 'Output' into a string representation for the TensorFlow
-- foreign APIs.
encodeOutput :: Output -> Text
encodeOutput (Output (OutputIx 0) n) = unNodeName n
encodeOutput (Output (OutputIx i) n) = unNodeName n <> Text.pack (':' : show i)
-- | Modify some part of the state, run an action, and restore the state
-- after that action is done.
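--
-- An illustrative sketch (this is essentially how 'withNameScope' below is
-- implemented):
--
-- > withStateLens currentScope (Scope "foo" :) act
--
-- runs @act@ with an extra scope pushed and restores the previous scope
-- afterwards.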
withStateLens :: MonadBuild m => Lens' GraphState a -> (a -> a) -> m b -> m b
withStateLens accessor f act = do
old <- build $ use accessor
build $ accessor %= f
result <- act
build $ accessor .= old
return result
-- | Set a device for all nodes rendered in the given 'Build' action
-- (unless further overridden by another use of withDevice).
withDevice :: MonadBuild m => Maybe Device -> m a -> m a
withDevice d = withStateLens defaultDevice (const d)
-- | Prepend a scope to all nodes rendered in the given 'Build' action.
withNameScope :: MonadBuild m => Text -> m a -> m a
withNameScope s = withStateLens currentScope (Scope s :)
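-- For example (sketch, assuming a fresh unique counter): the first
-- implicitly-named "Add" op rendered inside @withNameScope "foo"@ gets the
-- node name "foo/Add_0" (see 'renderPendingNode' above).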
-- | Add control inputs to all nodes rendered in the given 'Build' action.
withNodeDependencies :: MonadBuild m => Set NodeName -> m a -> m a
withNodeDependencies nodes = withStateLens defaultControlInputs (<> nodes)
| cem3394/haskell | tensorflow/src/TensorFlow/Build.hs | apache-2.0 | 11,931 | 0 | 19 | 2,556 | 2,760 | 1,507 | 1,253 | 233 | 2 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-@ LIQUID "--real" @-}
module Term where
import Data.Vector.Algorithms.Common (shiftRI)
import Language.Haskell.Liquid.Prelude (choose)
{-@ foo :: Nat -> Int @-}
foo :: Int -> Int
foo n = go n
where
go 0 = 1
go (d :: Int) = go (d-1)
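-- In the refinement below, @twit@ acts as a termination witness: the
-- precondition forces @twit = u - l@, and that quantity strictly decreases
-- at each recursive call of 'loop'.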
{-@ loop :: twit:Nat -> l:Nat -> u:{v:Nat | v = l + twit} -> Int @-}
loop :: Int -> Int -> Int -> Int
loop twit l u
| u <= l = l
| otherwise = case compare (choose 0) 0 of
LT -> loop (u - (k + 1)) (k+1) u
EQ -> k
GT -> loop (k - l) l k
where k = (u + l) `shiftRI` 1
{-@ loop1 :: l:Nat -> u:{v:Nat | l <= v} -> Int / [u - l] @-}
loop1 :: Int -> Int -> Int
loop1 l u
| u <= l = l
| otherwise = case compare (choose 0) 0 of
LT -> loop1 (k+1) u
EQ -> k
GT -> loop1 l k
where k = (u + l) `shiftRI` 1
{-@ loop3 :: l:Nat -> u:{v:Nat | l <= v} -> Int / [u - l] @-}
loop3 :: Int -> Int -> Int
loop3 l u
| len < 100 = len
| otherwise = let a = loop3 l mid
b = loop3 mid u
in a + b
where len = u - l
mid = (u + l) `shiftRI` 1
| abakst/liquidhaskell | benchmarks/vector-algorithms-0.5.4.2/Data/Vector/Algorithms/Termination.hs | bsd-3-clause | 1,274 | 0 | 13 | 509 | 446 | 234 | 212 | 33 | 3 |
import Data.Pipe
import Data.Pipe.List
input :: Pipe () Char IO ()
input = fromList "Hello, world!" `finalize` putStrLn "finalize"
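-- takeP n forwards at most n values from upstream and then stops (a small,
-- self-contained "take" combinator for this Pipe type).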
takeP :: Monad m => Int -> Pipe a a m ()
takeP 0 = return ()
takeP n = do
mx <- await
case mx of
Just x -> yield x >> takeP (n - 1)
_ -> return ()
output :: Pipe Char () IO String
output = toList
| YoshikuniJujo/simple-pipe | test/testFinalize.hs | bsd-3-clause | 336 | 0 | 13 | 79 | 167 | 82 | 85 | 13 | 2 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE FlexibleContexts #-}
module System.Console.GetOpt.Generics (
-- * Simple IO API
withCli,
WithCli(),
HasArguments,
WithCli.Argument(argumentType, parseArgument),
-- * Customizing the CLI
withCliModified,
Modifier(..),
-- * IO API
getArguments,
modifiedGetArguments,
-- * Pure API
parseArguments,
Result(..),
-- * Re-exports from "Generics.SOP"
Generics.SOP.Generic,
HasDatatypeInfo,
Code,
All2,
) where
import Generics.SOP
import System.Environment
import WithCli
import WithCli.Parser
import WithCli.HasArguments
import WithCli.Result
import System.Console.GetOpt.Generics.Modifier
-- | Parses command line arguments (gotten from 'withArgs') and returns the
-- parsed value. This function should be enough for simple use-cases.
--
-- Throws the same exceptions as 'withCli'.
--
-- Here's an example:
-- ### Start "docs/RecordType.hs" "" Haddock ###
-- |
-- > {-# LANGUAGE DeriveGeneric #-}
-- >
-- > module RecordType where
-- >
-- > import qualified GHC.Generics
-- > import System.Console.GetOpt.Generics
-- >
-- > -- All you have to do is to define a type and derive some instances:
-- >
-- > data Options
-- > = Options {
-- > port :: Int,
-- > daemonize :: Bool,
-- > config :: Maybe FilePath
-- > }
-- > deriving (Show, GHC.Generics.Generic)
-- >
-- > instance Generic Options
-- > instance HasDatatypeInfo Options
-- >
-- > -- Then you can use `getArguments` to create a command-line argument parser:
-- >
-- > main :: IO ()
-- > main = do
-- > options <- getArguments
-- > print (options :: Options)
-- ### End ###
-- | And this is how the above program behaves:
-- ### Start "docs/RecordType.shell-protocol" "" Haddock ###
-- |
-- > $ program --port 8080 --config some/path
-- > Options {port = 8080, daemonize = False, config = Just "some/path"}
-- > $ program --port 8080 --daemonize
-- > Options {port = 8080, daemonize = True, config = Nothing}
-- > $ program --port foo
-- > cannot parse as INTEGER: foo
-- > # exit-code 1
-- > $ program
-- > missing option: --port=INTEGER
-- > # exit-code 1
-- > $ program --help
-- > program [OPTIONS]
-- > --port=INTEGER
-- > --daemonize
-- > --config=STRING (optional)
-- > -h --help show help and exit
-- ### End ###
getArguments :: forall a . (Generic a, HasDatatypeInfo a, All2 HasArguments (Code a)) =>
IO a
getArguments = modifiedGetArguments []
-- | Like 'getArguments` but allows you to pass in 'Modifier's.
modifiedGetArguments :: forall a . (Generic a, HasDatatypeInfo a, All2 HasArguments (Code a)) =>
[Modifier] -> IO a
modifiedGetArguments modifiers = do
args <- getArgs
progName <- getProgName
handleResult $ parseArguments progName modifiers args
-- | Pure variant of 'modifiedGetArguments'.
--
-- Does not throw any exceptions.
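--
-- A sketch of a pure invocation, reusing the example @Options@ type from the
-- documentation of 'getArguments' above (hypothetical values):
--
-- > parseArguments "program" [] ["--port", "8080"] :: Result Options
--
-- which should yield a successful 'Result' with @port = 8080@,
-- @daemonize = False@ and @config = Nothing@.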
parseArguments :: forall a . (Generic a, HasDatatypeInfo a, All2 HasArguments (Code a)) =>
String -- ^ Name of the program (e.g. from 'getProgName').
-> [Modifier] -- ^ List of 'Modifier's to manually tweak the command line interface.
-> [String] -- ^ List of command line arguments to parse (e.g. from 'getArgs').
-> Result a
parseArguments progName mods args = do
modifiers <- mkModifiers mods
parser <- genericParser modifiers
runParser progName modifiers
(normalizeParser (applyModifiers modifiers parser)) args
| kosmikus/getopt-generics | src/System/Console/GetOpt/Generics.hs | bsd-3-clause | 3,594 | 0 | 11 | 799 | 437 | 273 | 164 | 50 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
-- | Generate haddocks
module Stack.Build.Haddock
( generateLocalHaddockIndex
, generateDepsHaddockIndex
, generateSnapHaddockIndex
, shouldHaddockPackage
, shouldHaddockDeps
) where
import Control.Exception (tryJust, onException)
import Control.Monad
import Control.Monad.Catch (MonadCatch)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Trans.Resource
import qualified Data.Foldable as F
import Data.Function
import qualified Data.HashSet as HS
import Data.List
import Data.List.Extra (nubOrd)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Maybe
import Data.Maybe.Extra (mapMaybeM)
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time (UTCTime)
import Path
import Path.Extra
import Path.IO
import Prelude
import Stack.Types.Build
import Stack.PackageDump
import Stack.Types
import qualified System.FilePath as FP
import System.IO.Error (isDoesNotExistError)
import System.Process.Read
-- | Determine whether we should haddock for a package.
shouldHaddockPackage :: BuildOpts
-> Set PackageName -- ^ Packages that we want to generate haddocks for
-- in any case (whether or not we are going to generate
-- haddocks for dependencies)
-> PackageName
-> Bool
shouldHaddockPackage bopts wanted name =
if Set.member name wanted
then boptsHaddock bopts
else shouldHaddockDeps bopts
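-- For example, with @boptsHaddock bopts = True@ and
-- @boptsHaddockDeps bopts = Just False@, a package listed in @wanted@ gets
-- haddocks while a package that is only a dependency does not.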
-- | Determine whether to build haddocks for dependencies.
shouldHaddockDeps :: BuildOpts -> Bool
shouldHaddockDeps bopts = fromMaybe (boptsHaddock bopts) (boptsHaddockDeps bopts)
-- | Generate Haddock index and contents for local packages.
generateLocalHaddockIndex
:: (MonadIO m, MonadCatch m, MonadThrow m, MonadLogger m, MonadBaseControl IO m)
=> EnvOverride
-> WhichCompiler
-> BaseConfigOpts
-> Map GhcPkgId (DumpPackage () ()) -- ^ Local package dump
-> [LocalPackage]
-> m ()
generateLocalHaddockIndex envOverride wc bco localDumpPkgs locals = do
let dumpPackages =
mapMaybe
(\LocalPackage{lpPackage = Package{..}} ->
F.find
(\dp -> dpPackageIdent dp == PackageIdentifier packageName packageVersion)
localDumpPkgs)
locals
generateHaddockIndex
"local packages"
envOverride
wc
dumpPackages
"."
(localDocDir bco)
-- | Generate Haddock index and contents for local packages and their dependencies.
generateDepsHaddockIndex
:: (MonadIO m, MonadCatch m, MonadThrow m, MonadLogger m, MonadBaseControl IO m)
=> EnvOverride
-> WhichCompiler
-> BaseConfigOpts
-> Map GhcPkgId (DumpPackage () ()) -- ^ Global dump information
-> Map GhcPkgId (DumpPackage () ()) -- ^ Snapshot dump information
-> Map GhcPkgId (DumpPackage () ()) -- ^ Local dump information
-> [LocalPackage]
-> m ()
generateDepsHaddockIndex envOverride wc bco globalDumpPkgs snapshotDumpPkgs localDumpPkgs locals = do
let deps = (mapMaybe (`lookupDumpPackage` allDumpPkgs) . nubOrd . findTransitiveDepends . mapMaybe getGhcPkgId) locals
depDocDir = localDocDir bco </> $(mkRelDir "all")
generateHaddockIndex
"local packages and dependencies"
envOverride
wc
deps
".."
depDocDir
where
getGhcPkgId :: LocalPackage -> Maybe GhcPkgId
getGhcPkgId LocalPackage{lpPackage = Package{..}} =
let pkgId = PackageIdentifier packageName packageVersion
mdpPkg = F.find (\dp -> dpPackageIdent dp == pkgId) localDumpPkgs
in fmap dpGhcPkgId mdpPkg
findTransitiveDepends :: [GhcPkgId] -> [GhcPkgId]
findTransitiveDepends = (`go` HS.empty) . HS.fromList
where
go todo checked =
case HS.toList todo of
[] -> HS.toList checked
(ghcPkgId:_) ->
let deps =
case lookupDumpPackage ghcPkgId allDumpPkgs of
Nothing -> HS.empty
Just pkgDP -> HS.fromList (dpDepends pkgDP)
deps' = deps `HS.difference` checked
todo' = HS.delete ghcPkgId (deps' `HS.union` todo)
checked' = HS.insert ghcPkgId checked
in go todo' checked'
allDumpPkgs = [localDumpPkgs, snapshotDumpPkgs, globalDumpPkgs]
-- | Generate Haddock index and contents for all snapshot packages.
generateSnapHaddockIndex
:: (MonadIO m, MonadCatch m, MonadThrow m, MonadLogger m, MonadBaseControl IO m)
=> EnvOverride
-> WhichCompiler
-> BaseConfigOpts
-> Map GhcPkgId (DumpPackage () ()) -- ^ Global package dump
-> Map GhcPkgId (DumpPackage () ()) -- ^ Snapshot package dump
-> m ()
generateSnapHaddockIndex envOverride wc bco globalDumpPkgs snapshotDumpPkgs =
generateHaddockIndex
"snapshot packages"
envOverride
wc
(Map.elems snapshotDumpPkgs ++ Map.elems globalDumpPkgs)
"."
(snapDocDir bco)
-- | Generate Haddock index and contents for specified packages.
generateHaddockIndex
:: (MonadIO m, MonadCatch m, MonadLogger m, MonadBaseControl IO m)
=> Text
-> EnvOverride
-> WhichCompiler
-> [DumpPackage () ()]
-> FilePath
-> Path Abs Dir
-> m ()
generateHaddockIndex descr envOverride wc dumpPackages docRelFP destDir = do
ensureDir destDir
interfaceOpts <- (liftIO . fmap nubOrd . mapMaybeM toInterfaceOpt) dumpPackages
unless (null interfaceOpts) $ do
let destIndexFile = haddockIndexFile destDir
eindexModTime <- liftIO (tryGetModificationTime destIndexFile)
let needUpdate =
case eindexModTime of
Left _ -> True
Right indexModTime ->
or [mt > indexModTime | (_,mt,_,_) <- interfaceOpts]
when needUpdate $ do
$logInfo
(T.concat ["Updating Haddock index for ", descr, " in\n",
T.pack (toFilePath destIndexFile)])
liftIO (mapM_ copyPkgDocs interfaceOpts)
readProcessNull
(Just destDir)
envOverride
(haddockExeName wc)
(["--gen-contents", "--gen-index"] ++ [x | (xs,_,_,_) <- interfaceOpts, x <- xs])
where
toInterfaceOpt :: DumpPackage a b -> IO (Maybe ([String], UTCTime, Path Abs File, Path Abs File))
toInterfaceOpt DumpPackage {..} = do
case dpHaddockInterfaces of
[] -> return Nothing
srcInterfaceFP:_ -> do
srcInterfaceAbsFile <- parseCollapsedAbsFile srcInterfaceFP
let (PackageIdentifier name _) = dpPackageIdent
destInterfaceRelFP =
docRelFP FP.</>
packageIdentifierString dpPackageIdent FP.</>
(packageNameString name FP.<.> "haddock")
destInterfaceAbsFile <- parseCollapsedAbsFile (toFilePath destDir FP.</> destInterfaceRelFP)
esrcInterfaceModTime <- tryGetModificationTime srcInterfaceAbsFile
return $
case esrcInterfaceModTime of
Left _ -> Nothing
Right srcInterfaceModTime ->
Just
( [ "-i"
, concat
[ docRelFP FP.</> packageIdentifierString dpPackageIdent
, ","
, destInterfaceRelFP ]]
, srcInterfaceModTime
, srcInterfaceAbsFile
, destInterfaceAbsFile )
tryGetModificationTime :: Path Abs File -> IO (Either () UTCTime)
tryGetModificationTime = tryJust (guard . isDoesNotExistError) . getModificationTime
copyPkgDocs :: (a, UTCTime, Path Abs File, Path Abs File) -> IO ()
copyPkgDocs (_,srcInterfaceModTime,srcInterfaceAbsFile,destInterfaceAbsFile) = do
-- Copy dependencies' haddocks to documentation directory. This way, relative @../$pkg-$ver@
-- links work and it's easy to upload docs to a web server or otherwise view them in a
-- non-local-filesystem context. We copy instead of symlink for two reasons: (1) symlinks
-- aren't reliably supported on Windows, and (2) the filesystem containing dependencies'
-- docs may not be available where viewing the docs (e.g. if building in a Docker
-- container).
edestInterfaceModTime <- tryGetModificationTime destInterfaceAbsFile
case edestInterfaceModTime of
Left _ -> doCopy
Right destInterfaceModTime
| destInterfaceModTime < srcInterfaceModTime -> doCopy
| otherwise -> return ()
where
doCopy = do
ignoringAbsence (removeDirRecur destHtmlAbsDir)
ensureDir destHtmlAbsDir
onException
(copyDirRecur (parent srcInterfaceAbsFile) destHtmlAbsDir)
(ignoringAbsence (removeDirRecur destHtmlAbsDir))
destHtmlAbsDir = parent destInterfaceAbsFile
-- | Find first DumpPackage matching the GhcPkgId
lookupDumpPackage :: GhcPkgId
-> [Map GhcPkgId (DumpPackage () ())]
-> Maybe (DumpPackage () ())
lookupDumpPackage ghcPkgId dumpPkgs =
listToMaybe $ mapMaybe (Map.lookup ghcPkgId) dumpPkgs
-- | Path of haddock index file.
haddockIndexFile :: Path Abs Dir -> Path Abs File
haddockIndexFile destDir = destDir </> $(mkRelFile "index.html")
-- | Path of local packages documentation directory.
localDocDir :: BaseConfigOpts -> Path Abs Dir
localDocDir bco = bcoLocalInstallRoot bco </> docDirSuffix
-- | Path of snapshot packages documentation directory.
snapDocDir :: BaseConfigOpts -> Path Abs Dir
snapDocDir bco = bcoSnapInstallRoot bco </> docDirSuffix
| harendra-kumar/stack | src/Stack/Build/Haddock.hs | bsd-3-clause | 11,043 | 0 | 24 | 3,649 | 2,229 | 1,159 | 1,070 | 216 | 5 |
{-# LANGUAGE CPP #-}
module TcFlatten(
FlattenEnv(..), FlattenMode(..), mkFlattenEnv,
flatten, flattenMany, flatten_many,
flattenFamApp, flattenTyVarOuter,
unflatten,
eqCanRewrite, eqCanRewriteFR, canRewriteOrSame,
CtFlavourRole, ctEvFlavourRole, ctFlavourRole
) where
#include "HsVersions.h"
import TcRnTypes
import TcType
import Type
import TcEvidence
import TyCon
import TypeRep
import Kind( isSubKind )
import Coercion ( tyConRolesX )
import Var
import VarEnv
import NameEnv
import Outputable
import VarSet
import TcSMonad as TcS
import DynFlags( DynFlags )
import Util
import Bag
import FastString
import Control.Monad( when, liftM )
import MonadUtils ( zipWithAndUnzipM )
import GHC.Exts ( inline )
{-
Note [The flattening story]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* A CFunEqCan is either of form
[G] <F xis> : F xis ~ fsk -- fsk is a FlatSkol
[W] x : F xis ~ fmv -- fmv is a unification variable,
-- but untouchable,
-- with MetaInfo = FlatMetaTv
where
x is the witness variable
fsk/fmv is a flatten skolem
xis are function-free
CFunEqCans are always [Wanted], or [Given], never [Derived]
fmv untouchable just means that in a CTyVarEq, say,
fmv ~ Int
we do NOT unify fmv.
* KEY INSIGHTS:
- A given flatten-skolem, fsk, is known a-priori to be equal to
F xis (the LHS), with <F xis> evidence
- A unification flatten-skolem, fmv, stands for the as-yet-unknown
type to which (F xis) will eventually reduce
* Inert set invariant: if F xis1 ~ fsk1, F xis2 ~ fsk2
then xis1 /= xis2
i.e. at most one CFunEqCan with a particular LHS
* Each canonical CFunEqCan x : F xis ~ fsk/fmv has its own
distinct evidence variable x and flatten-skolem fsk/fmv.
Why? We make a fresh fsk/fmv when the constraint is born;
and we never rewrite the RHS of a CFunEqCan.
* Function applications can occur in the RHS of a CTyEqCan. No reason
  not to allow this, and it reduces the amount of flattening that must occur.
* Flattening a type (F xis):
- If we are flattening in a Wanted/Derived constraint
then create new [W] x : F xis ~ fmv
else create new [G] x : F xis ~ fsk
with fresh evidence variable x and flatten-skolem fsk/fmv
- Add it to the work list
- Replace (F xis) with fsk/fmv in the type you are flattening
- You can also add the CFunEqCan to the "flat cache", which
simply keeps track of all the function applications you
have flattened.
- If (F xis) is in the cache already, just
use its fsk/fmv and evidence x, and emit nothing.
- No need to substitute in the flat-cache. It's not the end
of the world if we start with, say (F alpha ~ fmv1) and
    (F Int ~ fmv2) and then find alpha := Int.  That will
simply give rise to fmv1 := fmv2 via [Interacting rule] below
* Canonicalising a CFunEqCan [G/W] x : F xis ~ fsk/fmv
- Flatten xis (to substitute any tyvars; there are already no functions)
cos :: xis ~ flat_xis
- New wanted x2 :: F flat_xis ~ fsk/fmv
- Add new wanted to flat cache
- Discharge x = F cos ; x2
* Unification flatten-skolems, fmv, ONLY get unified when either
a) The CFunEqCan takes a step, using an axiom
b) During un-flattening
They are never unified in any other form of equality.
  For example [W] fmv ~ Int is stuck; we do not unify fmv.
* We *never* substitute in the RHS (i.e. the fsk/fmv) of a CFunEqCan.
That would destroy the invariant about the shape of a CFunEqCan,
and it would risk wanted/wanted interactions. The only way we
learn information about fsk is when the CFunEqCan takes a step.
However we *do* substitute in the LHS of a CFunEqCan (else it
would never get to fire!)
* [Interacting rule]
(inert) [W] x1 : F tys ~ fmv1
(work item) [W] x2 : F tys ~ fmv2
Just solve one from the other:
x2 := x1
fmv2 := fmv1
This just unites the two fsks into one.
Always solve given from wanted if poss.
* [Firing rule: wanteds]
(work item) [W] x : F tys ~ fmv
instantiate axiom: ax_co : F tys ~ rhs
     Discharge fmv:
fmv := alpha
x := ax_co ; sym x2
[W] x2 : alpha ~ rhs (Non-canonical)
discharging the work item. This is the way that fmv's get
unified; even though they are "untouchable".
NB: this deals with the case where fmv appears in xi, which can
happen; it just happens through the non-canonical stuff
Possible short cut (shortCutReduction) if rhs = G rhs_tys,
where G is a type function. Then
- Flatten rhs_tys (cos : rhs_tys ~ rhs_xis)
- Add G rhs_xis ~ fmv to flat cache
- New wanted [W] x2 : G rhs_xis ~ fmv
- Discharge x := co ; G cos ; x2
* [Firing rule: givens]
(work item) [G] g : F tys ~ fsk
instantiate axiom: co : F tys ~ rhs
Now add non-canonical (since rhs is not flat)
[G] (sym g ; co) : fsk ~ rhs
Short cut (shortCutReduction) for when rhs = G rhs_tys and G is a type function
[G] (co ; g) : G tys ~ fsk
But need to flatten tys: flat_cos : tys ~ flat_tys
[G] (sym (G flat_cos) ; co ; g) : G flat_tys ~ fsk
Why given-fsks, alone, doesn't work
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Could we get away with only flatten meta-tyvars, with no flatten-skolems? No.
[W] w : alpha ~ [F alpha Int]
---> flatten
w = ...w'...
[W] w' : alpha ~ [fsk]
[G] <F alpha Int> : F alpha Int ~ fsk
--> unify (no occurs check)
alpha := [fsk]
But since fsk = F alpha Int, this is really an occurs check error. If
that is all we know about alpha, we will succeed in constraint
solving, producing a program with an infinite type.
Even if we did finally get (g : fsk ~ Bool) by solving (F alpha Int ~ fsk)
using axiom, zonking would not see it, so (x::alpha) sitting in the
tree will get zonked to an infinite type. (Zonking always only does
refl stuff.)
Why flatten-meta-vars, alone doesn't work
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Look at Simple13, with unification-fmvs only
[G] g : a ~ [F a]
---> Flatten given
g' = g;[x]
[G] g' : a ~ [fmv]
[W] x : F a ~ fmv
--> subst a in x
x = F g' ; x2
[W] x2 : F [fmv] ~ fmv
And now we have an evidence cycle between g' and x!
If we used a given instead (ie current story)
[G] g : a ~ [F a]
---> Flatten given
g' = g;[x]
[G] g' : a ~ [fsk]
[G] <F a> : F a ~ fsk
---> Substitute for a
[G] g' : a ~ [fsk]
[G] F (sym g'); <F a> : F [fsk] ~ fsk
Why is it right to treat fmv's differently to ordinary unification vars?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
f :: forall a. a -> a -> Bool
g :: F Int -> F Int -> Bool
Consider
f (x:Int) (y:Bool)
This gives alpha~Int, alpha~Bool. There is an inconsistency,
but really only one error. SherLoc may tell you which location
is most likely, based on other occurrences of alpha.
Consider
g (x:Int) (y:Bool)
Here we get (F Int ~ Int, F Int ~ Bool), which flattens to
(fmv ~ Int, fmv ~ Bool)
But there are really TWO separate errors. We must not complain
about Int~Bool. Moreover these two errors could arise in entirely
unrelated parts of the code. (In the alpha case, there must be
*some* connection (eg v:alpha in common envt).)
Note [Orient equalities with flatten-meta-vars on the left]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This example comes from IndTypesPerfMerge
From the ambiguity check for
f :: (F a ~ a) => a
we get:
[G] F a ~ a
[W] F alpha ~ alpha, alpha ~ a
From Givens we get
[G] F a ~ fsk, fsk ~ a
Now if we flatten we get
[W] alpha ~ fmv, F alpha ~ fmv, alpha ~ a
Now, processing the first one first, choosing alpha := fmv
[W] F fmv ~ fmv, fmv ~ a
And now we are stuck. We must either *unify* fmv := a, or
use the fmv ~ a to rewrite F fmv ~ fmv, so we can make it
meet up with the given F a ~ blah.
Solution: always put fmvs on the left, so we get
[W] fmv ~ alpha, F alpha ~ fmv, alpha ~ a
The point is that fmvs are very uninformative, so doing alpha := fmv
is a bad idea. We want to use other constraints on alpha first.
Note [Derived constraints from wanted CTyEqCans]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Is this type ambiguous: (Foo e ~ Maybe e) => Foo e
(indexed-types/should_fail/T4093a)
[G] Foo e ~ Maybe e
[W] Foo e ~ Foo ee -- ee is a unification variable
  [W] Foo ee ~ Maybe ee
---
[G] Foo e ~ fsk
[G] fsk ~ Maybe e
[W] Foo e ~ fmv1
[W] Foo ee ~ fmv2
[W] fmv1 ~ fmv2
[W] fmv2 ~ Maybe ee
---> fmv1 := fsk by matching LHSs
[W] Foo ee ~ fmv2
[W] fsk ~ fmv2
[W] fmv2 ~ Maybe ee
--->
[W] Foo ee ~ fmv2
[W] fmv2 ~ Maybe e
[W] fmv2 ~ Maybe ee
Now maybe we should get [D] e ~ ee, and then we'd solve it entirely.
But if in a similar situation we got [D] Int ~ Bool we'd be back
to complaining about wanted/wanted interactions. Maybe this arises
also for fundeps?
Here's another example:
f :: [a] -> [b] -> blah
f (e1 :: F Int) (e2 :: F Int)
we get
F Int ~ fmv
fmv ~ [alpha]
fmv ~ [beta]
We want: alpha := beta (which might unlock something else). If we
generated [D] [alpha] ~ [beta] we'd be good here.
Current story: we don't generate these derived constraints. We could, but
we'd want to make them very weak, so we didn't get the Int~Bool complaint.
************************************************************************
* *
* Other notes (Oct 14)
      I have not revisited these, but I didn't want to discard them
* *
************************************************************************
Try: rewrite wanted with wanted only for fmvs (not all meta-tyvars)
But: fmv ~ alpha[0]
alpha[0] ~ fmv’
Now we don’t see that fmv ~ fmv’, which is a problem for injectivity detection.
Conclusion: rewrite wanteds with wanted for all untouchables.
skol ~ untch, must re-orient to untch ~ skol, so that we can use it to rewrite.
************************************************************************
* *
* Examples
Here is a long series of examples I had to work through
* *
************************************************************************
Simple20
~~~~~~~~
axiom F [a] = [F a]
[G] F [a] ~ a
-->
[G] fsk ~ a
[G] [F a] ~ fsk (nc)
-->
[G] F a ~ fsk2
[G] fsk ~ [fsk2]
[G] fsk ~ a
-->
[G] F a ~ fsk2
[G] a ~ [fsk2]
[G] fsk ~ a
-----------------------------------
----------------------------------------
indexed-types/should_compile/T44984
[W] H (F Bool) ~ H alpha
[W] alpha ~ F Bool
-->
F Bool ~ fmv0
H fmv0 ~ fmv1
H alpha ~ fmv2
fmv1 ~ fmv2
fmv0 ~ alpha
flatten
~~~~~~~
fmv0 := F Bool
fmv1 := H (F Bool)
fmv2 := H alpha
alpha := F Bool
plus
fmv1 ~ fmv2
But these two are equal under the above assumptions.
Solve by Refl.
--- under plan B, namely solve fmv1:=fmv2 eagerly ---
[W] H (F Bool) ~ H alpha
[W] alpha ~ F Bool
-->
F Bool ~ fmv0
H fmv0 ~ fmv1
H alpha ~ fmv2
fmv1 ~ fmv2
fmv0 ~ alpha
-->
F Bool ~ fmv0
H fmv0 ~ fmv1
H alpha ~ fmv2 fmv2 := fmv1
fmv0 ~ alpha
flatten
fmv0 := F Bool
fmv1 := H fmv0 = H (F Bool)
retain H alpha ~ fmv2
because fmv2 has been filled
alpha := F Bool
----------------------------
indexed-types/should_fail/T4179
after solving
[W] fmv_1 ~ fmv_2
[W] A3 (FCon x) ~ fmv_1 (CFunEqCan)
[W] A3 (x (aoa -> fmv_2)) ~ fmv_2 (CFunEqCan)
----------------------------------------
indexed-types/should_fail/T7729a
a) [W] BasePrimMonad (Rand m) ~ m1
b) [W] tt m1 ~ BasePrimMonad (Rand m)
---> process (b) first
    BasePrimMonad (Rand m) ~ fmv_atH
fmv_atH ~ tt m1
---> now process (a)
m1 ~ s_atH ~ tt m1 -- An obscure occurs check
----------------------------------------
typecheck/TcTypeNatSimple
Original constraint
[W] x + y ~ x + alpha (non-canonical)
==>
[W] x + y ~ fmv1 (CFunEqCan)
  [W] x + alpha ~ fmv2   (CFunEqCan)
[W] fmv1 ~ fmv2 (CTyEqCan)
(sigh)
----------------------------------------
indexed-types/should_fail/GADTwrong1
[G] Const a ~ ()
==> flatten
[G] fsk ~ ()
work item: Const a ~ fsk
==> fire top rule
[G] fsk ~ ()
work item fsk ~ ()
Surely the work item should rewrite to () ~ ()? Well, maybe not;
it's a very special case. More generally, our givens look like
F a ~ Int, where (F a) is not reducible.
----------------------------------------
indexed_types/should_fail/T8227:
Why using a different can-rewrite rule in CFunEqCan heads
does not work.
Assuming NOT rewriting wanteds with wanteds
Inert: [W] fsk_aBh ~ fmv_aBk -> fmv_aBk
[W] fmv_aBk ~ fsk_aBh
[G] Scalar fsk_aBg ~ fsk_aBh
[G] V a ~ f_aBg
Worklist includes [W] Scalar fmv_aBi ~ fmv_aBk
fmv_aBi, fmv_aBk are flatten unification variables
Work item: [W] V fsk_aBh ~ fmv_aBi
Note that the inert wanteds are cyclic, because we do not rewrite
wanteds with wanteds.
Then we go into a loop when normalising the work-item, because we
use rewriteOrSame on the argument of V.
Conclusion: Don't make canRewrite context specific; instead use
[W] a ~ ty to rewrite a wanted iff 'a' is a unification variable.
----------------------------------------
Here is a somewhat similar case:
type family G a :: *
blah :: (G a ~ Bool, Eq (G a)) => a -> a
blah = error "urk"
foo x = blah x
For foo we get
[W] Eq (G a), G a ~ Bool
Flattening
[W] G a ~ fmv, Eq fmv, fmv ~ Bool
We can't simplify away the Eq Bool unless we substitute for fmv.
Maybe that doesn't matter: we would still be left with unsolved
G a ~ Bool.
--------------------------
Trac #9318 has a very simple program leading to
[W] F Int ~ Int
[W] F Int ~ Bool
We don't want to get "Error Int~Bool". But if fmv's can rewrite
wanteds, we will
[W] fmv ~ Int
[W] fmv ~ Bool
--->
[W] Int ~ Bool
************************************************************************
* *
* The main flattening functions
* *
************************************************************************
Note [Flattening]
~~~~~~~~~~~~~~~~~~~~
flatten ty ==> (xi, cc)
where
xi has no type functions, unless they appear under ForAlls
cc = Auxiliary given (equality) constraints constraining
the fresh type variables in xi. Evidence for these
is always the identity coercion, because internally the
fresh flattening skolem variables are actually identified
with the types they have been generated to stand in for.
Note that it is flatten's job to flatten *every type function it sees*.
flatten is only called on *arguments* to type functions, by canEqGiven.
Recall that in comments we use alpha[flat = ty] to represent a
flattening skolem variable alpha which has been generated to stand in
for ty.
----- Example of flattening a constraint: ------
flatten (List (F (G Int))) ==> (xi, cc)
where
xi = List alpha
cc = { G Int ~ beta[flat = G Int],
F beta ~ alpha[flat = F beta] }
Here
* alpha and beta are 'flattening skolem variables'.
* All the constraints in cc are 'given', and all their coercion terms
are the identity.
NB: Flattening Skolems only occur in canonical constraints, which
are never zonked, so we don't need to worry about zonking doing
accidental unflattening.
Note that we prefer to leave type synonyms unexpanded when possible,
so when the flattener encounters one, it first asks whether its
transitive expansion contains any type function applications. If so,
it expands the synonym and proceeds; if not, it simply returns the
unexpanded synonym.
Note [Flattener EqRels]
~~~~~~~~~~~~~~~~~~~~~~~
When flattening, we need to know which equality relation -- nominal
or representation -- we should be respecting. The only difference is
that we rewrite variables by representational equalities when fe_eq_rel
is ReprEq.
-}
data FlattenEnv
= FE { fe_mode :: FlattenMode
, fe_loc :: CtLoc
, fe_flavour :: CtFlavour
, fe_eq_rel :: EqRel } -- See Note [Flattener EqRels]
data FlattenMode -- Postcondition for all three: inert wrt the type substitution
= FM_FlattenAll -- Postcondition: function-free
| FM_Avoid TcTyVar Bool -- See Note [Lazy flattening]
-- Postcondition:
-- * tyvar is only mentioned in result under a rigid path
-- e.g. [a] is ok, but F a won't happen
-- * If flat_top is True, top level is not a function application
-- (but under type constructors is ok e.g. [F a])
| FM_SubstOnly -- See Note [Flattening under a forall]
mkFlattenEnv :: FlattenMode -> CtEvidence -> FlattenEnv
mkFlattenEnv fm ctev = FE { fe_mode = fm
, fe_loc = ctEvLoc ctev
, fe_flavour = ctEvFlavour ctev
, fe_eq_rel = ctEvEqRel ctev }
feRole :: FlattenEnv -> Role
feRole = eqRelRole . fe_eq_rel
{-
Note [Lazy flattening]
~~~~~~~~~~~~~~~~~~~~~~
The idea of FM_Avoid mode is to flatten less aggressively. If we have
a ~ [F Int]
there seems to be no great merit in lifting out (F Int). But if it was
a ~ [G a Int]
then we *do* want to lift it out, in case (G a Int) reduces to Bool, say,
which gets rid of the occurs-check problem. (For the flat_top Bool, see
comments above and at call sites.)
HOWEVER, the lazy flattening actually seems to make type inference go
*slower*, not faster. perf/compiler/T3064 is a case in point; it gets
*dramatically* worse with FM_Avoid. I think it may be because
floating the types out means we normalise them, and that often makes
them smaller and perhaps allows more re-use of previously solved
goals. But to be honest I'm not absolutely certain, so I am leaving
FM_Avoid in the code base. What I'm removing is the unique place
where it is *used*, namely in TcCanonical.canEqTyVar.
See also Note [Conservative unification check] in TcUnify, which gives
other examples where lazy flattening caused problems.
Bottom line: FM_Avoid is unused for now (Nov 14).
Note: T5321Fun got faster when I disabled FM_Avoid
      T5837 did too, but it's pathological anyway
Note [Phantoms in the flattener]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
data Proxy p = Proxy
and we're flattening (Proxy ty) w.r.t. ReprEq. Then, we know that `ty`
is really irrelevant -- it will be ignored when solving for representational
equality later on. So, we omit flattening `ty` entirely. This may
violate the expectation of "xi"s for a bit, but the canonicaliser will
soon throw out the phantoms when decomposing a TyConApp. (Or, the
canonicaliser will emit an insoluble, in which case the unflattened version
yields a better error message anyway.)
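A small standalone check of this (not part of this module): with the Proxy
above, the phantom role of p is exactly what lets the following compile,

  import Data.Coerce (coerce)

  castProxy :: Proxy Bool -> Proxy Int
  castProxy = coerce   -- p is phantom, so its argument is ignored when
                       -- solving the representational (Coercible) constraint

which is why the flattener can safely skip over phantom arguments.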
Note [flatten_many performance]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In programs with lots of type-level evaluation, flatten_many becomes
part of a tight loop. For example, see test perf/compiler/T9872a, which
calls flatten_many a whopping 7,106,808 times. It is thus important
that flatten_many be efficient.
Performance testing showed that the current implementation is indeed
efficient. It's critically important that zipWithAndUnzipM be
specialized to TcS, and it's also quite helpful to actually `inline`
it. On test T9872a, here are the allocation stats (Dec 16, 2014):
* Unspecialized, uninlined: 8,472,613,440 bytes allocated in the heap
* Specialized, uninlined: 6,639,253,488 bytes allocated in the heap
* Specialized, inlined: 6,281,539,792 bytes allocated in the heap
To improve performance even further, flatten_many_nom is split off
from flatten_many, as nominal equality is the common case. This would
be natural to write using mapAndUnzipM, but even inlined, that function
is not as performant as a hand-written loop.
* mapAndUnzipM, inlined: 7,463,047,432 bytes allocated in the heap
* hand-written recursion: 5,848,602,848 bytes allocated in the heap
If you make any change here, pay close attention to the T9872{a,b,c} tests
and T5321Fun.
If we need to make this yet more performant, a possible way forward is to
duplicate the flattener code for the nominal case, and make that case
faster. This doesn't seem quite worth it, yet.
-}
------------------
flatten :: FlattenMode -> CtEvidence -> TcType -> TcS (Xi, TcCoercion)
flatten mode ev ty
= runFlatten (flatten_one fmode ty)
where
fmode = mkFlattenEnv mode ev
flattenMany :: FlattenMode -> CtEvidence -> [Role]
-> [TcType] -> TcS ([Xi], [TcCoercion])
-- Flatten a bunch of types all at once. Roles on the coercions returned
-- always match the corresponding roles passed in.
flattenMany mode ev roles tys
= runFlatten (flatten_many fmode roles tys)
where
fmode = mkFlattenEnv mode ev
flattenFamApp :: FlattenMode -> CtEvidence
-> TyCon -> [TcType] -> TcS (Xi, TcCoercion)
flattenFamApp mode ev tc tys
= runFlatten (flatten_fam_app fmode tc tys)
where
fmode = mkFlattenEnv mode ev
------------------
flatten_many :: FlattenEnv -> [Role] -> [Type] -> TcS ([Xi], [TcCoercion])
-- Coercions :: Xi ~ Type, at roles given
-- Returns True iff (no flattening happened)
-- NB: The EvVar inside the 'fe_ev :: CtEvidence' is unused,
-- we merely want (a) Given/Solved/Derived/Wanted info
-- (b) the GivenLoc/WantedLoc for when we create new evidence
flatten_many fmode roles tys
-- See Note [flatten_many performance]
= inline zipWithAndUnzipM go roles tys
where
go Nominal ty = flatten_one (setFEEqRel fmode NomEq) ty
go Representational ty = flatten_one (setFEEqRel fmode ReprEq) ty
go Phantom ty = -- See Note [Phantoms in the flattener]
return (ty, mkTcPhantomCo ty ty)
-- | Like 'flatten_many', but assumes that every role is nominal.
flatten_many_nom :: FlattenEnv -> [Type] -> TcS ([Xi], [TcCoercion])
flatten_many_nom _ [] = return ([], [])
-- See Note [flatten_many performance]
flatten_many_nom fmode (ty:tys)
= ASSERT( fe_eq_rel fmode == NomEq )
do { (xi, co) <- flatten_one fmode ty
; (xis, cos) <- flatten_many_nom fmode tys
; return (xi:xis, co:cos) }
------------------
flatten_one :: FlattenEnv -> TcType -> TcS (Xi, TcCoercion)
-- Flatten a type to get rid of type function applications, returning
-- the new type-function-free type, and a collection of new equality
-- constraints. See Note [Flattening] for more detail.
--
-- Postcondition: Coercion :: Xi ~ TcType
-- The role on the result coercion matches the EqRel in the FlattenEnv
flatten_one fmode xi@(LitTy {}) = return (xi, mkTcReflCo (feRole fmode) xi)
flatten_one fmode (TyVarTy tv)
= flattenTyVar fmode tv
flatten_one fmode (AppTy ty1 ty2)
= do { (xi1,co1) <- flatten_one fmode ty1
; case (fe_eq_rel fmode, nextRole xi1) of
(NomEq, _) -> flatten_rhs xi1 co1 NomEq
(ReprEq, Nominal) -> flatten_rhs xi1 co1 NomEq
(ReprEq, Representational) -> flatten_rhs xi1 co1 ReprEq
(ReprEq, Phantom) ->
return (mkAppTy xi1 ty2, co1 `mkTcAppCo` mkTcNomReflCo ty2) }
where
flatten_rhs xi1 co1 eq_rel2
= do { (xi2,co2) <- flatten_one (setFEEqRel fmode eq_rel2) ty2
; traceTcS "flatten/appty"
(ppr ty1 $$ ppr ty2 $$ ppr xi1 $$
ppr co1 $$ ppr xi2 $$ ppr co2)
; let role1 = feRole fmode
role2 = eqRelRole eq_rel2
; return ( mkAppTy xi1 xi2
, mkTcTransAppCo role1 co1 xi1 ty1
role2 co2 xi2 ty2
role1 ) } -- output should match fmode
flatten_one fmode (FunTy ty1 ty2)
= do { (xi1,co1) <- flatten_one fmode ty1
; (xi2,co2) <- flatten_one fmode ty2
; return (mkFunTy xi1 xi2, mkTcFunCo (feRole fmode) co1 co2) }
flatten_one fmode (TyConApp tc tys)
-- Expand type synonyms that mention type families
-- on the RHS; see Note [Flattening synonyms]
| Just (tenv, rhs, tys') <- tcExpandTyCon_maybe tc tys
, let expanded_ty = mkAppTys (substTy (mkTopTvSubst tenv) rhs) tys'
= case fe_mode fmode of
FM_FlattenAll | anyNameEnv isTypeFamilyTyCon (tyConsOfType rhs)
-> flatten_one fmode expanded_ty
| otherwise
-> flattenTyConApp fmode tc tys
_ -> flattenTyConApp fmode tc tys
-- Otherwise, it's a type function application, and we have to
-- flatten it away as well, and generate a new given equality constraint
-- between the application and a newly generated flattening skolem variable.
| isTypeFamilyTyCon tc
= flatten_fam_app fmode tc tys
-- For * a normal data type application
-- * data family application
-- we just recursively flatten the arguments.
| otherwise
-- FM_Avoid stuff commented out; see Note [Lazy flattening]
-- , let fmode' = case fmode of -- Switch off the flat_top bit in FM_Avoid
-- FE { fe_mode = FM_Avoid tv _ }
-- -> fmode { fe_mode = FM_Avoid tv False }
-- _ -> fmode
= flattenTyConApp fmode tc tys
flatten_one fmode ty@(ForAllTy {})
-- We allow for-alls when, but only when, no type function
-- applications inside the forall involve the bound type variables.
= do { let (tvs, rho) = splitForAllTys ty
; (rho', co) <- flatten_one (setFEMode fmode FM_SubstOnly) rho
-- Substitute only under a forall
-- See Note [Flattening under a forall]
; return (mkForAllTys tvs rho', foldr mkTcForAllCo co tvs) }
flattenTyConApp :: FlattenEnv -> TyCon -> [TcType] -> TcS (Xi, TcCoercion)
flattenTyConApp fmode tc tys
= do { (xis, cos) <- case fe_eq_rel fmode of
NomEq -> flatten_many_nom fmode tys
ReprEq -> flatten_many fmode (tyConRolesX role tc) tys
; return (mkTyConApp tc xis, mkTcTyConAppCo role tc cos) }
where
role = feRole fmode
{-
Note [Flattening synonyms]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Not expanding synonyms aggressively improves error messages, and
keeps types smaller. But we need to take care.
Suppose
type T a = a -> a
and we want to flatten the type (T (F a)). Then we can safely flatten
the (F a) to a skolem, and return (T fsk). We don't need to expand the
synonym. This works because TcTyConAppCo can deal with synonyms
(unlike TyConAppCo), see Note [TcCoercions] in TcEvidence.
But (Trac #8979) for
type T a = (F a, a) where F is a type function
we must expand the synonym in (say) T Int, to expose the type function
to the flattener.
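A standalone sketch of the two situations (illustrative only, TypeFamilies
assumed; not part of this module):

  type family F a
  type instance F Int = Bool

  type S a = a -> a     -- no family on the RHS: safe to leave un-expanded
  type T a = (F a, a)   -- mentions F: must be expanded so the flattener
                        -- sees the (F a) inside, as in the Trac #8979 case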
Note [Flattening under a forall]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Under a forall, we
(a) MUST apply the inert substitution
(b) MUST NOT flatten type family applications
Hence FM_SubstOnly.
For (a) consider c ~ a, a ~ T (forall b. (b, [c]))
If we don't apply the c~a substitution to the second constraint
we won't see the occurs-check error.
For (b) consider (a ~ forall b. F a b), we don't want to flatten
to (a ~ forall b.fsk, F a b ~ fsk)
because now the 'b' has escaped its scope. We'd have to flatten to
(a ~ forall b. fsk b, forall b. F a b ~ fsk b)
and we have not begun to think about how to make that work!
************************************************************************
* *
Flattening a type-family application
* *
************************************************************************
-}
flatten_fam_app, flatten_exact_fam_app, flatten_exact_fam_app_fully
:: FlattenEnv -> TyCon -> [TcType] -> TcS (Xi, TcCoercion)
-- flatten_fam_app can be over-saturated
-- flatten_exact_fam_app is exactly saturated
-- flatten_exact_fam_app_fully lifts out the application to top level
-- Postcondition: Coercion :: Xi ~ F tys
flatten_fam_app fmode tc tys -- Can be over-saturated
= ASSERT( tyConArity tc <= length tys ) -- Type functions are saturated
-- The type function might be *over* saturated
-- in which case the remaining arguments should
-- be dealt with by AppTys
do { let (tys1, tys_rest) = splitAt (tyConArity tc) tys
; (xi1, co1) <- flatten_exact_fam_app fmode tc tys1
-- co1 :: xi1 ~ F tys1
-- all Nominal roles b/c the tycon is oversaturated
; (xis_rest, cos_rest) <- flatten_many fmode (repeat Nominal) tys_rest
            -- cos_rest :: xis_rest ~ tys_rest
; return ( mkAppTys xi1 xis_rest -- NB mkAppTys: rhs_xi might not be a type variable
-- cf Trac #5655
, mkTcAppCos co1 cos_rest -- (rhs_xi :: F xis) ; (F cos :: F xis ~ F tys)
) }
flatten_exact_fam_app fmode tc tys
= case fe_mode fmode of
FM_FlattenAll -> flatten_exact_fam_app_fully fmode tc tys
FM_SubstOnly -> do { (xis, cos) <- flatten_many fmode roles tys
; return ( mkTyConApp tc xis
, mkTcTyConAppCo (feRole fmode) tc cos ) }
FM_Avoid tv flat_top ->
do { (xis, cos) <- flatten_many fmode roles tys
; if flat_top || tv `elemVarSet` tyVarsOfTypes xis
then flatten_exact_fam_app_fully fmode tc tys
else return ( mkTyConApp tc xis
, mkTcTyConAppCo (feRole fmode) tc cos ) }
where
-- These are always going to be Nominal for now,
-- but not if #8177 is implemented
roles = tyConRolesX (feRole fmode) tc
flatten_exact_fam_app_fully fmode tc tys
-- See Note [Reduce type family applications eagerly]
= try_to_reduce tc tys False id $
do { (xis, cos) <- flatten_many_nom (setFEEqRel (setFEMode fmode FM_FlattenAll) NomEq) tys
; let ret_co = mkTcTyConAppCo (feRole fmode) tc cos
-- ret_co :: F xis ~ F tys
; mb_ct <- lookupFlatCache tc xis
; case mb_ct of
Just (co, rhs_ty, flav) -- co :: F xis ~ fsk
| (flav, NomEq) `canRewriteOrSameFR` (feFlavourRole fmode)
-> -- Usable hit in the flat-cache
-- We certainly *can* use a Wanted for a Wanted
do { traceTcS "flatten/flat-cache hit" $ (ppr tc <+> ppr xis $$ ppr rhs_ty $$ ppr co)
; (fsk_xi, fsk_co) <- flatten_one fmode rhs_ty
-- The fsk may already have been unified, so flatten it
-- fsk_co :: fsk_xi ~ fsk
; return (fsk_xi, fsk_co `mkTcTransCo`
maybeTcSubCo (fe_eq_rel fmode)
(mkTcSymCo co) `mkTcTransCo`
ret_co) }
-- :: fsk_xi ~ F xis
-- Try to reduce the family application right now
-- See Note [Reduce type family applications eagerly]
_ -> try_to_reduce tc xis True (`mkTcTransCo` ret_co) $
do { let fam_ty = mkTyConApp tc xis
; (ev, fsk) <- newFlattenSkolem (fe_flavour fmode)
(fe_loc fmode)
fam_ty
; let fsk_ty = mkTyVarTy fsk
co = ctEvCoercion ev
; extendFlatCache tc xis (co, fsk_ty, ctEvFlavour ev)
-- The new constraint (F xis ~ fsk) is not necessarily inert
-- (e.g. the LHS may be a redex) so we must put it in the work list
; let ct = CFunEqCan { cc_ev = ev
, cc_fun = tc
, cc_tyargs = xis
, cc_fsk = fsk }
; emitFlatWork ct
; traceTcS "flatten/flat-cache miss" $ (ppr fam_ty $$ ppr fsk $$ ppr ev)
; return (fsk_ty, maybeTcSubCo (fe_eq_rel fmode)
(mkTcSymCo co)
`mkTcTransCo` ret_co) }
}
where
try_to_reduce :: TyCon -- F, family tycon
-> [Type] -- args, not necessarily flattened
-> Bool -- add to the flat cache?
-> ( TcCoercion -- :: xi ~ F args
-> TcCoercion ) -- what to return from outer function
-> TcS (Xi, TcCoercion) -- continuation upon failure
-> TcS (Xi, TcCoercion)
try_to_reduce tc tys cache update_co k
= do { mb_match <- matchFam tc tys
; case mb_match of
Just (norm_co, norm_ty)
-> do { traceTcS "Eager T.F. reduction success" $
vcat [ppr tc, ppr tys, ppr norm_ty, ppr cache]
; (xi, final_co) <- flatten_one fmode norm_ty
; let co = norm_co `mkTcTransCo` mkTcSymCo final_co
; when cache $
extendFlatCache tc tys (co, xi, fe_flavour fmode)
; return (xi, update_co $ mkTcSymCo co) }
Nothing -> k }
{- Note [Reduce type family applications eagerly]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we come across a type-family application like (Append (Cons x Nil) t),
then, rather than flattening to a skolem etc, we may as well just reduce
it on the spot to (Cons x t). This saves a lot of intermediate steps.
Examples that are helped are tests T9872, and T5321Fun.
Performance testing indicates that it's best to try this *twice*, once
before flattening arguments and once after flattening arguments.
Adding the extra reduction attempt before flattening arguments cut
the allocation amounts for the T9872{a,b,c} tests by half. Testing
also indicated that the early reduction should not use the flat-cache,
but that the later reduction should. It's possible that with more
examples, we might learn that these knobs should be set differently.
Once we've got a flat rhs, we extend the flatten-cache to record the
result. Doing so can save lots of work when the same redex shows up
more than once. Note that we record the link from the redex all the
way to its *final* value, not just the single step reduction.
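For reference, a standalone definition of the family in the example above
could be (an assumed illustration, not taken from the testsuite; needs
DataKinds, PolyKinds and TypeFamilies):

  data List a = Nil | Cons a (List a)

  type family Append (xs :: List k) (ys :: List k) :: List k where
    Append 'Nil         ys = ys
    Append ('Cons x xs) ys = 'Cons x (Append xs ys)

so that (Append (Cons x Nil) t) reduces in two top-level steps to (Cons x t).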
************************************************************************
* *
Flattening a type variable
* *
************************************************************************
Note [The inert equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Definition [Can-rewrite relation]
A "can-rewrite" relation between flavours, written f1 >= f2, is a
binary relation with the following properties
R1. >= is transitive
R2. If f1 >= f, and f2 >= f,
then either f1 >= f2 or f2 >= f1
Lemma. If f1 >= f then f1 >= f1
Proof. By property (R2), with f1=f2
Definition [Generalised substitution]
A "generalised substitution" S is a set of triples (a -f-> t), where
a is a type variable
t is a type
f is a flavour
such that
(WF1) if (a -f1-> t1) in S
(a -f2-> t2) in S
then neither (f1 >= f2) nor (f2 >= f1) hold
(WF2) if (a -f-> t) is in S, then t /= a
Definition [Applying a generalised substitution]
If S is a generalised substitution
S(f,a) = t, if (a -fs-> t) in S, and fs >= f
= a, otherwise
Application extends naturally to types S(f,t), modulo roles.
See Note [Flavours with roles].
Theorem: S(f,a) is well defined as a function.
Proof: Suppose (a -f1-> t1) and (a -f2-> t2) are both in S,
and f1 >= f and f2 >= f
Then by (R2) f1 >= f2 or f2 >= f1, which contradicts (WF)
Notation: repeated application.
S^0(f,t) = t
    S^(n+1)(f,t) = S(f, S^n(f,t))
Definition: inert generalised substitution
A generalised substitution S is "inert" iff
(IG1) there is an n such that
for every f,t, S^n(f,t) = S^(n+1)(f,t)
(IG2) if (b -f-> t) in S, and f >= f, then S(f,t) = t
that is, each individual binding is "self-stable"
----------------------------------------------------------------
Our main invariant:
the inert CTyEqCans should be an inert generalised substitution
----------------------------------------------------------------
Note that inertness is not the same as idempotence. To apply S to a
type, you may have to apply it recursively. But inertness does
guarantee that this recursive use will terminate.
---------- The main theorem --------------
Suppose we have a "work item"
a -fw-> t
and an inert generalised substitution S,
such that
(T1) S(fw,a) = a -- LHS of work-item is a fixpoint of S(fw,_)
(T2) S(fw,t) = t -- RHS of work-item is a fixpoint of S(fw,_)
(T3) a not in t -- No occurs check in the work item
(K1) if (a -fs-> s) is in S then not (fw >= fs)
(K2) if (b -fs-> s) is in S, where b /= a, then
(K2a) not (fs >= fs)
or (K2b) not (fw >= fs)
or (K2c) a not in s
(K3) If (b -fs-> s) is in S with (fw >= fs), then
(K3a) If the role of fs is nominal: s /= a
(K3b) If the role of fs is representational: EITHER
a not in s, OR
the path from the top of s to a includes at least one non-newtype
then the extended substitution T = S+(a -fw-> t)
is an inert generalised substitution.
The idea is that
* (T1-2) are guaranteed by exhaustively rewriting the work-item
with S(fw,_).
* T3 is guaranteed by a simple occurs-check on the work item.
* (K1-3) are the "kick-out" criteria. (As stated, they are really the
"keep" criteria.) If the current inert S contains a triple that does
not satisfy (K1-3), then we remove it from S by "kicking it out",
and re-processing it.
* Note that kicking out is a Bad Thing, because it means we have to
re-process a constraint. The less we kick out, the better.
TODO: Make sure that kicking out really *is* a Bad Thing. We've assumed
this but haven't done the empirical study to check.
* Assume we have G>=G, G>=W, D>=D, and that's all. Then, when performing
a unification we add a new given a -G-> ty. But doing so does NOT require
us to kick out an inert wanted that mentions a, because of (K2a). This
is a common case, hence good not to kick out.
* Lemma (L1): The conditions of the Main Theorem imply that there is no
    (a -fs-> t) in S, s.t. (fs >= fw).
Proof. Suppose the contrary (fs >= fw). Then because of (T1),
S(fw,a)=a. But since fs>=fw, S(fw,a) = s, hence s=a. But now we
have (a -fs-> a) in S, which contradicts (WF2).
* The extended substitution satisfies (WF1) and (WF2)
  - (K1) plus (L1) guarantee that the extended substitution satisfies (WF1).
- (T3) guarantees (WF2).
* (K2) is about inertness. Intuitively, any infinite chain T^0(f,t),
  T^1(f,t), T^2(f,t).... must pass through the new work item infinitely
  often, since the substitution without the work item is inert; and must
  pass through at least one of the triples in S infinitely often.
- (K2a): if not(fs>=fs) then there is no f that fs can rewrite (fs>=f),
and hence this triple never plays a role in application S(f,a).
It is always safe to extend S with such a triple.
    (NB: we could strengthen (K1) in this way too, but see K3.)
- (K2b): If this holds, we can't pass through this triple infinitely
often, because if we did then fs>=f, fw>=f, hence fs>=fw,
contradicting (L1), or fw>=fs contradicting K2b.
  - (K2c): if a not in s, we have no further opportunity to apply the
work item.
NB: this reasoning isn't water tight.
Key lemma to make it watertight.
Under the conditions of the Main Theorem,
forall f st fw >= f, a is not in S^k(f,t), for any k
Also, consider roles more carefully. See Note [Flavours with roles].
Completeness
~~~~~~~~~~~~~
K3: completeness. (K3) is not necessary for the extended substitution
to be inert. In fact K1 could be made stronger by saying
... then (not (fw >= fs) or not (fs >= fs))
But it's not enough for S to be inert; we also want completeness.
That is, we want to be able to solve all soluble wanted equalities.
Suppose we have
work-item b -G-> a
inert-item a -W-> b
Assuming (G >= W) but not (W >= W), this fulfills all the conditions,
so we could extend the inerts, thus:
inert-items b -G-> a
a -W-> b
But if we kicked-out the inert item, we'd get
work-item a -W-> b
inert-item b -G-> a
Then rewriting the work-item gives us (a -W-> a), which is soluble via Refl.
So we add one more clause to the kick-out criteria
Another way to understand (K3) is that we treat an inert item
a -f-> b
in the same way as
b -f-> a
So if we kick out one, we should kick out the other. The orientation
is somewhat accidental.
When considering roles, we also need the second clause (K3b). Consider
inert-item a -W/R-> b c
work-item c -G/N-> a
The work-item doesn't get rewritten by the inert, because (>=) doesn't hold.
We've satisfied conditions (T1)-(T3) and (K1) and (K2). If all we had were
condition (K3a), then we would keep the inert around and add the work item.
But then, consider if we hit the following:
work-item2 b -G/N-> Id
where
newtype Id x = Id x
For similar reasons, if we only had (K3a), we wouldn't kick the
representational inert out. And then, we'd miss solving the inert, which
now reduced to reflexivity. The solution here is to kick out representational
inerts whenever the tyvar of a work item is "exposed", where exposed means
not under some proper data-type constructor, like [] or Maybe. See
isTyVarExposed in TcType. This is encoded in (K3b).
Note [Flavours with roles]
~~~~~~~~~~~~~~~~~~~~~~~~~~
The system described in Note [The inert equalities] discusses an abstract
set of flavours. In GHC, flavours have two components: the flavour proper,
taken from {Wanted, Derived, Given}; and the equality relation (often called
role), taken from {NomEq, ReprEq}. When substituting w.r.t. the inert set,
as described in Note [The inert equalities], we must be careful to respect
roles. For example, if we have
inert set: a -G/R-> Int
b -G/R-> Bool
type role T nominal representational
and we wish to compute S(W/R, T a b), the correct answer is T a Bool, NOT
T Int Bool. The reason is that T's first parameter has a nominal role, and
thus rewriting a to Int in T a b is wrong. Indeed, this non-congruence of
substitution means that the proof in Note [The inert equalities] may need
to be revisited, but we don't think that the end conclusion is wrong.
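A standalone declaration with exactly that role signature (illustrative only;
RoleAnnotations assumed):

  data T a b = MkT b
  type role T nominal representational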
-}
flattenTyVar :: FlattenEnv -> TcTyVar -> TcS (Xi, TcCoercion)
-- "Flattening" a type variable means to apply the substitution to it
-- The substitution is actually the union of
-- * the unifications that have taken place (either before the
-- solver started, or in TcInteract.solveByUnification)
-- * the CTyEqCans held in the inert set
--
-- Postcondition: co : xi ~ tv
flattenTyVar fmode tv
= do { mb_yes <- flattenTyVarOuter fmode tv
; case mb_yes of
Left tv' -> -- Done
do { traceTcS "flattenTyVar1" (ppr tv $$ ppr (tyVarKind tv'))
; return (ty', mkTcReflCo (feRole fmode) ty') }
where
ty' = mkTyVarTy tv'
Right (ty1, co1) -- Recurse
-> do { (ty2, co2) <- flatten_one fmode ty1
; traceTcS "flattenTyVar3" (ppr tv $$ ppr ty2)
; return (ty2, co2 `mkTcTransCo` co1) }
}
flattenTyVarOuter :: FlattenEnv -> TcTyVar
-> TcS (Either TyVar (TcType, TcCoercion))
-- Look up the tyvar in
-- a) the internal MetaTyVar box
-- b) the tyvar binds
-- c) the inerts
-- Return (Left tv') if it is not found, tv' has a properly zonked kind
--        (Right (ty, co)) if found, with co :: ty ~ tv
flattenTyVarOuter fmode tv
| not (isTcTyVar tv) -- Happens when flatten under a (forall a. ty)
= Left `liftM` flattenTyVarFinal fmode tv
-- So ty contains refernces to the non-TcTyVar a
| otherwise
= do { mb_ty <- isFilledMetaTyVar_maybe tv
; case mb_ty of {
Just ty -> do { traceTcS "Following filled tyvar" (ppr tv <+> equals <+> ppr ty)
; return (Right (ty, mkTcReflCo (feRole fmode) ty)) } ;
Nothing ->
-- Try in the inert equalities
-- See Definition [Applying a generalised substitution]
do { ieqs <- getInertEqs
; case lookupVarEnv ieqs tv of
Just (ct:_) -- If the first doesn't work,
-- the subsequent ones won't either
| CTyEqCan { cc_ev = ctev, cc_tyvar = tv, cc_rhs = rhs_ty } <- ct
, ctEvFlavourRole ctev `eqCanRewriteFR` feFlavourRole fmode
-> do { traceTcS "Following inert tyvar" (ppr tv <+> equals <+> ppr rhs_ty $$ ppr ctev)
; let rewrite_co1 = mkTcSymCo (ctEvCoercion ctev)
rewrite_co = case (ctEvEqRel ctev, fe_eq_rel fmode) of
(ReprEq, _rel) -> ASSERT( _rel == ReprEq )
-- if this ASSERT fails, then
-- eqCanRewriteFR answered incorrectly
rewrite_co1
(NomEq, NomEq) -> rewrite_co1
(NomEq, ReprEq) -> mkTcSubCo rewrite_co1
; return (Right (rhs_ty, rewrite_co)) }
                    -- NB: if ct is Derived then fmode must be also, hence
-- we are not going to touch the returned coercion
-- so ctEvCoercion is fine.
_other -> Left `liftM` flattenTyVarFinal fmode tv
} } }
flattenTyVarFinal :: FlattenEnv -> TcTyVar -> TcS TyVar
flattenTyVarFinal fmode tv
= -- Done, but make sure the kind is zonked
do { let kind = tyVarKind tv
kind_fmode = setFEMode fmode FM_SubstOnly
; (new_knd, _kind_co) <- flatten_one kind_fmode kind
; return (setVarType tv new_knd) }
{-
Note [An alternative story for the inert substitution]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(This entire note is just background, left here in case we ever want
to return to the previous state of affairs)
We used (GHC 7.8) to have this story for the inert substitution inert_eqs
* 'a' is not in fvs(ty)
* They are *inert* in the weaker sense that there is no infinite chain of
(i1 `eqCanRewrite` i2), (i2 `eqCanRewrite` i3), etc
This means that flattening must be recursive, but it does allow
[G] a ~ [b]
[G] b ~ Maybe c
This avoids "saturating" the Givens, which can save a modest amount of work.
It is easy to implement, in TcInteract.kick_out, by kicking out an inert
only if (a) the work item can rewrite the inert AND
(b) the inert cannot rewrite the work item
This is significantly harder to think about. It can save a LOT of work
in occurs-check cases, but we don't care about them much. Trac #5837
is an example; all the constraints here are Givens
[G] a ~ TF (a,Int)
-->
work TF (a,Int) ~ fsk
inert fsk ~ a
--->
work fsk ~ (TF a, TF Int)
inert fsk ~ a
--->
work a ~ (TF a, TF Int)
inert fsk ~ a
  ---> (attempting to flatten (TF a) so that it does not mention a)
work TF a ~ fsk2
inert a ~ (fsk2, TF Int)
inert fsk ~ (fsk2, TF Int)
---> (substitute for a)
work TF (fsk2, TF Int) ~ fsk2
inert a ~ (fsk2, TF Int)
inert fsk ~ (fsk2, TF Int)
---> (top-level reduction, re-orient)
work fsk2 ~ (TF fsk2, TF Int)
inert a ~ (fsk2, TF Int)
inert fsk ~ (fsk2, TF Int)
  ---> (attempt to flatten (TF fsk2) to get rid of fsk2)
work TF fsk2 ~ fsk3
work fsk2 ~ (fsk3, TF Int)
inert a ~ (fsk2, TF Int)
inert fsk ~ (fsk2, TF Int)
--->
work TF fsk2 ~ fsk3
inert fsk2 ~ (fsk3, TF Int)
inert a ~ ((fsk3, TF Int), TF Int)
inert fsk ~ ((fsk3, TF Int), TF Int)
Because the incoming given rewrites all the inert givens, we get more and
more duplication in the inert set. But this really only happens in pathological
cases, so we don't care.
-}
eqCanRewrite :: CtEvidence -> CtEvidence -> Bool
eqCanRewrite ev1 ev2 = ctEvFlavourRole ev1 `eqCanRewriteFR` ctEvFlavourRole ev2
-- | Whether or not one 'Ct' can rewrite another is determined by its
-- flavour and its equality relation
type CtFlavourRole = (CtFlavour, EqRel)
-- | Extract the flavour and role from a 'CtEvidence'
ctEvFlavourRole :: CtEvidence -> CtFlavourRole
ctEvFlavourRole ev = (ctEvFlavour ev, ctEvEqRel ev)
-- | Extract the flavour and role from a 'Ct'
ctFlavourRole :: Ct -> CtFlavourRole
ctFlavourRole = ctEvFlavourRole . cc_ev
-- | Extract the flavour and role from a 'FlattenEnv'
feFlavourRole :: FlattenEnv -> CtFlavourRole
feFlavourRole (FE { fe_flavour = flav, fe_eq_rel = eq_rel })
= (flav, eq_rel)
eqCanRewriteFR :: CtFlavourRole -> CtFlavourRole -> Bool
-- Very important function!
-- See Note [eqCanRewrite]
eqCanRewriteFR (Given, NomEq) (_, _) = True
eqCanRewriteFR (Given, ReprEq) (_, ReprEq) = True
eqCanRewriteFR _ _ = False
canRewriteOrSame :: CtEvidence -> CtEvidence -> Bool
-- See Note [canRewriteOrSame]
canRewriteOrSame ev1 ev2 = ev1 `eqCanRewrite` ev2 ||
ctEvFlavourRole ev1 == ctEvFlavourRole ev2
canRewriteOrSameFR :: CtFlavourRole -> CtFlavourRole -> Bool
canRewriteOrSameFR fr1 fr2 = fr1 `eqCanRewriteFR` fr2 || fr1 == fr2
{-
Note [eqCanRewrite]
~~~~~~~~~~~~~~~~~~~
(eqCanRewrite ct1 ct2) holds if the constraint ct1 (a CTyEqCan of form
tv ~ ty) can be used to rewrite ct2. It must satisfy the properties of
a can-rewrite relation, see Definition [Can-rewrite relation]
At the moment we don't allow Wanteds to rewrite Wanteds, because that can give
rise to very confusing type error messages. A good example is Trac #8450.
Here's another
f :: a -> Bool
f x = ( [x,'c'], [x,True] ) `seq` True
Here we get
[W] a ~ Char
[W] a ~ Bool
but we do not want to complain about Bool ~ Char!
Accordingly, we also don't let Deriveds rewrite Deriveds.
With the solver handling Coercible constraints like equality constraints,
the rewrite conditions must take role into account, never allowing
a representational equality to rewrite a nominal one.
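A standalone illustration of the role restriction (not from this module): a
newtype

  newtype Age = MkAge Int

gives a representational equality Age ~R Int, but it must not rewrite inside
a nominal position such as (F Age ~ fsk) for a type family F, since F may
well distinguish Age from Int.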
Note [canRewriteOrSame]
~~~~~~~~~~~~~~~~~~~~~~~
canRewriteOrSame is similar but
* returns True for Wanted/Wanted.
* works for all kinds of constraints, not just CTyEqCans
See the call sites for explanations.
************************************************************************
* *
Unflattening
* *
************************************************************************
An unflattening example:
[W] F a ~ alpha
flattens to
[W] F a ~ fmv (CFunEqCan)
[W] fmv ~ alpha (CTyEqCan)
We must solve both!
-}
unflatten :: Cts -> Cts -> TcS Cts
unflatten tv_eqs funeqs
= do { dflags <- getDynFlags
; tclvl <- getTcLevel
; traceTcS "Unflattening" $ braces $
vcat [ ptext (sLit "Funeqs =") <+> pprCts funeqs
, ptext (sLit "Tv eqs =") <+> pprCts tv_eqs ]
-- Step 1: unflatten the CFunEqCans, except if that causes an occurs check
-- See Note [Unflatten using funeqs first]
; funeqs <- foldrBagM (unflatten_funeq dflags) emptyCts funeqs
; traceTcS "Unflattening 1" $ braces (pprCts funeqs)
-- Step 2: unify the irreds, if possible
; tv_eqs <- foldrBagM (unflatten_eq dflags tclvl) emptyCts tv_eqs
; traceTcS "Unflattening 2" $ braces (pprCts tv_eqs)
-- Step 3: fill any remaining fmvs with fresh unification variables
; funeqs <- mapBagM finalise_funeq funeqs
; traceTcS "Unflattening 3" $ braces (pprCts funeqs)
-- Step 4: remove any irreds that look like ty ~ ty
; tv_eqs <- foldrBagM finalise_eq emptyCts tv_eqs
; let all_flat = tv_eqs `andCts` funeqs
; traceTcS "Unflattening done" $ braces (pprCts all_flat)
; return all_flat }
where
----------------
unflatten_funeq :: DynFlags -> Ct -> Cts -> TcS Cts
unflatten_funeq dflags ct@(CFunEqCan { cc_fun = tc, cc_tyargs = xis
, cc_fsk = fmv, cc_ev = ev }) rest
= do { -- fmv should be a flatten meta-tv; we now fix its final
-- value, and then zonking will eliminate it
filled <- tryFill dflags fmv (mkTyConApp tc xis) ev
; return (if filled then rest else ct `consCts` rest) }
unflatten_funeq _ other_ct _
= pprPanic "unflatten_funeq" (ppr other_ct)
----------------
finalise_funeq :: Ct -> TcS Ct
finalise_funeq (CFunEqCan { cc_fsk = fmv, cc_ev = ev })
= do { demoteUnfilledFmv fmv
; return (mkNonCanonical ev) }
finalise_funeq ct = pprPanic "finalise_funeq" (ppr ct)
----------------
unflatten_eq :: DynFlags -> TcLevel -> Ct -> Cts -> TcS Cts
unflatten_eq dflags tclvl ct@(CTyEqCan { cc_ev = ev, cc_tyvar = tv, cc_rhs = rhs }) rest
| isFmvTyVar tv
= do { lhs_elim <- tryFill dflags tv rhs ev
; if lhs_elim then return rest else
do { rhs_elim <- try_fill dflags tclvl ev rhs (mkTyVarTy tv)
; if rhs_elim then return rest else
return (ct `consCts` rest) } }
| otherwise
= return (ct `consCts` rest)
unflatten_eq _ _ ct _ = pprPanic "unflatten_irred" (ppr ct)
----------------
finalise_eq :: Ct -> Cts -> TcS Cts
finalise_eq (CTyEqCan { cc_ev = ev, cc_tyvar = tv
, cc_rhs = rhs, cc_eq_rel = eq_rel }) rest
| isFmvTyVar tv
= do { ty1 <- zonkTcTyVar tv
; ty2 <- zonkTcType rhs
; let is_refl = ty1 `tcEqType` ty2
; if is_refl then do { when (isWanted ev) $
setEvBind (ctEvId ev)
(EvCoercion $
mkTcReflCo (eqRelRole eq_rel) rhs)
; return rest }
else return (mkNonCanonical ev `consCts` rest) }
| otherwise
= return (mkNonCanonical ev `consCts` rest)
finalise_eq ct _ = pprPanic "finalise_irred" (ppr ct)
----------------
try_fill dflags tclvl ev ty1 ty2
| Just tv1 <- tcGetTyVar_maybe ty1
, isTouchableOrFmv tclvl tv1
, typeKind ty1 `isSubKind` tyVarKind tv1
= tryFill dflags tv1 ty2 ev
| otherwise
= return False
tryFill :: DynFlags -> TcTyVar -> TcType -> CtEvidence -> TcS Bool
-- (tryFill tv rhs ev) sees if 'tv' is an un-filled MetaTv
-- If so, and if tv does not appear in 'rhs', set tv := rhs,
-- bind the evidence (which should be a CtWanted) to Refl<rhs>
-- and return True. Otherwise return False
tryFill dflags tv rhs ev
= ASSERT2( not (isGiven ev), ppr ev )
do { is_filled <- isFilledMetaTyVar tv
; if is_filled then return False else
do { rhs' <- zonkTcType rhs
; case occurCheckExpand dflags tv rhs' of
OC_OK rhs'' -- Normal case: fill the tyvar
-> do { when (isWanted ev) $
setEvBind (ctEvId ev)
(EvCoercion (mkTcReflCo (ctEvRole ev) rhs''))
; setWantedTyBind tv rhs''
; return True }
_ -> -- Occurs check
return False } }
{-
Note [Unflatten using funeqs first]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
[W] G a ~ Int
[W] F (G a) ~ G a
do not want to end up with
   [W] F Int ~ Int
because that might actually hold! Better to end up with the two above
unsolved constraints. The flat form will be
G a ~ fmv1 (CFunEqCan)
F fmv1 ~ fmv2 (CFunEqCan)
fmv1 ~ Int (CTyEqCan)
fmv1 ~ fmv2 (CTyEqCan)
Unflatten using the fun-eqs first.
-}
-- | Change the 'EqRel' in a 'FlattenEnv'. Avoids allocating a
-- new 'FlattenEnv' where possible.
setFEEqRel :: FlattenEnv -> EqRel -> FlattenEnv
setFEEqRel fmode@(FE { fe_eq_rel = old_eq_rel }) new_eq_rel
| old_eq_rel == new_eq_rel = fmode
| otherwise = fmode { fe_eq_rel = new_eq_rel }
-- | Change the 'FlattenMode' in a 'FlattenEnv'. Avoids allocating
-- a new 'FlattenEnv' where possible.
setFEMode :: FlattenEnv -> FlattenMode -> FlattenEnv
setFEMode fmode@(FE { fe_mode = old_mode }) new_mode
| old_mode `eq` new_mode = fmode
| otherwise = fmode { fe_mode = new_mode }
where
FM_FlattenAll `eq` FM_FlattenAll = True
FM_SubstOnly `eq` FM_SubstOnly = True
FM_Avoid tv1 b1 `eq` FM_Avoid tv2 b2 = tv1 == tv2 && b1 == b2
_ `eq` _ = False
| forked-upstream-packages-for-ghcjs/ghc | compiler/typecheck/TcFlatten.hs | bsd-3-clause | 58,044 | 47 | 27 | 16,494 | 5,179 | 2,789 | 2,390 | -1 | -1 |
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
{-# LANGUAGE RecordWildCards #-}
import Data.Foldable (for_)
import Test.Hspec (Spec, describe, it, shouldBe)
import Test.Hspec.Runner (configFastFail, defaultConfig, hspecWith)
import CollatzConjecture (collatz)
main :: IO ()
main = hspecWith defaultConfig {configFastFail = True} specs
specs :: Spec
specs = describe "collatz" $ for_ cases test
where
test Case{..} = it description assertion
where
assertion = collatz number `shouldBe` expected
data Case = Case { description :: String
, number :: Integer
, expected :: Maybe Integer
}
cases :: [Case]
cases = [ Case { description = "zero steps for one"
, number = 1
, expected = Just 0
}
, Case { description = "divide if even"
, number = 16
, expected = Just 4
}
, Case { description = "even and odd steps"
, number = 12
, expected = Just 9
}
, Case { description = "Large number of even and odd steps"
, number = 1000000
, expected = Just 152
}
, Case { description = "zero is an error"
, number = 0
, expected = Nothing
}
, Case { description = "negative value is an error"
, number = -15
, expected = Nothing
}
]
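-- The CollatzConjecture module under test is not included here. A minimal
-- sketch of a 'collatz' consistent with the cases above (an assumption, not
-- the exercise's reference solution) would be:
--
--   collatz :: Integer -> Maybe Integer
--   collatz n
--     | n < 1     = Nothing
--     | otherwise = Just (go 0 n)
--     where
--       go steps 1 = steps
--       go steps k
--         | even k    = go (steps + 1) (k `div` 2)
--         | otherwise = go (steps + 1) (3 * k + 1)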
| mukeshtiwari/Excercism | haskell/collatz-conjecture/test/Tests.hs | mit | 1,578 | 0 | 10 | 665 | 339 | 202 | 137 | 34 | 1 |
module ComplexParamIn2 where
--The application of a function is replaced by the right-hand side of the definition,
--with actual parameters replacing formals.
data Tup a b = Tup a b
--In this example, unfold the first 'sq' in 'sumSquares'
--This example aims to test unfolding a definition with guards.
sumSquares x y = (case (x, y) of
(m, n) -> m ^ n)
sq (Tup n m) = m^n
| mpickering/HaRe | old/testing/foldDef/ComplexParamIn2.hs | bsd-3-clause | 402 | 0 | 9 | 99 | 80 | 47 | 33 | 5 | 1 |
module A2 where
--Any type/data constructor name declared in this module can be renamed.
--Any type variable can be renamed.
--Rename type Constructor 'BTree' to 'MyBTree'
data BTree a = Empty | T a (BTree a) (BTree a)
deriving Show
buildtree :: Ord a => [a] -> BTree a
buildtree [] = Empty
buildtree (x:xs) = insert x (buildtree xs)
insert :: Ord a => a -> BTree a -> BTree a
insert val Empty = T val Empty Empty
insert val t@(T tval left right)
| val > tval = T tval left (insert val right)
| otherwise = t
main :: BTree Int
main = buildtree [3,1,2]
| kmate/HaRe | old/testing/unfoldAsPatterns/A2.hs | bsd-3-clause | 592 | 0 | 8 | 149 | 222 | 114 | 108 | 13 | 1 |
module Main (main) where
import qualified Distribution.ModuleName as ModuleName
import Distribution.PackageDescription
import Distribution.PackageDescription.Check hiding (doesFileExist)
import Distribution.PackageDescription.Configuration
import Distribution.PackageDescription.Parse
import Distribution.Package
import Distribution.System
import Distribution.Simple
import Distribution.Simple.Configure
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.GHC
import Distribution.Simple.Program
import Distribution.Simple.Program.HcPkg
import Distribution.Simple.Setup (ConfigFlags(configStripLibs), fromFlag, toFlag)
import Distribution.Simple.Utils (defaultPackageDesc, writeFileAtomic, toUTF8)
import Distribution.Simple.Build (writeAutogenFiles)
import Distribution.Simple.Register
import Distribution.Text
import Distribution.Verbosity
import qualified Distribution.InstalledPackageInfo as Installed
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Control.Exception (bracket)
import Control.Monad
import qualified Data.ByteString.Lazy.Char8 as BS
import Data.List
import Data.Maybe
import System.IO
import System.Directory
import System.Environment
import System.Exit (exitWith, ExitCode(..))
import System.FilePath
main :: IO ()
main = do hSetBuffering stdout LineBuffering
args <- getArgs
case args of
"hscolour" : dir : distDir : args' ->
runHsColour dir distDir args'
"check" : dir : [] ->
doCheck dir
"copy" : dir : distDir
: strip : myDestDir : myPrefix : myLibdir : myDocdir
: ghcLibWays : args' ->
doCopy dir distDir
strip myDestDir myPrefix myLibdir myDocdir
("dyn" `elem` words ghcLibWays)
args'
"register" : dir : distDir : ghc : ghcpkg : topdir
: myDestDir : myPrefix : myLibdir : myDocdir
: relocatableBuild : args' ->
doRegister dir distDir ghc ghcpkg topdir
myDestDir myPrefix myLibdir myDocdir
relocatableBuild args'
"configure" : dir : distDir : dll0Modules : config_args ->
generate dir distDir dll0Modules config_args
"sdist" : dir : distDir : [] ->
doSdist dir distDir
["--version"] ->
defaultMainArgs ["--version"]
_ -> die syntax_error
syntax_error :: [String]
syntax_error =
["syntax: ghc-cabal configure <configure-args> -- <distdir> <directory>...",
" ghc-cabal install <ghc-pkg> <directory> <distdir> <destdir> <prefix> <args>...",
" ghc-cabal hscolour <distdir> <directory> <args>..."]
die :: [String] -> IO a
die errs = do mapM_ (hPutStrLn stderr) errs
exitWith (ExitFailure 1)
withCurrentDirectory :: FilePath -> IO a -> IO a
withCurrentDirectory directory io
= bracket (getCurrentDirectory) (setCurrentDirectory)
(const (setCurrentDirectory directory >> io))
-- We need to use the autoconfUserHooks, as the packages that use
-- configure can create a .buildinfo file, and we need any info that
-- ends up in it.
userHooks :: UserHooks
userHooks = autoconfUserHooks
runDefaultMain :: IO ()
runDefaultMain
= do let verbosity = normal
gpdFile <- defaultPackageDesc verbosity
gpd <- readPackageDescription verbosity gpdFile
case buildType (flattenPackageDescription gpd) of
Just Configure -> defaultMainWithHooks autoconfUserHooks
-- time has a "Custom" Setup.hs, but it's actually Configure
-- plus a "./Setup test" hook. However, Cabal is also
-- "Custom", but doesn't have a configure script.
Just Custom ->
do configureExists <- doesFileExist "configure"
if configureExists
then defaultMainWithHooks autoconfUserHooks
else defaultMain
-- not quite right, but good enough for us:
_ -> defaultMain
doSdist :: FilePath -> FilePath -> IO ()
doSdist directory distDir
= withCurrentDirectory directory
$ withArgs (["sdist", "--builddir", distDir])
runDefaultMain
doCheck :: FilePath -> IO ()
doCheck directory
= withCurrentDirectory directory
$ do let verbosity = normal
gpdFile <- defaultPackageDesc verbosity
gpd <- readPackageDescription verbosity gpdFile
case filter isFailure $ checkPackage gpd Nothing of
[] -> return ()
errs -> mapM_ print errs >> exitWith (ExitFailure 1)
where isFailure (PackageDistSuspicious {}) = False
isFailure (PackageDistSuspiciousWarn {}) = False
isFailure _ = True
runHsColour :: FilePath -> FilePath -> [String] -> IO ()
runHsColour directory distdir args
= withCurrentDirectory directory
$ defaultMainArgs ("hscolour" : "--builddir" : distdir : args)
doCopy :: FilePath -> FilePath
-> FilePath -> FilePath -> FilePath -> FilePath -> FilePath -> Bool
-> [String]
-> IO ()
doCopy directory distDir
strip myDestDir myPrefix myLibdir myDocdir withSharedLibs
args
= withCurrentDirectory directory $ do
let copyArgs = ["copy", "--builddir", distDir]
++ (if null myDestDir
then []
else ["--destdir", myDestDir])
++ args
copyHooks = userHooks {
copyHook = noGhcPrimHook
$ modHook False
$ copyHook userHooks
}
defaultMainWithHooksArgs copyHooks copyArgs
where
noGhcPrimHook f pd lbi us flags
= let pd'
| packageName pd == PackageName "ghc-prim" =
case library pd of
Just lib ->
let ghcPrim = fromJust (simpleParse "GHC.Prim")
ems = filter (ghcPrim /=) (exposedModules lib)
lib' = lib { exposedModules = ems }
in pd { library = Just lib' }
Nothing ->
error "Expected a library, but none found"
| otherwise = pd
in f pd' lbi us flags
modHook relocatableBuild f pd lbi us flags
= do let verbosity = normal
idts = updateInstallDirTemplates relocatableBuild
myPrefix myLibdir myDocdir
(installDirTemplates lbi)
progs = withPrograms lbi
stripProgram' = stripProgram {
programFindLocation = \_ _ -> return (Just strip) }
progs' <- configureProgram verbosity stripProgram' progs
let lbi' = lbi {
withPrograms = progs',
installDirTemplates = idts,
configFlags = cfg,
stripLibs = fromFlag (configStripLibs cfg),
withSharedLib = withSharedLibs
}
                 -- This hack allows us to interpret the "strip"
                 -- command-line argument being set to ':' as signifying
                 -- disabled library stripping
cfg | strip == ":" = (configFlags lbi) { configStripLibs = toFlag False }
| otherwise = configFlags lbi
f pd lbi' us flags
doRegister :: FilePath -> FilePath -> FilePath -> FilePath
-> FilePath -> FilePath -> FilePath -> FilePath -> FilePath
-> String -> [String]
-> IO ()
doRegister directory distDir ghc ghcpkg topdir
myDestDir myPrefix myLibdir myDocdir
relocatableBuildStr args
= withCurrentDirectory directory $ do
relocatableBuild <- case relocatableBuildStr of
"YES" -> return True
"NO" -> return False
_ -> die ["Bad relocatableBuildStr: " ++
show relocatableBuildStr]
let regArgs = "register" : "--builddir" : distDir : args
regHooks = userHooks {
regHook = modHook relocatableBuild
$ regHook userHooks
}
defaultMainWithHooksArgs regHooks regArgs
where
modHook relocatableBuild f pd lbi us flags
= do let verbosity = normal
idts = updateInstallDirTemplates relocatableBuild
myPrefix myLibdir myDocdir
(installDirTemplates lbi)
progs = withPrograms lbi
ghcpkgconf = topdir </> "package.conf.d"
ghcProgram' = ghcProgram {
programPostConf = \_ cp -> return cp { programDefaultArgs = ["-B" ++ topdir] },
programFindLocation = \_ _ -> return (Just ghc) }
ghcPkgProgram' = ghcPkgProgram {
programPostConf = \_ cp -> return cp { programDefaultArgs =
["--global-package-db", ghcpkgconf]
++ ["--force" | not (null myDestDir) ] },
programFindLocation = \_ _ -> return (Just ghcpkg) }
configurePrograms ps conf = foldM (flip (configureProgram verbosity)) conf ps
progs' <- configurePrograms [ghcProgram', ghcPkgProgram'] progs
instInfos <- dump (hcPkgInfo progs') verbosity GlobalPackageDB
let installedPkgs' = PackageIndex.fromList instInfos
let updateComponentConfig (cn, clbi, deps)
= (cn, updateComponentLocalBuildInfo clbi, deps)
updateComponentLocalBuildInfo clbi
= clbi {
componentPackageDeps =
[ (fixupPackageId instInfos ipid, pid)
| (ipid,pid) <- componentPackageDeps clbi ]
}
ccs' = map updateComponentConfig (componentsConfigs lbi)
lbi' = lbi {
componentsConfigs = ccs',
installedPkgs = installedPkgs',
installDirTemplates = idts,
withPrograms = progs'
}
f pd lbi' us flags
updateInstallDirTemplates :: Bool -> FilePath -> FilePath -> FilePath
-> InstallDirTemplates
-> InstallDirTemplates
updateInstallDirTemplates relocatableBuild myPrefix myLibdir myDocdir idts
= idts {
prefix = toPathTemplate $
if relocatableBuild
then "$topdir"
else myPrefix,
libdir = toPathTemplate $
if relocatableBuild
then "$topdir"
else myLibdir,
libsubdir = toPathTemplate "$libname",
docdir = toPathTemplate $
if relocatableBuild
then "$topdir/../doc/html/libraries/$pkgid"
else (myDocdir </> "$pkgid"),
htmldir = toPathTemplate "$docdir"
}
-- The packages are built with the package ID ending in "-inplace", but
-- when they're installed they get the package hash appended. We need to
-- fix up the package deps so that they use the hash package IDs, not
-- the inplace package IDs.
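-- For illustration only (the IDs below are made up): an in-place ID such as
-- "foo-1.0-inplace" is rewritten by matching "foo-1.0-" as a prefix of the
-- registered IDs, e.g.
--
-- > fixupPackageId ipinfos (InstalledPackageId "foo-1.0-inplace")
-- >   -- == InstalledPackageId "foo-1.0-0123abcd"   (whichever hashed ID is registered)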
fixupPackageId :: [Installed.InstalledPackageInfo]
-> InstalledPackageId
-> InstalledPackageId
fixupPackageId _ x@(InstalledPackageId ipi)
| "builtin_" `isPrefixOf` ipi = x
fixupPackageId ipinfos (InstalledPackageId ipi)
= case stripPrefix (reverse "-inplace") $ reverse ipi of
Nothing ->
error ("Installed package ID doesn't end in -inplace: " ++ show ipi)
Just x ->
let ipi' = reverse ('-' : x)
f (ipinfo : ipinfos') = case Installed.installedPackageId ipinfo of
y@(InstalledPackageId ipinfoid)
| ipi' `isPrefixOf` ipinfoid ->
y
_ ->
f ipinfos'
f [] = error ("Installed package ID not registered: " ++ show ipi)
in f ipinfos
-- On Windows we need to split the ghc package into 2 pieces, or the
-- DLL that it makes contains too many symbols (#5987). There are
-- therefore 2 libraries, not just the 1 that Cabal assumes.
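-- For illustration only (the library name below is made up): an IPI with
-- @hsLibraries = ["HSghc-7.8.1"]@ is rewritten to
-- @["HSghc-7.8.1", "HSghc-7.8.1-0"]@, so that both halves of the split DLL
-- get registered.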
mangleIPI :: FilePath -> FilePath -> LocalBuildInfo
-> Installed.InstalledPackageInfo -> Installed.InstalledPackageInfo
mangleIPI "compiler" "stage2" lbi ipi
| isWindows =
-- Cabal currently only ever installs ONE Haskell library, c.f.
-- the code in Cabal.Distribution.Simple.Register. If it
-- ever starts installing more we'll have to find the
-- library that's too big and split that.
let [old_hslib] = Installed.hsLibraries ipi
in ipi {
Installed.hsLibraries = [old_hslib, old_hslib ++ "-0"]
}
where isWindows = case hostPlatform lbi of
Platform _ Windows -> True
_ -> False
mangleIPI _ _ _ ipi = ipi
generate :: FilePath -> FilePath -> String -> [String] -> IO ()
generate directory distdir dll0Modules config_args
= withCurrentDirectory directory
$ do let verbosity = normal
-- XXX We shouldn't just configure with the default flags
-- XXX And this, and thus the "getPersistBuildConfig distdir" below,
-- aren't going to work when the deps aren't built yet
withArgs (["configure", "--distdir", distdir] ++ config_args)
runDefaultMain
lbi <- getPersistBuildConfig distdir
let pd0 = localPkgDescr lbi
writePersistBuildConfig distdir lbi
hooked_bi <-
if (buildType pd0 == Just Configure) || (buildType pd0 == Just Custom)
then do
maybe_infoFile <- defaultHookedPackageDesc
case maybe_infoFile of
Nothing -> return emptyHookedBuildInfo
Just infoFile -> readHookedBuildInfo verbosity infoFile
else
return emptyHookedBuildInfo
let pd = updatePackageDescription hooked_bi pd0
-- generate Paths_<pkg>.hs and cabal-macros.h
writeAutogenFiles verbosity pd lbi
-- generate inplace-pkg-config
withLibLBI pd lbi $ \lib clbi ->
do cwd <- getCurrentDirectory
let ipid = InstalledPackageId (display (packageId pd) ++ "-inplace")
let installedPkgInfo = inplaceInstalledPackageInfo cwd distdir
pd ipid lib lbi clbi
final_ipi = mangleIPI directory distdir lbi $ installedPkgInfo {
Installed.installedPackageId = ipid,
Installed.haddockHTMLs = []
}
content = Installed.showInstalledPackageInfo final_ipi ++ "\n"
writeFileAtomic (distdir </> "inplace-pkg-config") (BS.pack $ toUTF8 content)
let
comp = compiler lbi
libBiModules lib = (libBuildInfo lib, libModules lib)
exeBiModules exe = (buildInfo exe, ModuleName.main : exeModules exe)
biModuless = (maybeToList $ fmap libBiModules $ library pd)
++ (map exeBiModules $ executables pd)
buildableBiModuless = filter isBuildable biModuless
where isBuildable (bi', _) = buildable bi'
(bi, modules) = case buildableBiModuless of
[] -> error "No buildable component found"
[biModules] -> biModules
_ -> error ("XXX ghc-cabal can't handle " ++
"more than one buildinfo yet")
-- XXX Another Just...
Just ghcProg = lookupProgram ghcProgram (withPrograms lbi)
dep_pkgs = PackageIndex.topologicalOrder (packageHacks (installedPkgs lbi))
forDeps f = concatMap f dep_pkgs
-- copied from Distribution.Simple.PreProcess.ppHsc2Hs
packageHacks = case compilerFlavor (compiler lbi) of
GHC -> hackRtsPackage
_ -> id
-- We don't link in the actual Haskell libraries of our
-- dependencies, so the -u flags in the ldOptions of the rts
             -- package mean linking fails on OS X (its ld is a tad
-- stricter than gnu ld). Thus we remove the ldOptions for
-- GHC's rts package:
hackRtsPackage index =
case PackageIndex.lookupPackageName index (PackageName "rts") of
[(_,[rts])] ->
PackageIndex.insert rts{
Installed.ldOptions = [],
Installed.libraryDirs = filter (not . ("gcc-lib" `isSuffixOf`)) (Installed.libraryDirs rts)} index
-- GHC <= 6.12 had $topdir/gcc-lib in their
-- library-dirs for the rts package, which causes
-- problems when we try to use the in-tree mingw,
-- due to accidentally picking up the incompatible
-- libraries there. So we filter out gcc-lib from
-- the RTS's library-dirs here.
_ -> error "No (or multiple) ghc rts package is registered!!"
dep_ids = map snd (externalPackageDeps lbi)
deps = map display dep_ids
dep_direct = map (fromMaybe (error "ghc-cabal: dep_keys failed")
. PackageIndex.lookupInstalledPackageId
(installedPkgs lbi)
. fst)
. externalPackageDeps
$ lbi
dep_ipids = map (display . Installed.installedPackageId) dep_direct
depLibNames
| packageKeySupported comp
= map (display . Installed.libraryName) dep_direct
| otherwise = deps
depNames = map (display . packageName) dep_ids
transitive_dep_ids = map Installed.sourcePackageId dep_pkgs
transitiveDeps = map display transitive_dep_ids
transitiveDepLibNames
| packageKeySupported comp
= map (display . Installed.libraryName) dep_pkgs
| otherwise = transitiveDeps
transitiveDepNames = map (display . packageName) transitive_dep_ids
libraryDirs = forDeps Installed.libraryDirs
-- The mkLibraryRelDir function is a bit of a hack.
-- Ideally it should be handled in the makefiles instead.
mkLibraryRelDir "rts" = "rts/dist/build"
mkLibraryRelDir "ghc" = "compiler/stage2/build"
mkLibraryRelDir "Cabal" = "libraries/Cabal/Cabal/dist-install/build"
mkLibraryRelDir l = "libraries/" ++ l ++ "/dist-install/build"
libraryRelDirs = map mkLibraryRelDir transitiveDepNames
wrappedIncludeDirs <- wrap $ forDeps Installed.includeDirs
wrappedLibraryDirs <- wrap libraryDirs
let variablePrefix = directory ++ '_':distdir
mods = map display modules
otherMods = map display (otherModules bi)
allMods = mods ++ otherMods
let xs = [variablePrefix ++ "_VERSION = " ++ display (pkgVersion (package pd)),
-- TODO: move inside withLibLBI
variablePrefix ++ "_PACKAGE_KEY = " ++ display (localPackageKey lbi),
-- copied from mkComponentsLocalBuildInfo
variablePrefix ++ "_LIB_NAME = " ++ display (localLibraryName lbi),
variablePrefix ++ "_MODULES = " ++ unwords mods,
variablePrefix ++ "_HIDDEN_MODULES = " ++ unwords otherMods,
variablePrefix ++ "_SYNOPSIS =" ++ synopsis pd,
variablePrefix ++ "_HS_SRC_DIRS = " ++ unwords (hsSourceDirs bi),
variablePrefix ++ "_DEPS = " ++ unwords deps,
variablePrefix ++ "_DEP_IPIDS = " ++ unwords dep_ipids,
variablePrefix ++ "_DEP_NAMES = " ++ unwords depNames,
variablePrefix ++ "_DEP_LIB_NAMES = " ++ unwords depLibNames,
variablePrefix ++ "_TRANSITIVE_DEPS = " ++ unwords transitiveDeps,
variablePrefix ++ "_TRANSITIVE_DEP_LIB_NAMES = " ++ unwords transitiveDepLibNames,
variablePrefix ++ "_TRANSITIVE_DEP_NAMES = " ++ unwords transitiveDepNames,
variablePrefix ++ "_INCLUDE_DIRS = " ++ unwords (includeDirs bi),
variablePrefix ++ "_INCLUDES = " ++ unwords (includes bi),
variablePrefix ++ "_INSTALL_INCLUDES = " ++ unwords (installIncludes bi),
variablePrefix ++ "_EXTRA_LIBRARIES = " ++ unwords (extraLibs bi),
variablePrefix ++ "_EXTRA_LIBDIRS = " ++ unwords (extraLibDirs bi),
variablePrefix ++ "_C_SRCS = " ++ unwords (cSources bi),
variablePrefix ++ "_CMM_SRCS := $(addprefix cbits/,$(notdir $(wildcard " ++ directory ++ "/cbits/*.cmm)))",
variablePrefix ++ "_DATA_FILES = " ++ unwords (dataFiles pd),
-- XXX This includes things it shouldn't, like:
-- -odir dist-bootstrapping/build
variablePrefix ++ "_HC_OPTS = " ++ escape (unwords
( programDefaultArgs ghcProg
++ hcOptions GHC bi
++ languageToFlags (compiler lbi) (defaultLanguage bi)
++ extensionsToFlags (compiler lbi) (usedExtensions bi)
++ programOverrideArgs ghcProg)),
variablePrefix ++ "_CC_OPTS = " ++ unwords (ccOptions bi),
variablePrefix ++ "_CPP_OPTS = " ++ unwords (cppOptions bi),
variablePrefix ++ "_LD_OPTS = " ++ unwords (ldOptions bi),
variablePrefix ++ "_DEP_INCLUDE_DIRS_SINGLE_QUOTED = " ++ unwords wrappedIncludeDirs,
variablePrefix ++ "_DEP_CC_OPTS = " ++ unwords (forDeps Installed.ccOptions),
variablePrefix ++ "_DEP_LIB_DIRS_SINGLE_QUOTED = " ++ unwords wrappedLibraryDirs,
variablePrefix ++ "_DEP_LIB_DIRS_SEARCHPATH = " ++ mkSearchPath libraryDirs,
variablePrefix ++ "_DEP_LIB_REL_DIRS = " ++ unwords libraryRelDirs,
variablePrefix ++ "_DEP_LIB_REL_DIRS_SEARCHPATH = " ++ mkSearchPath libraryRelDirs,
variablePrefix ++ "_DEP_EXTRA_LIBS = " ++ unwords (forDeps Installed.extraLibraries),
variablePrefix ++ "_DEP_LD_OPTS = " ++ unwords (forDeps Installed.ldOptions),
variablePrefix ++ "_BUILD_GHCI_LIB = " ++ boolToYesNo (withGHCiLib lbi),
"",
-- Sometimes we need to modify the automatically-generated package-data.mk
-- bindings in a special way for the GHC build system, so allow that here:
"$(eval $(" ++ directory ++ "_PACKAGE_MAGIC))"
]
writeFile (distdir ++ "/package-data.mk") $ unlines xs
writeFileUtf8 (distdir ++ "/haddock-prologue.txt") $
if null (description pd) then synopsis pd
else description pd
unless (null dll0Modules) $
do let dll0Mods = words dll0Modules
dllMods = allMods \\ dll0Mods
dllModSets = map unwords [dll0Mods, dllMods]
writeFile (distdir ++ "/dll-split") $ unlines dllModSets
where
escape = foldr (\c xs -> if c == '#' then '\\':'#':xs else c:xs) []
wrap = mapM wrap1
wrap1 s
| null s = die ["Wrapping empty value"]
| '\'' `elem` s = die ["Single quote in value to be wrapped:", s]
-- We want to be able to assume things like <space><quote> is the
-- start of a value, so check there are no spaces in confusing
-- positions
| head s == ' ' = die ["Leading space in value to be wrapped:", s]
| last s == ' ' = die ["Trailing space in value to be wrapped:", s]
| otherwise = return ("\'" ++ s ++ "\'")
mkSearchPath = intercalate [searchPathSeparator]
boolToYesNo True = "YES"
boolToYesNo False = "NO"
-- | Version of 'writeFile' that always uses UTF8 encoding
writeFileUtf8 f txt = withFile f WriteMode $ \hdl -> do
hSetEncoding hdl utf8
hPutStr hdl txt
| acowley/ghc | utils/ghc-cabal/Main.hs | bsd-3-clause | 25,173 | 0 | 23 | 8,987 | 4,919 | 2,515 | 2,404 | 410 | 13 |
{-
Type table: list of all register types
Part of Mackerel: a strawman device definition DSL for Barrelfish
Copyright (c) 2007, 2008, ETH Zurich.
All rights reserved.
This file is distributed under the terms in the attached LICENSE file.
If you do not find this file, copies can be found by writing to:
ETH Zurich D-INFK, Haldeneggsteig 4, CH-8092 Zurich. Attn: Systems Group.
-}
module TypeTable where
import MackerelParser
import Attr
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Pos
import qualified Fields as F
import qualified TypeName as TN
{--------------------------------------------------------------------
--------------------------------------------------------------------}
data Val = Val { cname :: String,
cval :: Expr,
cdesc :: String,
ctype :: TN.Name,
cpos :: SourcePos }
deriving Show
data Rec = RegFormat { tt_name :: TN.Name,
tt_size :: Integer,
fields :: [F.Rec],
tt_desc :: String,
pos :: SourcePos }
| DataFormat { tt_name :: TN.Name,
tt_size :: Integer,
fields :: [F.Rec],
tt_desc :: String,
wordsize :: Integer,
pos :: SourcePos }
| ConstType { tt_name :: TN.Name,
tt_size :: Integer,
tt_vals :: [ Val ],
tt_width :: Maybe Integer,
tt_desc :: String,
pos :: SourcePos }
| Primitive { tt_name :: TN.Name,
tt_size :: Integer,
tt_attr :: Attr }
deriving Show
type_name :: Rec -> String
type_name r = TN.toString $ tt_name r
devname :: Rec -> String
devname r = TN.devName $ tt_name r
type_kind :: Rec -> String
type_kind RegFormat {} = "Register"
type_kind DataFormat {} = "Data"
type_kind ConstType {} = "Constant"
type_kind Primitive {} = "Primitive"
-- Is this a primitive (i.e. non-record-like) type. A key issue here
-- is that this includes constants types; otherwise this is equivalent
-- to is_builtin below.
is_primitive :: Rec -> Bool
is_primitive Primitive {} = True
is_primitive ConstType {} = True
is_primitive _ = False
is_builtin :: Rec -> Bool
is_builtin Primitive { tt_name = n } = TN.is_builtin_type n
is_builtin _ = False
builtin_size :: String -> Integer
builtin_size "uint8" = 8
builtin_size "uint16" = 16
builtin_size "uint32" = 32
builtin_size "uint64" = 64
make_rtypetable :: DeviceFile -> [Rec]
make_rtypetable (DeviceFile (Device devname bitorder _ _ decls) _) =
(concat [ make_rtrec d devname bitorder | d <- decls ])
++
[ Primitive (TN.fromParts devname ("uint" ++ (show w))) w NOATTR
| w <- [ 8, 16, 32, 64 ] ]
make_rtrec :: AST -> String -> BitOrder -> [Rec]
make_rtrec (RegType nm dsc (TypeDefn decls) p) dev order =
[ RegFormat { tt_name = TN.fromParts dev nm,
tt_size = (calc_tt_size decls),
fields = F.make_list dev NOATTR order 0 decls,
tt_desc = dsc,
pos = p } ]
make_rtrec (Register nm tt_attrib _ _ dsc (TypeDefn decls) p) dev order =
[ RegFormat { tt_name = TN.fromParts dev nm,
tt_size = (calc_tt_size decls),
fields = F.make_list dev tt_attrib order 0 decls,
tt_desc = "Implicit type of " ++ dsc ++ " register",
pos = p } ]
make_rtrec (RegArray nm tt_attrib _ _ _ dsc (TypeDefn decls) p) dev order =
[ RegFormat { tt_name = TN.fromParts dev nm,
tt_size = (calc_tt_size decls),
fields = F.make_list dev NOATTR order 0 decls,
tt_desc = "Implicit type of " ++ dsc ++ " register array",
pos = p } ]
make_rtrec (DataType nm dsc (TypeDefn decls) o w p) dev devorder =
let order = if o == NOORDER then devorder else o
sz = calc_tt_size decls
in
[ DataFormat { tt_name = TN.fromParts dev nm,
tt_size = sz,
fields = F.make_list dev RW order w decls,
tt_desc = dsc,
wordsize = if w == 0 then sz else w,
pos = p } ]
make_rtrec (Constants nm d vs w p) dev devorder =
let tn = TN.fromParts dev nm
vl = [ make_val tn v | v <- vs ]
in
[ ConstType { tt_name = tn,
tt_size = case w of
Nothing -> calc_const_size vl
Just t -> t,
tt_vals = vl,
tt_desc = d,
tt_width = w,
pos = p } ]
make_rtrec _ _ _ = []
calc_const_size :: [Val] -> Integer
calc_const_size vs =
let m = maximum [ i | t@Val { cval = (ExprConstant i) } <- vs ]
in
if m <= 0xff then 8
else if m <= 0xffff then 16
else if m <= 0xffffffff then 32
else 64
-- Building constant lists
make_val :: TN.Name -> AST -> Val
make_val tn (ConstVal i e d p)
= Val { cname = i, cval = e, cdesc = d, ctype = tn, cpos = p }
calc_tt_size :: [AST] -> Integer
calc_tt_size decls = sum [ sz | (RegField _ sz _ _ _ _) <- decls ]
get_rtrec :: [Rec] -> TN.Name -> Rec
get_rtrec rtinfo nm =
let l = [ rt | rt <- rtinfo, (tt_name rt) == nm ]
in
if (length l) > 0
then head l
else RegFormat { tt_name = TN.null,
tt_size = 32,
fields = [],
tt_desc = "Failed to find type" ++ show nm,
pos = initialPos "no file" }
| daleooo/barrelfish | tools/mackerel/TypeTable.hs | mit | 5,785 | 0 | 18 | 2,083 | 1,561 | 870 | 691 | 123 | 4 |
{-# LANGUAGE OverlappingInstances #-}
{-# OPTIONS_GHC -w #-} -- Turn off deprecation for OverlappingInstances
-- | Unsafe as it uses overlapping instances
-- Although it isn't defining any, so can we still mark it
-- safe?
module UnsafeInfered08_A where
g :: Int
g = 1
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/safeHaskell/safeInfered/UnsafeInfered08_A.hs | bsd-3-clause | 265 | 0 | 4 | 46 | 20 | 15 | 5 | 5 | 1 |
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -Wall #-}
module Main where
import System.Environment
import GHC
main :: IO ()
main = do
flags <- getArgs
runGhc' flags $ do
setTargets [Target (TargetFile "T10052-input.hs" Nothing) True Nothing]
_success <- load LoadAllTargets
return ()
runGhc' :: [String] -> Ghc a -> IO a
runGhc' args act = do
let libdir = head args
flags = tail args
(dynFlags, _warns) <- parseStaticFlags (map noLoc flags)
runGhc (Just libdir) $ do
dflags0 <- getSessionDynFlags
(dflags1, _leftover, _warns) <- parseDynamicFlags dflags0 dynFlags
let dflags2 = dflags1 {
hscTarget = HscInterpreted
, ghcLink = LinkInMemory
, verbosity = 1
}
_newPkgs <- setSessionDynFlags dflags2
act
| urbanslug/ghc | testsuite/tests/ghc-api/T10052/T10052.hs | bsd-3-clause | 837 | 0 | 14 | 241 | 250 | 123 | 127 | 26 | 1 |
module ProjectEuler.Problem42
( problem
) where
import Data.Char
import qualified Data.Text as T
import ProjectEuler.GetData
problem :: Problem
problem = pureProblemWithData "p042_words.txt" 42 Solved compute
wordValue :: String -> Int
wordValue word = sum $ toValue <$> word
where
toValue c = ord c - ord 'A' + 1
triangleNumbers :: [Int]
triangleNumbers = snd <$> iterate (\(i,f) -> (i+1, i+f+1)) (1,1)
compute :: T.Text -> Int
compute raw = length $ filter (`elem` rangedTriangleNumbers) valueList
where
wordList = read $ "[" ++ T.unpack raw ++ "]"
valueList = wordValue <$> wordList
vMax = maximum valueList
rangedTriangleNumbers = takeWhile (<= vMax) triangleNumbers
| Javran/Project-Euler | src/ProjectEuler/Problem42.hs | mit | 705 | 0 | 11 | 136 | 246 | 136 | 110 | 18 | 1 |
{-# htermination (fromEnumMyBool :: MyBool -> MyInt) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data MyInt = Pos Nat | Neg Nat ;
data Nat = Succ Nat | Zero ;
fromEnumMyBool :: MyBool -> MyInt
fromEnumMyBool MyFalse = Pos Zero;
fromEnumMyBool MyTrue = Pos (Succ Zero);
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/basic_haskell/fromEnum_3.hs | mit | 334 | 0 | 8 | 73 | 104 | 59 | 45 | 8 | 1 |
module CovenantEyes.Api.Internal.Time
( UTCTime, addUTCTime
, Clk.TimeSpec
, Nanosec(..), timeSpecAsNanosec, nanosecAsTimeSpec, addTimeOffset, nanoFactor
) where
import CovenantEyes.Api.Internal.Prelude
import Data.Time.Clock (UTCTime, addUTCTime)
import System.Clock as Clk
newtype Nanosec = Nanosec Integer deriving (Enum,Eq,Integral,Num,Ord,Real,Show)
timeSpecAsNanosec :: Clk.TimeSpec -> Nanosec
timeSpecAsNanosec = Nanosec . Clk.timeSpecAsNanoSecs
nanosecAsTimeSpec :: Nanosec -> Clk.TimeSpec
nanosecAsTimeSpec (Nanosec count) = TimeSpec (fromIntegral sec) (fromIntegral nano)
where (sec, nano) = count `quotRem` nanoFactor
addTimeOffset :: Nanosec -> UTCTime -> UTCTime
addTimeOffset (Nanosec nanosec) = addUTCTime (realToFrac nanosec / nanoFactor)
nanoFactor :: Num a => a
nanoFactor = 1000000000
| 3noch/covenanteyes-api-hs | src/CovenantEyes/Api/Internal/Time.hs | mit | 820 | 0 | 8 | 104 | 248 | 141 | 107 | 17 | 1 |
-- | A helper module which takes care of parallelism
{-# LANGUAGE DeriveDataTypeable #-}
module Test.Tasty.Parallel (runInParallel) where
import Control.Monad
import Control.Concurrent
import Control.Concurrent.STM
import Control.Exception
import Foreign.StablePtr
import Data.Typeable
import GHC.Conc (labelThread)
data Interrupt = Interrupt
deriving Typeable
instance Show Interrupt where
show Interrupt = "interrupted"
instance Exception Interrupt
data ParThreadKilled = ParThreadKilled SomeException
deriving Typeable
instance Show ParThreadKilled where
show (ParThreadKilled exn) =
"tasty: one of the test running threads was killed by: " ++
show exn
instance Exception ParThreadKilled
shutdown :: ThreadId -> IO ()
shutdown = flip throwTo Interrupt
-- | Take a list of actions and execute them in parallel, no more than @n@
-- at the same time.
--
-- The action itself is asynchronous, ie. it returns immediately and does
-- the work in new threads. It returns an action which aborts tests and
-- cleans up.
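--
-- A minimal usage sketch (the actions here are made up for illustration):
--
-- > abort <- runInParallel 4 [ putStrLn ("test " ++ show i) | i <- [1 .. 10 :: Int] ]
-- > -- ... later, to abort any tests still pending or running:
-- > abort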
runInParallel
:: Int -- ^ maximum number of parallel threads
-> [IO ()] -- ^ list of actions to execute
-> IO (IO ())
-- This implementation tries its best to ensure that exceptions are
-- properly propagated to the caller and threads are not left running.
--
-- Note that exceptions inside tests are already caught by the test
-- actions themselves. Any exceptions that reach this function or its
-- threads are by definition unexpected.
runInParallel nthreads actions = do
callingThread <- myThreadId
-- Don't let the main thread be garbage-collected
-- Otherwise we may get a "thread blocked indefinitely in an STM
-- transaction" exception when a child thread is blocked and GC'd.
-- (See e.g. https://github.com/feuerbach/tasty/issues/15)
_ <- newStablePtr callingThread
-- A variable containing all ThreadIds of forked threads.
--
-- These are the threads we'll need to kill if something wrong happens.
pidsVar <- atomically $ newTVar []
-- If an unexpected exception has been thrown and we started killing all
-- the spawned threads, this flag will be set to False, so that any
-- freshly spawned threads will know to terminate, even if their pids
-- didn't make it to the "kill list" yet.
aliveVar <- atomically $ newTVar True
let
-- Kill all threads.
shutdownAll :: IO ()
shutdownAll = do
pids <- atomically $ do
writeTVar aliveVar False
readTVar pidsVar
-- be sure not to kill myself!
me <- myThreadId
mapM_ shutdown $ filter (/= me) pids
cleanup :: Either SomeException () -> IO ()
cleanup Right {} = return ()
cleanup (Left exn)
| Just Interrupt <- fromException exn
-- I'm being shut down either by a fellow thread (which caught an
-- exception), or by the main thread which decided to stop running
-- tests. In any case, just end silently.
= return ()
| otherwise = do
-- Wow, I caught an exception (most probably an async one,
-- although it doesn't really matter). Shut down all other
-- threads, and re-throw my exception to the calling thread.
shutdownAll
throwTo callingThread $ ParThreadKilled exn
forkCarefully :: IO () -> IO ThreadId
forkCarefully action = flip myForkFinally cleanup $ do
-- We cannot check liveness and update the pidsVar in one
-- transaction before forking, because we don't know the new pid yet.
--
-- So we fork and then check/update. If something has happened in
-- the meantime, it's not a big deal — we just cancel. OTOH, if
-- we're alive at the time of the transaction, then we add our pid
-- and will be killed when something happens.
newPid <- myThreadId
join . atomically $ do
alive <- readTVar aliveVar
if alive
then do
modifyTVar pidsVar (newPid :)
return action
else
return (return ())
capsVar <- atomically $ newTVar nthreads
let
go a cont = join . atomically $ do
caps <- readTVar capsVar
if caps > 0
then do
writeTVar capsVar $! caps - 1
let
release = atomically $ modifyTVar' capsVar (+1)
-- Thanks to our exception handling, we won't deadlock even if
-- an exception strikes before we 'release'. Everything will be
-- killed, so why bother.
return $ do
pid <- forkCarefully (do a; release)
labelThread pid "tasty_test_thread"
cont
else retry
-- fork here as well, so that we can move to the UI without waiting
  -- until all tests have finished
pid <- forkCarefully $ foldr go (return ()) actions
labelThread pid "tasty_thread_manager"
return shutdownAll
-- Copied from base to stay compatible with GHC 7.4.
myForkFinally :: IO a -> (Either SomeException a -> IO ()) -> IO ThreadId
myForkFinally action and_then =
mask $ \restore ->
forkIO $ try (restore action) >>= and_then
| SAdams601/ParRegexSearch | test/tasty-0.9.0.1/Test/Tasty/Parallel.hs | mit | 5,066 | 0 | 23 | 1,273 | 812 | 409 | 403 | 79 | 4 |
-- Merges two sorted lists in ascending order to give
-- a single sorted list in ascending order, e.g.,
-- > merge [2, 5, 6] [1, 3, 4]
-- [1, 2, 3, 4, 5, 6]
merge :: Ord a => [a] -> [a] -> [a]
merge xs [] = xs
merge [] ys = ys
merge (x:xs) (y:ys)
| x <= y = x : merge xs (y:ys)
| otherwise = y : merge (x:xs) ys
| calebgregory/fp101x | wk3/merge.hs | mit | 352 | 0 | 9 | 116 | 133 | 69 | 64 | 6 | 1 |
module FrameBuffer where
import Foreign
import Graphics.Rendering.OpenGL.Raw
createFrameBuffer textureId renderBufferId = do
bufferPointer <- malloc :: IO (Ptr GLuint)
glGenFramebuffers 1 bufferPointer
bufferId <- peek bufferPointer
glBindFramebuffer gl_FRAMEBUFFER bufferId
glFramebufferTexture2D gl_FRAMEBUFFER gl_COLOR_ATTACHMENT0 gl_TEXTURE_2D textureId 0
glFramebufferRenderbuffer gl_FRAMEBUFFER gl_DEPTH_ATTACHMENT gl_RENDERBUFFER renderBufferId
status <- glCheckFramebufferStatus gl_FRAMEBUFFER
verifyFrameBuffer status
glBindFramebuffer gl_FRAMEBUFFER 0
return bufferId
verifyFrameBuffer status | status == gl_FRAMEBUFFER_INCOMPLETE_ATTACHMENT = error "[Framebuffer] Failed because of an incomplete attachment"
| status == gl_FRAMEBUFFER_INCOMPLETE_DIMENSIONS = error "[Framebuffer] Failed because of incomplete dimensions"
| status == gl_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT = error "[Framebuffer] Failed because of a missing attachment"
                          | status == gl_FRAMEBUFFER_UNSUPPORTED                    = error "[Framebuffer] Failed because it is unsupported"
| status == gl_FRAMEBUFFER_COMPLETE = putStrLn "[Framebuffer] Created successfully"
| otherwise = error "[Framebuffer] There was an error with the framebuffer"
| MichaelBaker/opengl-haskell | src/FrameBuffer.hs | mit | 1,464 | 0 | 10 | 378 | 222 | 98 | 124 | 20 | 1 |
module Bonawitz_3_10b
where
import Blaze
import Tree
import Control.Monad.Random
-- Replicating instance of Bonawitz 3.10b
-- Dual Link Kernel
sr :: State
sr = collectStates dummyState [so, csc]
so :: State
so = mkDoubleParam "Omega" 1.23
csc :: State
csc = (collectStates dummyCState (map mkComp [0..2])) `tagNode` "cc"
mkComp :: Int -> State
mkComp i = (collectStates dummyCState [sp,csx]) `tagNode`
("c" ++ show i)
where csx = collectStates dummyCState $ map mkIntData [i,i+1]
sp = mkDoubleParam "Theta" (0.2 * (fromIntegral i))
makeMachine :: (RandomGen g) => Rand g Machine
makeMachine = do
e <- getRandoms
return $ Machine sr dummyDensity (mkRDDLKernel ["cc"] "c2" "c1") e
main :: IO ()
main = do
m <- evalRandIO $ makeMachine
let sim = iterate sampleMach m
let out = putStrLn . show . map snd . summarizeComps . getTagAt "cc" [] . ms
mapM_ out (take 100 sim) | othercriteria/blaze | Bonawitz_3_10b.hs | mit | 948 | 0 | 14 | 227 | 345 | 181 | 164 | 25 | 1 |
{-# LANGUAGE TypeOperators, GADTs, KindSignatures,
TypeSynonymInstances, FlexibleInstances, PatternGuards #-}
module Tactics where
import Data.Either
import PigmentPrelude
fromRight :: Either a b -> b
fromRight (Right b) = b
opRunRunner :: Op -> [VAL] -> (VAL -> Bool) -> String -> Either String ()
opRunRunner op vals judge msg =
case opRun op vals of
Left new -> Left (show new)
Right val -> case judge val of
False -> Left msg
True -> Right ()
-- Enum
-- ----
--
-- branches is supposed to build the following term:
branchesOpRun :: t -> VAL -> VAL -> INTM
branchesOpRun t e' p = TIMES
(p $$ A ZE)
(branchesOp @@ [e' , L (H ((B0 :< p), [])
""
(N (V 1 :$ A ((C (Su (N (V 0))))))))])
-- Let's test it:
testBranches :: Either String ()
testBranches =
let t = N (P ([("",0)] := DECL :<: UID))
e'= N (P ([("",1)] := DECL :<: enumU))
p = N (P ([("",2)] := DECL :<: ARR (ENUMT (CONSE t e')) SET))
typ = SET
orig = branchesOpRun t e' p
judge val = equal (typ :>: (val, orig)) (B0, 3)
in opRunRunner branchesOp [CONSE t e', p] judge "branches not equal!"
-- switch is supposed to build the following term:
switchOpRun t e' p ps n =
switchOp @@ [e'
, L (H ((B0 :< p), [])
""
(N (V 1 :$ A ((C (Su (N (V 0))))))))
, ps $$ Snd
, n ]
-- Let's test it:
testSwitch = opRunRunner switchOp [CONSE t e' , p , ps , SU n]
judge "switch didn't work :("
where t = N (P ([("",0)] := DECL :<: UID))
e'= N (P ([("",1)] := DECL :<: enumU))
p = N (P ([("",2)] := DECL :<: ARR (ENUMT (CONSE t e')) SET))
ps = N (P ([("",3)] := DECL :<: branchesOp @@ [CONSE t e', ARR (ENUMT (CONSE t e')) SET]))
n = N (P ([("",4)] := DECL :<: ENUMT (CONSE t e')))
typ = SET
orig = switchOpRun t e' p ps n
judge val = equal (typ :>: (val, orig)) (B0, 5)
-- Desc
-- ----
-- Desc on Arg is supposed to build this term:
argDescRun x y z =
let x' = 2
y' = 1
z' = 0
expr =
let x' = 3
y' = 2
z' = 1
a' = 0
in SIGMA (NV x') . L $
"" :. (N (descOp :@ [y' $# [a'], NV z']))
in eval expr (B0 :< x :< y :< z, [])
-- Let's test it:
-- testDescArg = equal (typ :>: (fromRight $ withTac, orig)) (B0,3)
-- where x = N (P ([("",0)] := DECL :<: SET))
-- y = N (P ([("",1)] := DECL :<: ARR x desc))
-- z = N (P ([("",2)] := DECL :<: SET))
-- typ = SET
-- withTac = opRun descOp [ARG x y, z]
-- orig = argDescRun x y z
-- Ind is supposed to build this term:
indDescRun x y z = TIMES (ARR x z) (descOp @@ [y,z])
-- Let's test it:
-- testDescInd = equal (typ :>: (fromRight $ withTac, orig)) (B0,3)
-- where x = N (P ([("",0)] := DECL :<: SET))
-- y = N (P ([("",1)] := DECL :<: desc))
-- z = N (P ([("",2)] := DECL :<: SET))
-- typ = SET
-- withTac = opRun descOp [IND x y, z]
-- orig = indDescRun x y z
-- Just check that we can use Ind1:
-- testDescInd1 = isRight withTac
-- where x = N (P ([("",1)] := DECL :<: desc))
-- z = N (P ([("",2)] := DECL :<: SET))
-- typ = SET
-- withTac = opRun descOp [IND1 x, z]
-- Box on an Arg is supposed to build this term:
boxArgRun a f d p v = boxOp @@ [f $$ A (v $$ Fst),d,p,v $$ Snd]
-- Let's test it:
-- testBoxArg = equal (typ :>: (fromRight $ withTac, orig)) (B0,5)
-- where a = N (P ([("",0)] := DECL :<: SET))
-- f = N (P ([("",1)] := DECL :<: ARR a desc))
-- d = N (P ([("",2)] := DECL :<: SET))
-- p = N (P ([("",3)] := DECL :<: ARR d SET))
-- v = N (P ([("",4)] := DECL :<: descOp @@ [ARG a f, p]))
-- typ = SET
-- withTac = opRun boxOp [ARG a f, d, p, v]
-- orig = boxArgRun a f d p v
-- Box on an Ind is supposed to build this term:
-- boxIndRun h x d p v =
-- eval [.h.x.d.p.v.
-- TIMES (C (Pi (NV h) . L $ "" :. [.y.
-- N (V p :$ A (N (V v :$ Fst :$ A (NV y))))]))
-- (N (boxOp :@ [NV x,NV d,NV p,N (V v :$ Snd)]))
-- ] $ B0 :< h :< x :< d :< p :< v
-- Let's test it:
-- testBoxInd = equal (typ :>: (fromRight $ withTac, orig)) (B0,5)
-- where h = N (P ([("",0)] := DECL :<: SET))
-- x = N (P ([("",1)] := DECL :<: desc))
-- d = N (P ([("",2)] := DECL :<: SET))
-- p = N (P ([("",3)] := DECL :<: ARR d SET))
-- v = N (P ([("",4)] := DECL :<: descOp @@ [IND h x, p]))
-- typ = SET
-- withTac = opRun boxOp [IND h x, d, p, v]
-- orig = boxIndRun h x d p v
-- Just check that box does something on Ind1:
-- testBoxInd1 = isRight withTac
-- where x = N (P ([("",1)] := DECL :<: desc))
-- d = N (P ([("",2)] := DECL :<: SET))
-- p = N (P ([("",3)] := DECL :<: ARR d SET))
-- v = N (P ([("",4)] := DECL :<: descOp @@ [IND1 x, p]))
-- typ = SET
-- withTac = opRun boxOp [IND1 x, d, p, v]
-- Mapbox on an Arg is supposed to build this term:
mapboxArgRun a f d bp p v =
mapBoxOp @@ [f $$ (A (v $$ Fst)),d,bp,p,v $$ Snd]
-- Let's test it:
-- testMapboxArg = equal (typ :>: (fromRight $ withTac, orig)) (B0,6)
-- where a = N (P ([("",0)] := DECL :<: SET))
-- f = N (P ([("",1)] := DECL :<: ARR a desc))
-- d = N (P ([("",2)] := DECL :<: SET))
-- bpv = N (P ([("",3)] := DECL :<: ARR d SET))
-- p = N (P ([("",4)] := DECL :<: (C (Pi d (eval [.bpv. L $ "" :.
-- [.y. N (V bpv :$ A (NV y))]
-- ] $ B0 :< bpv)))))
-- v = N (P ([("",5)] := DECL :<: descOp @@ [ARG a f, d]))
-- typ = boxOp @@ [ARG a f, d, bpv,v]
-- withTac = opRun mapBoxOp [ARG a f, d, bpv, p, v]
-- orig = mapboxArgRun a f d bpv p v
-- Mapbox on an Ind is supposed to build this term:
-- mapboxIndRun h x d bp p v =
-- eval [.h.x.d.bp.p.v.
-- PAIR (L $ "" :. [.y. N (V p :$ A (N (V v :$ Fst :$ A (NV y))))])
-- (N (mapBoxOp :@ [NV x,NV d
-- ,NV bp
-- ,NV p
-- ,N (V v :$ Snd)
-- ]))
-- ] $ B0 :< h :< x :< d :< bp :< p :< v
-- Test:
-- testMapboxInd = equal (typ :>: (fromRight $ withTac, orig)) (B0,6)
-- where h = N (P ([("",0)] := DECL :<: SET))
-- x = N (P ([("",1)] := DECL :<: desc))
-- d = N (P ([("",2)] := DECL :<: SET))
-- bpv = N (P ([("",3)] := DECL :<: ARR d SET))
-- p = N (P ([("",4)] := DECL :<: (C (Pi d (eval [.bpv. L $ "" :.
-- [.y. N (V bpv :$ A (NV y))]
-- ] $ B0 :< bpv)))))
-- v = N (P ([("",5)] := DECL :<: descOp @@ [IND h x, d]))
-- typ = boxOp @@ [IND h x, d, bpv,v]
-- withTac = opRun mapBoxOp [IND h x, d, bpv, p, v]
-- orig = mapboxIndRun h x d bpv p v
-- Just check that mapBox build something with Ind1:
-- testMapboxInd1 = isRight withTac
-- where x = N (P ([("",1)] := DECL :<: desc))
-- d = N (P ([("",2)] := DECL :<: SET))
-- bpv = N (P ([("",3)] := DECL :<: ARR d SET))
-- p = N (P ([("",4)] := DECL :<: (C (Pi d (eval [.bpv. L $ "" :.
-- [.y. N (V bpv :$ A (NV y))]
-- ] $ B0 :< bpv)))))
-- v = N (P ([("",5)] := DECL :<: descOp @@ [IND1 x, d]))
-- typ = boxOp @@ [IND1 x, d, bpv,v]
-- withTac = opRun mapBoxOp [IND1 x, d, bpv, p, v]
-- elimOp is supposed to build this term:
-- elimRun d bp p v =
-- p $$ A v $$ A (mapBoxOp @@
-- [d
-- ,MU d
-- ,bp
-- ,eval [.d.bp.p. L $ "" :. [.x.
-- N (elimOp :@ [NV d,NV bp,NV p,NV x])]
-- ] $ B0 :< d :< bp :< p
-- ,v])
-- Let's test now:
-- testElim = equal (typ :>: (fromRight $ withTac, orig)) (B0,6)
-- where d = N (P ([("",0)] := DECL :<: desc))
-- bp = (P ([("",1)] := DECL :<: ARR (MU d) SET))
-- bpv = N bp
-- p = N (P ([("",2)] := DECL :<: (C (Pi (descOp @@ [d,MU d])
-- (eval [.d.bp. L $ "" :. [.x.
-- ARR (N (boxOp :@ [NV d,MU (NV d),NV bp,NV x]))
-- (N (V bp :$ A (CON (NV x))))]
-- ] $ B0 :< d :< bpv)))))
-- v = N (P ([("",3)] := DECL :<: (descOp @@ [d, MU d])))
-- typ = N (bp :$ A (MU v))
-- withTac = opRun elimOp [d, bpv, p, CON v]
-- orig = elimRun d bpv p v
-- Equality
-- --------
--
-- Be green on a Pi:
-- eqGreenPiRun s1 t1 f1 s2 t2 f2 =
-- eval [.s1.t1.f1.s2.t2.f2.
-- ALL (NV s1) . L $ "" :. [.x1.
-- ALL (NV s2) . L $ "" :. [.x2.
-- IMP (EQBLUE (NV s2 :>: NV x2) (NV s1 :>: NV x1))
-- (eqGreenT (t1 $# [x1] :>: f1 $# [x1]) (t2 $# [x2] :>: f2 $# [x2]))
-- ]]]
-- $ B0 :< s1 :< t1 :< f1 :< s2 :< t2 :< f2
-- I don't believe it:
-- testEqGreenPi = equal (typ :>: (fromRight $ withTac, orig)) (B0,6)
-- where s1 = N (P ([("",0)] := DECL :<: SET))
-- t1 = N (P ([("",1)] := DECL :<: (ARR s1 SET)))
-- f1 = N (P ([("",2)] := DECL :<: (C $ Pi s1 t1)))
-- s2 = N (P ([("",3)] := DECL :<: SET))
-- t2 = N (P ([("",4)] := DECL :<: (ARR s2 SET)))
-- f2 = N (P ([("",5)] := DECL :<: (C $ Pi s2 t2)))
-- typ = PROP
-- withTac = opRun eqGreen [C (Pi s1 t1),f1,C (Pi s2 t2),f2]
-- orig = eqGreenPiRun s1 t1 f1 s2 t2 f2
-- Testing
-- -------
-- > main = do
-- > putStrLn $ "Is branches ok? " ++ show testBranches
-- > putStrLn $ "Is switch ok? " ++ show testSwitch
-- > putStrLn $ "Is desc arg ok? " ++ show testDescArg
-- > putStrLn $ "Is desc ind ok? " ++ show testDescInd
-- > putStrLn $ "Is desc ind1 ok? " ++ show testDescInd1
-- > putStrLn $ "Is box arg ok? " ++ show testBoxArg
-- > putStrLn $ "Is box ind ok? " ++ show testBoxInd
-- > putStrLn $ "Is box ind1 ok? " ++ show testBoxInd1
-- > putStrLn $ "Is mapBox arg ok? " ++ show testMapboxArg
-- > putStrLn $ "Is mapBox ind ok? " ++ show testMapboxInd
-- > putStrLn $ "Is mapBox ind1 ok? " ++ show testMapboxInd1
-- > putStrLn $ "Is elim ok ? " ++ show testElim
-- > putStrLn $ "Is eqGreen Pi ok ? " ++ show testEqGreenPi
| kwangkim/pigment | tests/Tactics.hs | mit | 11,107 | 0 | 25 | 4,359 | 1,398 | 824 | 574 | 62 | 3 |
{-# LANGUAGE ScopedTypeVariables, DeriveDataTypeable, OverloadedStrings #-}
module Happstack.Crud where
import Data.Typeable
import Data.Data
import Data.Generics.Aliases
import Control.Monad.Reader
import Data.Generics
import Data.Maybe
import Data.Tree
import Text.Blaze.Html5 ((!), toValue)
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Text.Blaze (Html)
import Text.Blaze.Renderer.Pretty (renderHtml)
import Happstack.Lite -- (ServerPart, Response, ok, notFound, toResponse)
import Monad (zipWithM_)
import Data.Text (Text)
import Char (toUpper,toLower)
-- Common Types
type Template = Html -> Response
type Fields = [String]
-- genericCreateOf
-- Provides a read-only view on a type
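-- A usage sketch (the names are hypothetical): for a record @user@ whose type
-- has a 'Data' instance,
--
-- > genericReadViewOf user template ["password"]
--
-- renders every field of @user@ except "password".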
genericReadViewOf :: Data d => d -> Template -> Fields -> ServerPart Response
genericReadViewOf value template exclude =
let
ValueInfo name fields values = extractInfo value
toDiv k v =
if k `elem` exclude then
no
else
do H.div ! A.class_ "row" $ do
H.div ! A.class_ "label" $ H.toHtml $ formatTitle k
H.div ! A.class_ "value" $ H.toHtml v
in
ok $ template $ do
H.h2 $ H.toHtml name
zipWithM_ toDiv fields values
-- Provides Edit functionality
-- TODO: more detailed generation for different types
-- TODO: add the concept of an id field
-- TODO: figure out how to cast things
genericUpdateViewOf :: (Show d, Data d) => d -> Template -> ServerPart Response
genericUpdateViewOf value template = msum [ viewForm, processForm ]
where
viewForm =
let
ValueInfo name fields values = extractInfo value
toField k v =
H.div ! A.class_ "clearfix" $ do
H.label ! A.for ka $ kh
H.div ! A.class_ "input" $ H.input ! A.type_ "text" ! A.name ka ! A.id ka ! A.value va
where kh = H.toHtml k
ka = H.toValue k
va = H.toValue v
in
do method GET
ok $ template $ do
H.div ! A.class_ "row" $ H.form ! A.method "post" $ H.fieldset $ do
H.legend $ H.toHtml name
zipWithM_ toField fields values
H.div ! A.class_ "actions" $ H.input ! A.class_ "btn primary" ! A.type_ "submit" ! A.value "save"
processForm = do
method POST
let ValueInfo name fields values = extractInfo value
mapM_ (\f -> lookText f >>= \x -> liftIO $ print x) fields
genericReadViewOf value template []
-- genericListOf
-- genericDeleteOf
-- TODO: work out how to abstract over the Update & Query types from acid-state
-- Are these sensible types?
{-
type CreateData = Data d => ServerPart d
type UpdateData = Data d => ServerPart d -> ServerPart d
type DeleteData = Data d => ServerPart ()
-}
-- Is it possible to do something like this or do I need template haskell?
-- genericCrudOf :: String -> CreateData -> UpdateData -> DeleteData ->
{-
genericCrudOf prefix create update delete = msum
[
dir $ prefix ++ "/create" $ genericCreateOf
dir $ prefix ++ "/read" $ genericReadOf
dir $ prefix ++ "/update" $ genericUpdateOf
dir $ prefix ++ "/list" $ genericListOf
]
-}
-- Utility Functions
formatTitle :: String -> String
formatTitle str = (toUpper $ head str) : (tail str)
type R a = a -> String
render :: Data a => R a
render = (showConstr . toConstr) `extQ` (renderShow :: R Text)
-- TODO: figure out how to abstract over type classes as well as types
renderShow :: Show a => R a
renderShow x = let str = show x in take (length str - 2 ) $ drop 1 str
data ValueInfo = ValueInfo String [String] [String]
extractInfo :: Data d => d -> ValueInfo
extractInfo value =
let
cons = toConstr value
fields = constrFields cons
values = gmapQ render value
in
ValueInfo (show cons) fields values
no = return ()
-- De-trealise Tree to Data
tree2data :: Data a => Tree String -> Maybe a
tree2data = gdefault `extR` atString
where
atString (Node x []) = Just x
gdefault (Node x ts) = res
where
-- a helper for type capture
res = maybe Nothing (kids . fromConstr) con
-- the type to constructed
ta = fromJust res
-- construct constructor
con = readConstr (dataTypeOf ta) x
-- recursion per kid with accumulation
perkid ts = const (tail ts, tree2data (head ts))
-- recurse into kids
kids x =
do guard (glength x == length ts)
snd (gmapAccumM perkid ts x)
-- Trealise Data to Tree
data2tree :: Data a => a -> Tree String
data2tree = gdefault `extQ` atString
where
atString (x::String) = Node x []
gdefault x = Node (render x) (gmapQ data2tree x)
| TheFrameworkGame/blog-haskell-happstack-attempt | Happstack/Crud.hs | mit | 5,005 | 0 | 23 | 1,516 | 1,296 | 661 | 635 | 87 | 2 |
{-# LANGUAGE OverloadedStrings, QuasiQuotes, BangPatterns, ScopedTypeVariables #-}
module Main where
import Codec.Xlsx
import Data.Text (Text, take)
import Control.Applicative
import qualified Data.Map.Strict as M
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import Data.ByteString.Lazy as BL hiding (map, intersperse, zip, concat)
import qualified Data.ByteString.Lazy.Char8 as L8
import System.Time
import Data.Time.Clock.POSIX (getPOSIXTime)
import System.Environment (getArgs)
import Data.Aeson
import Data.Monoid
import Data.List (intersperse, zip)
import qualified Data.Attoparsec.Text as AT
import Data.Attoparsec.Lazy as Atto hiding (Result)
import Data.Attoparsec.ByteString.Char8 (endOfLine, sepBy)
import qualified Data.HashMap.Lazy as HM
import qualified Data.Vector as V
import Data.Scientific (Scientific, floatingOrInteger)
import qualified Data.Text as T
import qualified Data.Text.Lazy.IO as TL
import qualified Options.Applicative as O
import Control.Monad (when)
import System.Exit
import Data.String.QQ
import qualified Data.Text.Encoding as T (encodeUtf8, decodeUtf8)
import Test.HUnit
-- Hackage: https://hackage.haskell.org/package/xlsx
{-
let
sheet = def & cellValueAt (1,2) ?~ CellDouble 42.0
& cellValueAt (3,2) ?~ CellText "foo"
xlsx = def & atSheet "List1" ?~ sheet
-}
data Options = Options {
arrayDelim :: String
, jsonExpr :: String
, outputFile :: String
, debugKeyPaths :: Bool
, maxStringLen :: Int
} deriving Show
defaultMaxBytes :: Int
defaultMaxBytes = 32768
parseOpts :: O.Parser Options
parseOpts = Options
  <$> O.strOption (O.metavar "DELIM" <> O.value "," <> O.short 'a' <> O.help "Concatenated array element delimiter. Defaults to comma.")
<*> O.argument O.str (O.metavar "FIELDS" <> O.help "JSON keypath expressions")
<*> O.argument O.str (O.metavar "OUTFILE" <> O.help "Output file to write to. Use '-' to emit binary xlsx data to STDOUT.")
<*> O.switch (O.long "debug" <> O.help "Debug keypaths")
<*> maxStrLen
opts = O.info (O.helper <*> parseOpts)
(O.fullDesc
   <> O.progDesc [s|Transform JSON object stream to XLSX.
On STDIN provide an input stream of newline-separated JSON objects. |]
<> O.header "jsonxlsx"
<> O.footer "See https://github.com/danchoi/jsonxlsx for more information.")
maxStrLen :: O.Parser Int
maxStrLen = O.option O.auto
( O.long "maxlen"
<> O.short 'l'
<> O.metavar "MAXLEN"
<> O.value defaultMaxBytes
<> O.help "Limit the length of strings (-1 for unlimited)" )
main = do
Options arrayDelim expr outfile debugKeyPaths maxLen <- O.execParser opts
x <- BL.getContents
ct <- getPOSIXTime
let xs :: [Value]
xs = decodeStream x
ks = parseKeyPath $ T.pack expr
ks' :: [[Key]]
ks' = [k | KeyPath k _ <- ks]
arrayDelim' = T.pack arrayDelim
hs :: [Text] -- header labels
hs = map keyPathToHeader ks
-- extract JSON
let xs' :: [[Value]]
xs' = map (evalToValues arrayDelim' ks') xs
headerCells = map mkHeaderCell hs
headerIndexedCells :: [((Int,Int), Cell)]
headerIndexedCells = zip [(1, x) | x <- [1..]] headerCells
rows :: [[Cell]]
rows = map (map (jsonToCell . truncateStr maxLen)) xs'
rowsIndexedCells :: [[((Int,Int), Cell)]]
rowsIndexedCells = map mkRowIndexedCells $ zip [2..] rows
allCells = concat (headerIndexedCells:rowsIndexedCells)
cellMap :: CellMap
cellMap = M.fromList allCells
when debugKeyPaths $ do
Prelude.putStrLn $ "Key Paths: " ++ show ks
print hs
print headerCells
print headerIndexedCells
exitSuccess
let ws = def { _wsCells = cellMap }
let xlsx = def { _xlSheets = M.fromList [("test", ws)] }
if outfile == "-"
then L8.putStr $ fromXlsx ct xlsx
else L.writeFile outfile $ fromXlsx ct xlsx
mkRowIndexedCells :: (Int, [Cell]) -> [((Int, Int), Cell)]
mkRowIndexedCells (rowNumber, cells) = zip [(rowNumber, x) | x <- [1..]] cells
mkHeaderCell :: Text -> Cell
mkHeaderCell x = def { _cellValue = Just (CellText x) }
truncateStr :: Int -> Value -> Value
truncateStr (-1) v = v
truncateStr maxlen (String xs) =
let ellipsis = ("..." :: Text)
maxlen' = maxlen - (bytelen ellipsis)
in if (bytelen xs) > maxlen
then let s = truncateText maxlen' xs
in String $ s <> ellipsis
else String xs
truncateStr _ v = v
-- This should get Text approximately under the byte limit, erring on the side of being
-- too aggressive.
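-- For example, @truncateText 5 "héllo!"@ yields @"héll"@ (5 bytes in UTF-8,
-- since the accented character encodes to two bytes).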
truncateText :: Int -> Text -> Text
truncateText maxBytes s | bytelen s > maxBytes =
truncateText maxBytes . T.take (T.length s - d) $ s
where d = bytelen s - maxBytes
truncateText _ s = s
bytelen :: Text -> Int
bytelen = B.length . T.encodeUtf8
jsonToCell :: Value -> Cell
jsonToCell (String x) = def { _cellValue = Just (CellText x) }
jsonToCell Null = def { _cellValue = Nothing }
jsonToCell (Number x) = def { _cellValue = Just (CellDouble $ scientificToDouble x) }
jsonToCell (Bool x) = def { _cellValue = Just (CellBool x) }
jsonToCell (Object _) = def { _cellValue = Just (CellText "[Object]") }
jsonToCell (Array _) = def { _cellValue = Just (CellText "[Array]") }
scientificToDouble :: Scientific -> Double
scientificToDouble x =
case floatingOrInteger x of
Left float -> float
Right int -> fromIntegral int
------------------------------------------------------------------------
-- decode JSON object stream
decodeStream :: (FromJSON a) => BL.ByteString -> [a]
decodeStream bs = case decodeWith json bs of
(Just x, xs) | xs == mempty -> [x]
(Just x, xs) -> x:(decodeStream xs)
(Nothing, _) -> []
decodeWith :: (FromJSON a) => Parser Value -> BL.ByteString -> (Maybe a, BL.ByteString)
decodeWith p s =
case Atto.parse p s of
Atto.Done r v -> f v r
Atto.Fail _ _ _ -> (Nothing, mempty)
where f v' r = (\x -> case x of
Success a -> (Just a, r)
_ -> (Nothing, r)) $ fromJSON v'
------------------------------------------------------------------------
-- JSON parsing and data extraction
-- | KeyPath may have an alias for the header output
data KeyPath = KeyPath [Key] (Maybe Text) deriving Show
data Key = Key Text | Index Int deriving (Eq, Show)
parseKeyPath :: Text -> [KeyPath]
parseKeyPath s = case AT.parseOnly pKeyPaths s of
Left err -> error $ "Parse error " ++ err
Right res -> res
keyPathToHeader :: KeyPath -> Text
keyPathToHeader (KeyPath _ (Just alias)) = alias
keyPathToHeader (KeyPath ks Nothing) =
mconcat $ intersperse "." $ [x | Key x <- ks] -- exclude Index keys
spaces = many1 AT.space
pKeyPaths :: AT.Parser [KeyPath]
pKeyPaths = pKeyPath `AT.sepBy` spaces
pKeyPath :: AT.Parser KeyPath
pKeyPath = KeyPath
<$> (AT.sepBy1 pKeyOrIndex (AT.takeWhile1 $ AT.inClass ".["))
<*> (pAlias <|> pure Nothing)
------------------------------------------------------------------------
-- | A column header alias is designated by : followed by alphanum string after keypath
-- The alias string may be quoted with double quotes if it contains spaces or other special characters.
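--
-- For example (hypothetical keypaths): @user.name:Name@ or
-- @user.name:"Full name"@.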
pAlias :: AT.Parser (Maybe Text)
pAlias = do
AT.char ':'
alias <- (AT.takeWhile1 (AT.inClass "a-zA-Z0-9_-")) <|> quotedString
return . Just $ alias
quotedString = AT.char '"' *> (AT.takeWhile1 (AT.notInClass "\"")) <* AT.char '"'
------------------------------------------------------------------------
pKeyOrIndex :: AT.Parser Key
pKeyOrIndex = pIndex <|> pKey
pKey = Key <$> AT.takeWhile1 (AT.notInClass " .[:")
pIndex = Index <$> AT.decimal <* AT.char ']'
evalToValues :: Text -> [[Key]] -> Value -> [Value]
evalToValues arrayDelim ks v = map (evalToValue arrayDelim v) ks
type ArrayDelimiter = Text
evalToValue :: ArrayDelimiter -> Value -> [Key] -> Value
evalToValue d v k = evalKeyPath d k v
evalToText :: ArrayDelimiter -> [Key] -> Value -> Text
evalToText d k v = valToText $ evalKeyPath d k v
-- evaluates a JSON key path against a Value context to a leaf Value
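-- For example (hypothetical JSON): the path @[Key "a", Index 0]@ applied to
-- the object @{"a": ["x", "y"]}@ evaluates to @String "x"@; a missing key
-- evaluates to @Null@.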
evalKeyPath :: ArrayDelimiter -> [Key] -> Value -> Value
evalKeyPath d [] x@(String _) = x
evalKeyPath d [] x@Null = x
evalKeyPath d [] x@(Number _) = x
evalKeyPath d [] x@(Bool _) = x
evalKeyPath d [] x@(Object _) = x
evalKeyPath d [] x@(Array v) | V.null v = Null
evalKeyPath d [] x@(Array v) =
let vs = V.toList v
xs = intersperse d $ map (evalToText d []) vs
in String . mconcat $ xs
evalKeyPath d (Key key:ks) (Object s) =
case (HM.lookup key s) of
Just x -> evalKeyPath d ks x
Nothing -> Null
evalKeyPath d (Index idx:ks) (Array v) =
let e = (V.!?) v idx
in case e of
Just e' -> evalKeyPath d ks e'
Nothing -> Null
-- traverse array elements with additional keys
evalKeyPath d ks@(Key key:_) (Array v) | V.null v = Null
evalKeyPath d ks@(Key key:_) (Array v) =
let vs = V.toList v
in String . mconcat . intersperse d $ map (evalToText d ks) vs
evalKeyPath _ ((Index _):_) _ = Null
evalKeyPath _ _ _ = Null
valToText :: Value -> Text
valToText (String x) = x
valToText Null = "null"
valToText (Bool True) = "t"
valToText (Bool False) = "f"
valToText (Number x) =
case floatingOrInteger x of
Left float -> T.pack . show $ float
Right int -> T.pack . show $ int
valToText (Object _) = "[Object]"
t = runTestTT tests
str1 = "1234567890"
tests = test [
"no truncation, under limit " ~: str1 @=? truncateStr 10 str1
, "truncation" ~: "12345..." @=? truncateStr 8 str1
, "John Smith bytelength" ~: 10 @=? bytelen "John Smith"
, "John Smith" ~: "John Smith" @=? truncateStr 10 "John Smith"
]
| danchoi/jsonxlsx | Main.hs | mit | 9,836 | 0 | 16 | 2,230 | 3,256 | 1,718 | 1,538 | 216 | 3 |
{-# LANGUAGE CPP #-}
module Internal.API where
import Internal.FFI
import Internal.Type
#ifdef ghcjs_HOST_OS
import Data.JSString
import GHCJS.Foreign.Callback (Callback, asyncCallback1)
import GHCJS.Marshal (FromJSVal (..))
import GHCJS.Types (JSVal)
#else
data Callback a = Callback a
#endif
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
#ifndef ghcjs_HOST_OS
notImplemented :: a
notImplemented = error "Client side call not implemented on server side."
#endif
getDocument :: IO Elem
#ifdef ghcjs_HOST_OS
getDocument = Elem <$> js_document
#else
getDocument = notImplemented
#endif
getBody :: IO Elem
#ifdef ghcjs_HOST_OS
getBody = Elem <$> js_documentBody
#else
getBody = notImplemented
#endif
newElem :: JSString -> IO Elem
#ifdef ghcjs_HOST_OS
newElem = (Elem <$>) . js_documentCreateNode
#else
newElem = notImplemented
#endif
newTextElem :: JSString -> IO Elem
#ifdef ghcjs_HOST_OS
newTextElem = (Elem <$>) . js_createTextNode
#else
newTextElem = notImplemented
#endif
parent :: Elem -> IO Elem
#ifdef ghcjs_HOST_OS
parent (Elem c) = Elem <$> js_parentNode c
#else
parent = notImplemented
#endif
-- | Appends one element to another.
addChild :: Elem -- ^ child element to append
-> Elem -- ^ parent element
-> IO ()
#ifdef ghcjs_HOST_OS
addChild (Elem c) (Elem p) = js_appendChild p c
#else
addChild = notImplemented
#endif
-- | Remove child from parent.
removeChild :: Elem -- ^ child to remove
-> Elem -- ^ parent node
-> IO ()
#ifdef ghcjs_HOST_OS
removeChild (Elem c) (Elem p) = js_removeChild p c
#else
removeChild = notImplemented
#endif
clearChildren :: Elem -> IO ()
#ifdef ghcjs_HOST_OS
clearChildren (Elem e) = js_clearChildren e
#else
clearChildren = notImplemented
#endif
replace :: Elem -> Elem -> IO Elem
#ifdef ghcjs_HOST_OS
replace oe@(Elem o) (Elem n) =
do (Elem par) <- parent oe
js_replaceChild par o n
return (Elem n)
#else
replace = notImplemented
#endif
setAttr :: Elem -> PropId -> JSString -> IO ()
#ifdef ghcjs_HOST_OS
setAttr (Elem e) p = js_setAttribute e p
#else
setAttr = notImplemented
#endif
setInnerHTML :: Elem -> JSString -> IO ()
#ifdef ghcjs_HOST_OS
setInnerHTML (Elem e) = js_setInnerHtml e
#else
setInnerHTML = notImplemented
#endif
getElemById :: JSString -> IO Elem
#ifdef ghcjs_HOST_OS
getElemById = (Elem <$>) . js_getElementById
#else
getElemById = notImplemented
#endif
queryAll :: JSString -> IO [Elem]
#ifdef ghcjs_HOST_OS
queryAll query =
do res <- js_querySelectorAll query
fromJSValUncheckedListOf res
#else
queryAll = notImplemented
#endif
-- | Attach an event listener to an element.
--
-- Returns an action that removes the listener, though you still have to
-- release the callback manually.
--
-- If you are sure that you do not want to remove the handler, consider
-- using 'onEvent''.
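--
-- A minimal sketch (GHCJS build; @el@ and @evt@ stand for some 'Elem' and
-- 'NamedEvent' value and are not defined here):
--
-- > cb <- asyncCallback1 (\_ -> putStrLn "event fired")
-- > unlisten <- onEvent el evt cb
-- > -- later: detach the listener (the callback still has to be released
-- > -- separately)
-- > unlisten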
onEvent :: NamedEvent e => Elem -> e -> Callback (JSVal -> IO()) -> IO (IO ())
-- | Attach an endless event listener to an element.
--
-- Use this function to attach event handlers that are not supposed to be
-- removed during the application's run.
onEvent' :: NamedEvent e => Elem -> e -> (JSVal -> IO()) -> IO ()
-- | Remove an attached event listener.
--
-- Normally you can use the action returned by 'onEvent' to detach the event
-- listener, but you can also use this function directly.
removeEvent :: NamedEvent e => Elem -> e -> Callback (JSVal -> IO ()) -> IO ()
#ifdef ghcjs_HOST_OS
onEvent el'@(Elem el) et cb =
do js_addEventListener el e cb
return $ removeEvent el' e cb
where
e = pack (eventName et)
onEvent' (Elem el) et hnd =
do cb <- asyncCallback1 hnd
js_addEventListener el e cb
where
e = pack (eventName et)
removeEvent (Elem el) et cb = js_removeEventListener el (pack (eventName et)) cb
#else
onEvent = notImplemented
onEvent' = notImplemented
removeEvent = notImplemented
#endif
--------------------------------------------------------------------------------
| agocorona/ghcjs-perch | src/Internal/API.hs | mit | 4,094 | 0 | 13 | 771 | 890 | 481 | 409 | 43 | 1 |
{-# LANGUAGE QuasiQuotes #-}
module TestLedgerXml (ledgerXmlSpec) where
import Test.Hspec (Spec, describe, it, shouldBe)
import Data.String.Interpolate (i)
import Data.String.Interpolate.Util (unindent)
import qualified Text.XML.HXT.Core as HXT
import qualified Data.Expenses.Ledger.Xml as LX
import qualified Data.Expenses.Types as LT
ledgerXmlSpec :: Spec
ledgerXmlSpec =
describe "Data.Expenses.Ledger.Xml" $ do
describe "textAsDecimal" $ do
it "should return digits for simple input" $ do
HXT.runLA LX.textAsDecimal "0" `shouldBe` [0]
HXT.runLA LX.textAsDecimal "5" `shouldBe` [5]
HXT.runLA LX.textAsDecimal "123" `shouldBe` [123]
it "should parse negative numbers" $ do
HXT.runLA LX.textAsDecimal "-5" `shouldBe` [-5]
it "should parse non-integer numbers" $ do
HXT.runLA LX.textAsDecimal "1.23" `shouldBe` [1.23]
it "should return empty for non-digit input" $ do
HXT.runLA LX.textAsDecimal "" `shouldBe` []
HXT.runLA LX.textAsDecimal "x" `shouldBe` []
    describe "transactionsInXmlDocument" $
it "should correctly parse transactions" $ do
let
document = unindent [i|
<?xml version="1.0" encoding="utf-8"?>
<ledger>
<transactions>
<transaction>
<date>2018/01/01</date>
<payee>food</payee>
<postings>
<posting>
<account>
<name>Expenses:Food</name>
</account>
<post-amount>
<amount>
<commodity flags="S"><symbol>SGD</symbol></commodity>
<quantity>5</quantity>
</amount>
</post-amount>
</posting>
<posting generated="true">
<account><name>Assets:SGD</name></account>
<post-amount>
<amount>
<commodity flags="S"><symbol>SGD</symbol></commodity>
<quantity>-5</quantity>
</amount>
</post-amount>
</posting>
</postings>
</transaction>
</transactions>
</ledger>
|]
LX.transactionsInXmlDocument document
`shouldBe`
[ LX.Transaction
"2018/01/01"
"food"
[ LX.Posting "Expenses:Food" (LX.Amount "SGD" 5)
, LX.Posting "Assets:SGD" (LX.Amount "SGD" (-5))
]
]
describe "simpleTransactionFromTransaction" $
it "simple case" $
LX.simpleTransactionFromTransaction
(LX.Transaction
"2018/01/01"
"food"
[ LX.Posting "Expenses:Food" (LX.Amount "SGD" 5)
, LX.Posting "Assets:SGD" (LX.Amount "SGD" (-5))
])
`shouldBe`
Just
(LT.SimpleTransaction
{ LT.transactionDescription = "food"
, LT.transactionAmount =
LT.Amount
{ LT.moneyAmount = 5
, LT.moneyCurrency = Just "SGD"
, LT.moneyIsApprox = False
}
, LT.transactionDebittedAccount = "Expenses:Food"
, LT.transactionCredittedAccount = "Assets:SGD"
})
| rgoulter/expenses-csv-utils | test/TestLedgerXml.hs | mit | 3,475 | 0 | 18 | 1,344 | 557 | 299 | 258 | 53 | 1 |
module Util.Misc where
import Numeric (showFFloat)
import Data.List.Split (splitOn)
import qualified Turtle as T
import Data.Text as Text (pack, unpack, replace, head, Text)
import Data.Attoparsec.Text (parseOnly, signed, decimal)
import TextShow (showt)
import Control.Monad (when, (<=<))
import Control.Monad.Fail (MonadFail)
import Data.Monoid ((<>))
import qualified Shelly as S (shelly, which, toTextWarn)
parseInt :: (Integral a, Monad m, MonadFail m) => Text -> m a
parseInt = handle . parseOnly (signed decimal)
handle :: (Monad m, MonadFail m) => Either String a -> m a
handle e = case e of
Left errMsg -> fail errMsg
Right a -> return a
formatDouble :: Int -> Double -> T.Text
formatDouble numOfDecimals floatNum = pack $ showFFloat (Just numOfDecimals) floatNum ""
strToDbls :: String -> [Double]
strToDbls = map read . (splitOn ",")
toTxt :: T.FilePath -> T.Text
toTxt = T.format T.fp
-- note: can't contain newlines
echoTxt :: T.MonadIO io => Text -> io ()
echoTxt = T.echo . T.unsafeTextToLine
isDotFile :: T.FilePath -> Bool
isDotFile x = Text.head (toTxt x) == '.'
dropDotFiles :: [T.FilePath] -> [T.FilePath]
dropDotFiles = filter (not . isDotFile)
exec :: T.MonadIO io => Text -> io (T.ExitCode, Text)
exec cmd = T.shellStrict cmd T.empty
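-- | Create a directory, first deleting any existing directory at that path.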
mkdirDestructive :: T.MonadIO io => T.FilePath -> io ()
mkdirDestructive path = do
dirExists <- T.testdir path
when dirExists (T.rmtree path)
T.mkdir path
mkdirUniq :: T.MonadIO io => T.FilePath -> io ()
mkdirUniq = T.mkdir <=< uniqPathName
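-- | Return a non-existing variant of the given path: the path itself if it
-- is free, otherwise the first free path obtained by appending @-1@, @-2@, ...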
uniqPathName :: T.MonadIO io => T.FilePath -> io T.FilePath
uniqPathName path = do
dirExists <- T.testdir path
case dirExists of
False -> return path
True -> uniqPathNumbered path 0
where
uniqPathNumbered :: T.MonadIO io => T.FilePath -> Int -> io T.FilePath
uniqPathNumbered path i = do
let ip1 = i + 1
ip1path = T.fromText ((toTxt path) <> "-" <> (showt ip1))
enumDirExists <- T.testdir ip1path
case enumDirExists of
True -> uniqPathNumbered path ip1
False -> return ip1path
getPythonPath :: T.MonadIO io => io Text
getPythonPath = S.shelly $ do
maybP <- S.which "python3"
case maybP of
Nothing -> error "Error: python3 not found in path."
Just p -> S.shelly $ S.toTextWarn p
count :: Eq a => a -> [a] -> Int
count x = length . filter (x ==)
| dpren/pitch-extractor | src/Util/Misc.hs | mit | 2,489 | 0 | 22 | 591 | 924 | 476 | 448 | 61 | 3 |
module Utils where
import Data.Word
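-- | Reverse function application (pipeline style): @x |> f = f x@.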
(|>) :: a -> (a -> b) -> b
(|>) = flip ($)
infixl 0 |>
| sgraf812/fndiff | src/Utils.hs | mit | 93 | 0 | 8 | 23 | 48 | 30 | 18 | 5 | 1 |
import System.IO
main = do
withFile "input.txt" ReadMode (\ handle -> do
-- hSetBuffering handle $ BlockBuffering (Just 2048)
contents <- hGetContents handle
putStr contents)
| janherich/learning-haskell | files.hs | epl-1.0 | 283 | 0 | 13 | 133 | 46 | 22 | 24 | 5 | 1 |
module Tema_20a_Monticulo_Spec (main, spec) where
import Tema_20.Monticulo
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "inserta" $
it "e1" $
show (inserta 3 ejM1) `shouldBe`
"M 1 2 (M 4 1 (M 8 1 Vacio Vacio) Vacio) (M 3 1 (M 6 1 Vacio Vacio) Vacio)"
describe "resto" $
it "e1" $
show (resto ejM1) `shouldBe`
"M 4 2 (M 8 1 Vacio Vacio) (M 6 1 Vacio Vacio)"
describe "valido" $ do
    it "e1" $
valido ejM1 `shouldBe` True
it "e2" $
valido ejM2 `shouldBe` True
it "e3" $
valido ejM3 `shouldBe` True
| jaalonso/I1M-Cod-Temas | test/Tema_20a_Monticulo_Spec.hs | gpl-2.0 | 610 | 0 | 12 | 182 | 183 | 91 | 92 | 22 | 1 |
{- |
Module : $Header$
Copyright : (c) Felix Gabriel Mance
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
RDF syntax parser
-}
module RDF.Parse where
import Common.Parsec
import Common.Lexer
import Common.AnnoParser (newlineOrEof)
import Common.Token (criticalKeywords)
import OWL2.AS
import OWL2.Parse hiding (stringLiteral, literal, skips, uriP)
import RDF.AS
import RDF.Symbols
import Data.Either
import qualified Data.Map as Map
import Text.ParserCombinators.Parsec
uriP :: CharParser st QName
uriP =
skips $ try $ checkWithUsing showQN uriQ $ \ q ->
if null $ namePrefix q then notElem (localPart q) criticalKeywords else True
-- * hets symbols parser
rdfEntityType :: CharParser st RDFEntityType
rdfEntityType = choice $ map (\ f -> keyword (show f) >> return f)
rdfEntityTypes
{- | parses an entity type (subject, predicate or object) followed by a
comma separated list of IRIs -}
rdfSymbItems :: GenParser Char st SymbItems
rdfSymbItems = do
ext <- optionMaybe rdfEntityType
iris <- rdfSymbs
return $ SymbItems ext iris
-- | parse a comma separated list of uris
rdfSymbs :: GenParser Char st [IRI]
rdfSymbs = uriP >>= \ u -> do
commaP `followedWith` uriP
us <- rdfSymbs
return $ u : us
<|> return [u]
-- | parse a possibly kinded list of comma separated symbol pairs
rdfSymbMapItems :: GenParser Char st SymbMapItems
rdfSymbMapItems = do
ext <- optionMaybe rdfEntityType
iris <- rdfSymbPairs
return $ SymbMapItems ext iris
-- | parse a comma separated list of uri pairs
rdfSymbPairs :: GenParser Char st [(IRI, Maybe IRI)]
rdfSymbPairs = uriPair >>= \ u -> do
commaP `followedWith` uriP
us <- rdfSymbPairs
return $ u : us
<|> return [u]
-- * turtle syntax parser
skips :: CharParser st a -> CharParser st a
skips = (<< skipMany
(forget space <|> parseComment <|> nestCommentOut <?> ""))
charOrQuoteEscape :: CharParser st String
charOrQuoteEscape = try (string "\\\"") <|> fmap return anyChar
longLiteral :: CharParser st (String, Bool)
longLiteral = do
string "\"\"\""
ls <- flat $ manyTill charOrQuoteEscape $ try $ string "\"\"\""
return (ls, True)
shortLiteral :: CharParser st (String, Bool)
shortLiteral = do
char '"'
ls <- flat $ manyTill charOrQuoteEscape $ try $ string "\""
return (ls, False)
stringLiteral :: CharParser st RDFLiteral
stringLiteral = do
(s, b) <- try longLiteral <|> shortLiteral
do
string cTypeS
d <- datatypeUri
return $ RDFLiteral b s $ Typed d
<|> do
string "@"
t <- skips $ optionMaybe languageTag
return $ RDFLiteral b s $ Untyped t
<|> skips (return $ RDFLiteral b s $ Typed $ mkQName "string")
literal :: CharParser st RDFLiteral
literal = do
f <- skips $ try floatingPointLit
<|> fmap decToFloat decimalLit
return $ RDFNumberLit f
<|> stringLiteral
parseBase :: CharParser st Base
parseBase = do
pkeyword "@base"
base <- skips uriP
skips $ char '.'
return $ Base base
parsePrefix :: CharParser st Prefix
parsePrefix = do
pkeyword "@prefix"
p <- skips (option "" prefix << char ':')
i <- skips uriP
skips $ char '.'
return $ Prefix p i
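-- For example, 'parsePrefix' accepts a line such as
--
-- > @prefix ex: <http://www.example.org/> .
--
-- and 'parseBase' accepts
--
-- > @base <http://www.example.org/> .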
parsePredicate :: CharParser st Predicate
parsePredicate = fmap Predicate $ skips uriP
parseSubject :: CharParser st Subject
parseSubject =
fmap Subject (skips uriP)
<|> fmap SubjectList
(between (skips $ char '[') (skips $ char ']') $ skips parsePredObjList)
<|> fmap SubjectCollection
(between (skips $ char '(') (skips $ char ')') $ many parseObject)
parseObject :: CharParser st Object
parseObject = fmap ObjectLiteral literal <|> fmap Object parseSubject
parsePredObjects :: CharParser st PredicateObjectList
parsePredObjects = do
pr <- parsePredicate
objs <- sepBy parseObject $ skips $ char ','
return $ PredicateObjectList pr objs
parsePredObjList :: CharParser st [PredicateObjectList]
parsePredObjList = sepEndBy parsePredObjects $ skips $ char ';'
parseTriples :: CharParser st Triples
parseTriples = do
s <- parseSubject
ls <- parsePredObjList
skips $ char '.'
return $ Triples s ls
parseComment :: CharParser st ()
parseComment = do
tryString "#"
forget $ skips $ manyTill anyChar newlineOrEof
parseStatement :: CharParser st Statement
parseStatement = fmap BaseStatement parseBase
<|> fmap PrefixStatement parsePrefix <|> fmap Statement parseTriples
basicSpec :: CharParser st TurtleDocument
basicSpec = do
many parseComment
ls <- many parseStatement
return $ TurtleDocument dummyQName Map.empty ls
predefinedPrefixes :: RDFPrefixMap
predefinedPrefixes = Map.fromList $ zip
["rdf", "rdfs", "dc", "owl", "ex", "xsd"]
$ rights $ map (parse uriQ "")
[ "<http://www.w3.org/1999/02/22-rdf-syntax-ns#>"
, "<http://www.w3.org/2000/01/rdf-schema#>"
, "<http://purl.org/dc/elements/1.1/>"
, "<http://www.w3.org/2002/07/owl#>"
, "<http://www.example.org/>"
, "<http://www.w3.org/2001/XMLSchema#>" ]
| nevrenato/Hets_Fork | RDF/Parse.hs | gpl-2.0 | 5,139 | 0 | 13 | 1,088 | 1,481 | 728 | 753 | 133 | 2 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
module Language.Slicer.Resugar
( RExp, resugar
) where
import Language.Slicer.Absyn ( Con, TyCtx, getTyDeclByName
, conL, conR )
import Language.Slicer.Core
import Language.Slicer.Env
import Language.Slicer.Error
import Language.Slicer.Monad
import Language.Slicer.Monad.Desugar
import Language.Slicer.Primitives
import Language.Slicer.UpperSemiLattice
import qualified Control.Arrow as A
import Control.DeepSeq ( NFData )
import GHC.Generics ( Generic )
import Prelude hiding ( (<>) )
import Text.PrettyPrint.HughesPJClass
-- See GitHub ticket #10 and pull request #20 for discussion and thoughts on
-- possible improvements to the resugaring mechanism. See #24 for thoughts on
-- resugaring of error messages.
--------------------------------------------------------------------------------
-- RESUGARED EXPRESSION LANGUAGE --
--------------------------------------------------------------------------------
data RExp = RVar Var | RLet Var RExp RExp
| RUnit
| RBool Bool | RIf RExp RExp RExp
| RInt Integer | RDouble Double
| ROp Primitive [RExp]
| RString String
| RPair RExp RExp | RFst RExp | RSnd RExp
| RCon Con RExp | RCase RExp RMatch
| RFun RCode | RApp RExp [RExp]
| RTrace RExp
| RHole
-- References
| RRef RExp | RDeref RExp | RAssign RExp RExp
| RSeq RExp RExp | RWhile RExp RExp
-- Arrays
| RArr RExp RExp | RArrGet RExp RExp | RArrSet RExp RExp RExp
-- Exceptions
| RRaise RExp | RCatch RExp Var RExp | RException RExp
deriving (Show, Eq, Ord, Generic, NFData)
data RCode = RRec Var [Var] RExp -- name, args, body
deriving (Show, Eq, Ord, Generic, NFData)
data RMatch = RMatch [ ( Con, Maybe Var, RExp ) ]
deriving (Show, Eq, Ord, Generic, NFData)
--------------------------------------------------------------------------------
-- UNEVALUATION --
--------------------------------------------------------------------------------
-- Unevaluation squashes a trace back down into an expression. When resugaring
-- a trace it first gets unevaluated into an expression and then that expression
-- is resugared. The benefit is that the fairly complicated logic of resugaring
-- is written only once at the expense of having to define a relatively simple
-- unevaluation logic.
class Uneval a b | a -> b where
uneval :: a -> b
instance Uneval Trace Exp where
uneval (TCaseL t x t1) = ECase (uneval t) (Match (x, uneval t1) (bot, bot))
uneval (TCaseR t x t2) = ECase (uneval t) (Match (bot, bot) (x, uneval t2))
uneval (TIfThen t t1) = EIf (uneval t) (uneval t1) bot
uneval (TIfElse t t2) = EIf (uneval t) bot (uneval t2)
uneval (TIfExn t) = EIf (uneval t) bot bot
uneval (TOp _ f es) = EOp f (map uneval es)
uneval (TCall t1 t2 _ _) = EApp (uneval t1) (uneval t2)
uneval (TCallExn t1 t2) = EApp (uneval t1) (uneval t2)
uneval (TRef _ t) = ERef (uneval t)
uneval (TDeref _ t) = EDeref (uneval t)
uneval (TAssign _ t1 t2) = EAssign (uneval t1) (uneval t2)
uneval (TArr _ t1 t2) = EArr (uneval t1) (uneval t2)
uneval (TArrGet _ t1 t2) = EArrGet (uneval t1) (uneval t2)
uneval (TArrSet _ t1 t2 t3) = EArrSet (uneval t1) (uneval t2) (uneval t3)
uneval (TWhileDone t) = EWhile (uneval t) bot
uneval (TWhileStep t1 t2 _) = (EWhile (uneval t1) (uneval t2))
uneval (TRaise t) = ERaise (uneval t)
uneval (TTry t) = ETryWith (uneval t) bot bot
uneval (TTryWith t x h) = ETryWith (uneval t) x (uneval h)
uneval (TExp expr) = Exp (uneval expr)
uneval (TSlicedHole _ _) = EHole
instance Uneval a b => Uneval (Syntax a) (Syntax b) where
uneval (Var x) = Var x
uneval Unit = Unit
uneval Hole = Hole
uneval (CBool b) = CBool b
uneval (CInt i) = CInt i
uneval (CDouble d) = CDouble d
uneval (CString s) = CString s
uneval (Fun k) = Fun k
uneval (Let x e1 e2) = Let x (uneval e1) (uneval e2)
uneval (Pair e1 e2) = Pair (uneval e1) (uneval e2)
uneval (Fst e) = Fst (uneval e)
uneval (Snd e) = Snd (uneval e)
uneval (InL e) = InL (uneval e)
uneval (InR e) = InR (uneval e)
uneval (Roll tv e) = Roll tv (uneval e)
uneval (Unroll tv e) = Unroll tv (uneval e)
uneval (Seq e1 e2) = Seq (uneval e1) (uneval e2)
instance Uneval a b => Uneval (Code a) (Code b) where
uneval (Rec name arg body label) = Rec name arg (uneval body) label
--------------------------------------------------------------------------------
-- RESUGARING --
--------------------------------------------------------------------------------
resugar :: Resugarable a => TyCtx -> a -> SlM RExp
resugar decls expr = runDesugarM decls emptyEnv (resugarM expr)
-- | Class of things that can be resugared into surface syntax
class Resugarable a where
resugarM :: a -> DesugarM RExp
instance (Resugarable a, AskConstrs a, Show a) => Resugarable (Syntax a) where
resugarM (Var v) = return (RVar v)
resugarM (Let v e1 e2)
= do e1' <- resugarM e1
e2' <- resugarM e2
return (RLet v e1' e2')
resugarM Unit = return RUnit
resugarM (CBool b) = return (RBool b)
resugarM (CInt i) = return (RInt i)
resugarM (CDouble d) = return (RDouble d)
resugarM (CString s) = return (RString s)
resugarM (Pair e1 e2)
= do e1' <- resugarM e1
e2' <- resugarM e2
return (RPair e1' e2')
resugarM (Fst e)
= do e' <- resugarM e
return (RFst e')
resugarM (Snd e)
= do e' <- resugarM e
return (RSnd e')
resugarM (Roll dataty e) | Just e' <- maybeInL e
= do e'' <- resugarM e'
decls <- getDecls
case getTyDeclByName decls dataty of
Just decl -> return (RCon (conL decl) e'')
Nothing -> resugarError ("Unknown data type: " ++ show dataty)
resugarM (Roll dataty e) | Just e' <- maybeInR e
= do e'' <- resugarM e'
decls <- getDecls
case getTyDeclByName decls dataty of
Just decl -> return (RCon (conR decl) e'')
Nothing -> resugarError ("Unknown data type: " ++ show dataty)
resugarM (Fun code@(Rec name' _ _ _))
= -- Functions in Core are single-argument. Need to traverse body to
-- reconstruct multi-argument functions
do let (args, body) = resugarMultiFun code
body' <- resugarM body
return (RFun (RRec name' args body'))
resugarM Hole = return RHole
resugarM (Seq e1 e2)
= do e1' <- resugarM e1
e2' <- resugarM e2
return (RSeq e1' e2')
-- These should never happen in a well-formed program
resugarM (InL e)
= resugarError ("Left data constructor not wrapped in roll, can't resugar "
++ show e)
resugarM (InR e)
= resugarError ("Right data constructor not wrapped in roll, can't resugar "
++ show e)
resugarM (Roll _ e)
= resugarError ("Non-constructor expression wrapped in a roll, can't resugar "
++ show e)
resugarM (Unroll _ e)
= resugarError ("Unroll outside of case scrutinee, can't resugar "
++ show e)
instance Resugarable Exp where
resugarM (EIf c e1 e2)
= do c' <- resugarM c
e1' <- resugarM e1
e2' <- resugarM e2
return (RIf c' e1' e2')
resugarM (ECase (EUnroll dataty e) (Match alt1 alt2))
= do e' <- resugarM e
m' <- resugarMatch dataty alt1 alt2
return (RCase e' m')
resugarM (ECase e _)
= resugarError ("Case scrutinee not wrapped in unroll, can't resugar "
++ show e)
resugarM (EApp e1@(EApp _ _) e2)
= do ~(RApp e1' e1'') <- resugarM e1
e2' <- resugarM e2
return (RApp e1' (e1'' ++ [e2']))
resugarM (EApp e1 e2)
= do e1' <- resugarM e1
e2' <- resugarM e2
return (RApp e1' [e2'])
resugarM (EOp f args)
= do args' <- mapM resugarM args
return (ROp f args')
resugarM (ETrace e)
= do e' <- resugarM e
return (RTrace e')
resugarM (ERef e)
= do e' <- resugarM e
return (RRef e')
resugarM (EDeref e)
= do e' <- resugarM e
return (RDeref e')
resugarM (EAssign e1 e2)
= do e1' <- resugarM e1
e2' <- resugarM e2
return (RAssign e1' e2')
resugarM (EWhile e1 e2)
= do e1' <- resugarM e1
e2' <- resugarM e2
return (RWhile e1' e2')
resugarM (EArr e1 e2)
= do e1' <- resugarM e1
e2' <- resugarM e2
return (RArr e1' e2')
resugarM (EArrGet e1 e2)
= do e1' <- resugarM e1
e2' <- resugarM e2
return (RArrGet e1' e2')
resugarM (EArrSet e1 e2 e3)
= do e1' <- resugarM e1
e2' <- resugarM e2
e3' <- resugarM e3
return (RArrSet e1' e2' e3')
resugarM (ERaise e)
= do e' <- resugarM e
return (RRaise e')
resugarM (ETryWith e x h)
= do e' <- resugarM e
h' <- resugarM h
return (RCatch e' x h')
resugarM (Exp e) = resugarM e
instance Resugarable Trace where
resugarM t = resugarM (uneval t)
instance Resugarable Value where
resugarM VHole = return RHole
resugarM VUnit = return RUnit
resugarM (VBool b) = return (RBool b)
resugarM (VInt i) = return (RInt i)
resugarM (VDouble d) = return (RDouble d)
resugarM (VString s) = return (RString s)
resugarM (VPair v1 v2)
= do e1 <- resugarM v1
e2 <- resugarM v2
return (RPair e1 e2)
resugarM (VRoll dataty (VInL v))
= do e <- resugarM v
decls <- getDecls
case getTyDeclByName decls dataty of
Just decl -> return (RCon (conL decl) e)
Nothing -> resugarError ("Unknown data type: " ++ show dataty)
resugarM (VRoll dataty (VInR v))
= do e <- resugarM v
decls <- getDecls
case getTyDeclByName decls dataty of
Just decl -> return (RCon (conR decl) e)
Nothing -> resugarError ("Unknown data type: " ++ show dataty)
resugarM (VClosure v _) = resugarM (EFun v)
resugarM (VExp v _) = resugarM v
resugarM (VStoreLoc _) = return (RRef RHole)
resugarM (VArrLoc _ n) = return (RArr (RInt (toInteger n)) RHole)
resugarM (VTrace _ t _)
= do e <- resugarM t
return (RTrace e)
resugarM (VInL v)
= resugarError ("Left data value not wrapped in roll, can't resugar "
++ show v)
resugarM (VInR v)
= resugarError ("Right data value not wrapped in roll, can't resugar "
++ show v)
resugarM (VRoll _ v)
= resugarError ("Non-constructor value wrapped in a roll, can't resugar "
++ show v)
resugarM VStar
= resugarError ("Don't know how to resugar stars. " ++
"Where did you get this value from?" )
instance Resugarable Outcome where
resugarM OHole = return RHole
resugarM (ORet v) = resugarM v
resugarM (OExn v) = do e <- resugarM v
return (RException e)
resugarM OStar
= resugarError ("Don't know how to resugar stars. " ++
"Where did you get this value from?" )
--------------------------------------------------------------------------------
-- RESUGARING - HELPER FUNCTIONS --
--------------------------------------------------------------------------------
resugarMatch :: Resugarable a => TyVar -> (Maybe Var, a) -> (Maybe Var, a)
-> DesugarM RMatch
resugarMatch dataty (v1, e1) (v2, e2)
= do decls <- getDecls
e1' <- resugarM e1
e2' <- resugarM e2
case getTyDeclByName decls dataty of
Just decl ->
return (RMatch [ ((conL decl), v1, e1')
, ((conR decl), v2, e2') ] )
Nothing -> resugarError ("Unknown data type: " ++ show dataty)
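-- | Split nested single-argument functions into their list of arguments and
-- the innermost body, so that they can be displayed as one multi-argument
-- function.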
resugarMultiFun :: Code Exp -> ([Var], Exp)
resugarMultiFun c = reverse `A.first` go [] c
where go :: [Var] -> Code Exp -> ([Var], Exp)
go args (Rec _ arg (EFun code) _) = go (arg:args) code
go args (Rec _ arg body _) = (arg:args, body)
-- | Class of syntax trees that can contain InL or InR constructors. We need
-- this in Resugarable instance for Syntax, because Syntax can be
-- parameterized by Exp or Trace and in the Roll case we need a uniform way of
-- pattern matching on InL/InR.
class AskConstrs a where
maybeInL :: a -> Maybe a
maybeInR :: a -> Maybe a
instance AskConstrs Exp where
maybeInL (EInL e) = Just e
maybeInL _ = Nothing
maybeInR (EInR e) = Just e
maybeInR _ = Nothing
instance AskConstrs Trace where
maybeInL (TInL e) = Just e
maybeInL _ = Nothing
maybeInR (TInR e) = Just e
maybeInR _ = Nothing
--------------------------------------------------------------------------------
-- PRETTY PRINTING OF RESUGARED EXPRESSIONS --
--------------------------------------------------------------------------------
instance Pretty RExp where
pPrint RHole = text "_"
pPrint RUnit = text "()"
pPrint (RInt i) = integer i
pPrint (RDouble d) = double d
pPrint (RString s) = text (show s)
pPrint (RBool b) = if b then text "true" else text "false"
pPrint (RVar x) = pPrint x
pPrint (RLet x e1 e2)
= text "let" <+> pPrint x <+> equals <+> pPrint e1 <+> text "in" $$
pPrint e2
pPrint (RIf e e1 e2)
= text "if" <+> pPrint e
$$ text "then" <+> pPrint e1
$$ text "else" <+> pPrint e2
pPrint (ROp f es)
= case (isInfixOp f, es) of
(True, [e1, e2]) ->
partial_parensOpt e1 <+> pPrint f <+>
partial_parensOpt e2
_ -> pPrint f <> parens (hcat (punctuate comma (map pPrint es)))
pPrint (RPair e1 e2) = parens (pPrint e1 <> comma <+> pPrint e2)
pPrint (RFst e) = text "fst" <+> partial_parensOpt e
pPrint (RSnd e) = text "snd" <+> partial_parensOpt e
-- Special case to handle nullary constructors
pPrint (RCon c RUnit) = text (show c)
pPrint (RCon c e) = text (show c) <+> partial_parensOpt e
pPrint (RCase e m) = text "case" <+> pPrint e <+> text "of" $$
nest 2 (pPrint m)
pPrint (RFun k) = pPrint k
pPrint (RApp e1 e2) = fsep (pPrint e1 : map pPrint e2)
pPrint (RRef e) = text "ref" <+> partial_parensOpt e
pPrint (RDeref e) = text "!" <> partial_parensOpt e
pPrint (RAssign e1 e2) = pPrint e1 <+> text ":=" <+> pPrint e2
pPrint (RArr e1 e2) = text "array" <> parens (pPrint e1 <> comma <+> pPrint e2)
pPrint (RArrGet e1 e2) = text "get" <> parens (pPrint e1 <> comma <+> pPrint e2)
pPrint (RArrSet e1 e2 e3) = text "set" <> parens (pPrint e1 <> comma <+>
pPrint e2 <> comma <+> pPrint e3)
pPrint (RSeq e1 e2) = fsep [pPrint e1, text ";;", pPrint e2]
pPrint (RWhile e1 e2) = parens (text "while" <+> pPrint e1 <+> text "do" $$
nest 2 (pPrint e2))
pPrint (RTrace e) = text "trace" <+> partial_parensOpt e
pPrint (RRaise e) = text "raise" <+> partial_parensOpt e
pPrint (RCatch e x h) = text "try" $$ nest 2 (pPrint e) $$
text "with" <+> pPrint x <+> text "=>" $$
nest 2 (pPrint h)
pPrint (RException e) = text "<exception>" <+> partial_parensOpt e
instance Pretty RMatch where
pPrint (RMatch ms) = vcat (punctuate semi (map pp_match ms))
where pp_match (con, var, expr)
= text (show con) <+> pPrint var <+> text "->" <+>
pPrint expr
instance Pretty RCode where
pPrint (RRec name args body)
= parens (fsep [text "fun" <+> pPrint name <+> sep (map pPrint args)
<+> text "=>", (pPrint body)])
-- | Should the expression be wrapped in parentheses?
parenth :: RExp -> Bool
parenth RHole = False
parenth (RBool _) = False
parenth (RInt _) = False
parenth (RDouble _) = False
parenth (RString _) = False
parenth RUnit = False
parenth (RPair _ _) = False
parenth (RVar _) = False
parenth (RDeref _) = False
parenth _ = True
-- | Pretty-print an expression, wrapping it in parentheses if necessary
partial_parensOpt :: RExp -> Doc
partial_parensOpt e
= let doc = pPrint e in
if parenth e then parens doc else doc
| jstolarek/slicer | lib/Language/Slicer/Resugar.hs | gpl-3.0 | 17,789 | 0 | 16 | 5,948 | 5,940 | 2,888 | 3,052 | 357 | 2 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE GADTs #-}
-- | This module defines typed REST API signatures, common for both servers and
-- clients.
module Network.HTTP.Rest.Signature (
-- * REST Resource Signatures
RestSig(..),
HttpMethodKind(..),
HttpPathKind(..),
PathComponentKind(..),
HttpPathArgument(..),
PayloadEncoding(..),
-- Auxiliary definitions
MyProxy(..),
sText,
pathHead,
pathTail
) where
import Data.Text (Text, pack, unpack)
import Data.Typeable
import GHC.Exts
import GHC.TypeLits
-- | 'Proxy' defines its own, useless 'Show' instance :-(.
data MyProxy (x :: k) = MyProxy
-- | A data type representing the signature of a single REST resource. It uses
-- the datatype 'HttpMethodKind' promoted to a kind to express a type-list of accepted
-- http methods.
-- Note that @method :: HttpMethodKind@ and @path :: HttpPathKind@.
data RestSig :: HttpPathKind -> HttpMethodKind -> * -> * where
RestResource :: RestSig path method encoding
-- | A datatype representing a Http method. It's used promoted to a kind in its use in 'RestSig'.
data HttpMethodKind where
HttpGet :: response -> HttpMethodKind
HttpPost :: request -> response -> HttpMethodKind
HttpPut :: request -> response -> HttpMethodKind
HttpDelete :: response -> HttpMethodKind
-- | A HttpPath used exclusively as a Kind, to be able to specify resource urls as types only.
--
-- (constructor doc, to be placed appropriately pending haddock ticket #43):
--
-- [@component :/: rest@] Path composition, infix and right associative, with precedence 4,
-- which is low enough to not require parentheses when applied with 'A' and 'S'.
--
-- [@Nil@] Terminate a http path.
--
data HttpPathKind where
(:/:) :: PathComponentKind -> HttpPathKind -> HttpPathKind
Nil :: HttpPathKind
infixr 4 :/:
-- | The kind containing types of 'HttpPathKind' components. Components may be
-- either literals or variables with a type.
--
-- (constructor doc, to be placed appropriately pending haddock ticket #43):
--
-- [@S \"symbol\"@] A constant literal path component.
--
-- [@A \"symbol\" type@] A variable path component.
data PathComponentKind where
S :: Symbol -> PathComponentKind
A :: Symbol -> (a :: *) -> PathComponentKind
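-- An illustrative, hypothetical signature (@UserRecord@ and @GetUser@ are
-- made-up names): a resource at @/users/{userId :: Int}@ served over some
-- encoding @enc@ could be described as
--
-- > type GetUser enc =
-- >   RestSig (S "users" :/: A "userId" Int :/: Nil) (HttpGet UserRecord) enc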
instance (KnownSymbol a) => Show (MyProxy (S a)) where
show (MyProxy :: MyProxy (S a)) = symbolVal (MyProxy :: MyProxy a)
instance (KnownSymbol a, Typeable x) => Show (MyProxy (A a (x :: *))) where
show (MyProxy :: MyProxy (A a x)) = "{" ++ symbolVal (MyProxy :: MyProxy a) ++ " :: " ++ show (typeOf (undefined :: x)) ++ "}"
instance (Show (MyProxy c), Show (MyProxy httpPath)) => Show (MyProxy (c :/: httpPath)) where
show (MyProxy :: MyProxy (c :/: httpPath)) = show (MyProxy :: MyProxy c) ++ "/" ++ show (MyProxy :: MyProxy httpPath)
instance Show (MyProxy Nil) where
show _ = ""
-- | Extract a constant of a path component.
sText :: (KnownSymbol path) => Proxy (S path) -> Text
sText (_ :: Proxy (S path)) = pack $ symbolVal (Proxy :: Proxy path)
pathHead :: Proxy (path :/: rest) -> Proxy path
pathHead Proxy = Proxy
pathTail :: Proxy (path :/: rest) -> Proxy rest
pathTail Proxy = Proxy
-- | To properly serialise HttpPathFn path arguments we need a dedicated
-- typeclass, as opposed to just Show or ToJson. Instances of this class should
-- take care to obey the restrictions of resource paths in Http.
-- (Alternatively we could just urlencode the output of 'Show')
class HttpPathArgument a where
-- | Serialize a path argument.
toPathArg :: a -> Text
-- | Deserialize a path argument.
fromPathArg :: Text -> Maybe a
-- | This instance relies on 'Show'. Since Data.Text is all about efficiency
-- due to absence of list-based strings, it would be defeating to export this
-- instance in its current form.
instance HttpPathArgument Int where
toPathArg = pack . show
fromPathArg txt = case reads $ unpack txt of
[(i, "")] -> Just i
_ -> Nothing
-- | A type class for specifying request/response payload encodings.
class PayloadEncoding enc where
  type Encoder enc a :: Constraint
type EncodedRepr enc :: *
payloadDecode :: Encoder enc a => Proxy enc -> EncodedRepr enc -> Maybe a
payloadEncode :: Encoder enc a => Proxy enc -> a -> EncodedRepr enc
payloadMimeType :: Proxy enc -> Text
| plcplc/typed-rest | types/src/Network/HTTP/Rest/Signature.hs | gpl-3.0 | 4,700 | 0 | 12 | 865 | 920 | 523 | 397 | 72 | 1 |
module FQuoter.Display
( displayQuotes )
where
import System.Console.Haskeline
import Data.Maybe
import Data.List
import FQuoter.Quote
import FQuoter.Config.Config
import FQuoter.Templating.TemplateParser
import FQuoter.Templating.Display
import FQuoter.Serialize.SerializedTypes
displayQuotes :: Config -> [SerializedType] -> InputT IO ()
displayQuotes _ [] = outputStrLn "No result !"
displayQuotes conf st
= case parser of
Left err -> do let msg = ["Configuration file faulty."
,show err
,"Template cannot be parsed."
,"Falling back on default config."]
mapM_ outputStrLn msg
displayQuotes buildDefaultConfig st
Right x -> mapM_ (displayQuote x) st
where
parser = parseTemplate (currentTemplate conf)
displayQuote template (SQuote q) = mapM_ outputStrLn $ catMaybes displayed
where
displayed = [Just line
, Just $ "\"" ++ content q ++ "\""
, Just ""
, Just $ readTree template q
, comment q
, Just ""
, outputTagsArray $ tags q
, Just line]
outputTagsArray [] = Nothing
outputTagsArray xs = Just $ "Tags : " ++ intercalate "," xs
line = replicate 80 '-'
displayQuote _ _ = error "Not a quote. Should not happen."
| Raveline/FQuoter | src/FQuoter/Display.hs | gpl-3.0 | 1,506 | 0 | 14 | 555 | 350 | 179 | 171 | 35 | 2 |
-- | Definition of the syntax of formulas of first order, signatures,
-- and models; evaluation of formulas under an interpretation for the
-- signature.
module Sat.Core where
import Control.Applicative
import Control.Lens
import Data.Serialize
import qualified Data.Map as M
import qualified Data.Set as S
data Variable = Variable String
deriving (Eq,Ord,Show)
instance Serialize Variable where
put (Variable str) = put str
get = Variable <$> get
data Constant = Constant String
deriving (Eq,Ord,Show)
constName :: Constant -> String
constName (Constant str) = str
constn :: Lens' Constant String
constn = lens constName (const Constant)
strConst :: Lens' String Constant
strConst = lens Constant (const constName)
instance Serialize Constant where
put (Constant str) = put str
get = Constant <$> get
conName :: Constant -> String
conName = (^. constn)
data Function = Function { fname :: String
, farity :: Int
}
deriving (Eq,Ord,Show)
instance Serialize Function where
put (Function name arity) = put name >> put arity
get = Function <$> get <*> get
data Relation = Relation { rname :: String
, rarity :: Int
}
deriving (Eq,Ord,Show)
instance Serialize Relation where
put (Relation name arity) = put name >> put arity
get = Relation <$> get <*> get
-- Predicates are unary relations, but we represent them with a separate
-- data type since they will be of special importance.
data Predicate = Predicate { pname :: String
, pmain :: Bool
}
deriving (Eq,Ord,Show)
instance Serialize Predicate where
put (Predicate str b) = put str >> put b
get = Predicate <$> get <*> get
data Signature = Signature { constants :: S.Set Constant
, functions :: S.Set Function
, predicates :: S.Set Predicate
, relations :: S.Set Relation
}
instance Serialize Signature where
put (Signature c f p r) = put c >> put f >> put p >> put r
get = Signature <$> get <*> get <*> get <*> get
-- | Terms.
data Term = Var Variable | Con Constant | Fun Function [Term]
deriving Show
isTermOfTao :: Term -> Signature -> Bool
isTermOfTao (Var _) _ = True
isTermOfTao (Con c) s = S.member c (constants s)
isTermOfTao (Fun f terms) s =
S.member f (functions s) &&
length terms == (farity f) &&
all (flip isTermOfTao s) terms
freeVars :: Term -> S.Set Variable
freeVars (Var v) = S.singleton v
freeVars (Con _) = S.empty
freeVars (Fun _ ts) = S.unions $ map freeVars ts
-- | Well-formed formulas.
data Formula = FTrue | FFalse
| Eq Term Term
| Neg Formula
| And Formula Formula | Or Formula Formula
| Impl Formula Formula | Equiv Formula Formula
| ForAll Variable Formula | Exist Variable Formula
| Pred Predicate Term
| Rel Relation [Term]
deriving Show
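-- | A small illustrative example (not used elsewhere in this module): the
-- closed formula "for every x, x equals x".
exampleReflexivity :: Formula
exampleReflexivity = ForAll x (Eq (Var x) (Var x))
  where x = Variable "x"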
-- | We can only evaluate closed formulas
isClosed :: Formula -> Bool
isClosed = isClosed' S.empty
where isClosed' :: S.Set Variable -> Formula -> Bool
isClosed' _ FTrue = True
isClosed' _ FFalse = True
isClosed' bv (Pred _ t) = S.null $ freeVars t S.\\ bv
isClosed' bv (Rel _ ts) = S.null $ (S.unions $ map freeVars ts) S.\\ bv
isClosed' bv (Eq t t') = S.null $ (freeVars t `S.union` freeVars t') S.\\ bv
isClosed' bv (Neg f) = isClosed' bv f
isClosed' bv (And f f') = isClosed' bv f && isClosed' bv f'
isClosed' bv (Or f f') = isClosed' bv f && isClosed' bv f'
isClosed' bv (Impl f f') = isClosed' bv f && isClosed' bv f'
isClosed' bv (Equiv f f') = isClosed' bv f && isClosed' bv f'
isClosed' bv (ForAll v f) = isClosed' (S.insert v bv) f
isClosed' bv (Exist v f) = isClosed' (S.insert v bv) f
-- A Model is an interpretation of a signature within a universe.
-- "subuniv" is the (generally finite) subset of the universe elements
-- that occur in the interpretations of the signature.
-- | Model with the universe of discourse as a parameter.
data Model univ = Model { interpConstants :: M.Map Constant univ
, interpFunctions :: M.Map Function ([univ] -> univ)
, interpRelations :: M.Map Relation [[univ]]
, interpPredicates :: M.Map Predicate [univ]
, subuniv :: [univ]
}
instance (Show u) => Show (Model u) where
show (Model { interpConstants = ic
, interpFunctions = _
, interpRelations = ir
, interpPredicates = ip
, subuniv = su
}) = "Model \n\tConstants= {"++ show ic ++"}\n\t"++
"Relations= {"++ show ir ++"}\n\t"++
"Predicates= {"++ show ip ++"}\n\t"++
"subuniv= {"++ show su ++"}\n\t"
-- | Interpretation for free-variables.
type Env a = M.Map Variable a
-- | Evaluation of terms under an environment.
evalTerm :: Model a -> Env a -> Term -> a
evalTerm _ e (Var v) = maybe (error "evalTerm: free variable") id $ M.lookup v e
evalTerm m _ (Con c) = maybe (error $ "evalTerm: unknown constant " ++ constName c) id $ M.lookup c (interpConstants m)
evalTerm m e (Fun f ts) = maybe (error $ "evalTerm: unknown function " ++ fname f) ($ map (evalTerm m e) ts) $ M.lookup f (interpFunctions m)
-- | Evaluation of formulas under a model and an environment. This
-- function is total only for finite models.
eval :: (Eq a) => Formula -> Model a -> Env a -> Bool
eval FTrue _ _ = True
eval FFalse _ _ = False
eval (Eq t t') m e = evalTerm m e t == evalTerm m e t'
eval (And p q) m e = eval p m e && eval q m e
eval (Or p q) m e = eval p m e || eval q m e
eval (Impl p q) m e = eval p m e <= eval q m e
eval (Equiv p q) m e = eval p m e == eval q m e
eval (Neg p) m e = not $ eval p m e
eval (ForAll v p) m e = and $ map (\a -> eval p m (M.insert v a e)) (subuniv m)
eval (Exist v p) m e = or $ map (\a -> eval p m (M.insert v a e)) (subuniv m)
eval (Pred p t) m e = maybe False (any ((==) $ evalTerm m e t)) $ M.lookup p (interpPredicates m)
eval (Rel r ts) m e = maybe False (any (map (evalTerm m e) ts ==)) $ M.lookup r (interpRelations m)
| manugunther/sat | Sat/Core.hs | gpl-3.0 | 6,364 | 0 | 17 | 1,915 | 2,277 | 1,171 | 1,106 | 119 | 12 |
import Development.Hake
import Development.Hake.FunSetIO
import System.Environment (getArgs)
import Control.Applicative ((<$>))
main = do
args <- getArgs
let add = getVals "add" args
add2 = getVals "add2" args
hake [
dflt [ "task1" ]
,
file [ "task1" ] [ "task2" ] $ rawSystemE . ("echo":) . (:[])
,
task "task2" $ do
rawSystemE [ "echo", "task2" ] `orDie` show
(case add of
[] -> flip orDie show
_ -> (>> rawSystemE add `orDie` show)) $
rawSystemE [ "./exitWith", "123" ]
rawSystemE [ "echo", "task2", "running" ]
(case add2 of
[] -> (>> return ExitSuccess) . flip orDie show
_ -> (>> rawSystemE add2)) $
rawSystemE [ "./exitWith", "123" ]
]
| YoshikuniJujo/hake_haskell | examples/exitTest/hakeMainIO.hs | gpl-3.0 | 745 | 2 | 19 | 212 | 275 | 147 | 128 | 22 | 3 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.BigQuery.Types
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.BigQuery.Types
(
-- * Service Configuration
bigQueryService
-- * OAuth Scopes
, cloudPlatformReadOnlyScope
, cloudPlatformScope
, storageReadOnlyScope
, bigQueryInsertDataScope
, storageReadWriteScope
, bigQueryScope
, storageFullControlScope
-- * JobReference
, JobReference
, jobReference
, jrJobId
, jrProjectId
-- * TableList
, TableList
, tableList
, tlTotalItems
, tlEtag
, tlNextPageToken
, tlKind
, tlTables
-- * DataSetListDataSetsItem
, DataSetListDataSetsItem
, dataSetListDataSetsItem
, dsldsiFriendlyName
, dsldsiKind
, dsldsiDataSetReference
, dsldsiId
, dsldsiLabels
-- * TableDataList
, TableDataList
, tableDataList
, tdlEtag
, tdlKind
, tdlRows
, tdlPageToken
, tdlTotalRows
-- * JobConfigurationTableCopy
, JobConfigurationTableCopy
, jobConfigurationTableCopy
, jctcDestinationTable
, jctcWriteDisPosition
, jctcSourceTables
, jctcCreateDisPosition
, jctcSourceTable
-- * TableListTablesItem
, TableListTablesItem
, tableListTablesItem
, tltiTableReference
, tltiFriendlyName
, tltiKind
, tltiId
, tltiLabels
, tltiType
-- * TableSchema
, TableSchema
, tableSchema
, tsFields
-- * ProjectList
, ProjectList
, projectList
, plTotalItems
, plEtag
, plNextPageToken
, plKind
, plProjects
-- * ExplainQueryStep
, ExplainQueryStep
, explainQueryStep
, eqsSubsteps
, eqsKind
-- * QueryParameterTypeStructTypesItem
, QueryParameterTypeStructTypesItem
, queryParameterTypeStructTypesItem
, qptstiName
, qptstiType
, qptstiDescription
-- * BigtableColumnFamily
, BigtableColumnFamily
, bigtableColumnFamily
, bcfFamilyId
, bcfColumns
, bcfOnlyReadLatest
, bcfType
, bcfEncoding
-- * JobStatistics
, JobStatistics
, jobStatistics
, jsCreationTime
, jsStartTime
, jsLoad
, jsTotalBytesProcessed
, jsEndTime
, jsQuery
, jsExtract
-- * JobConfigurationLabels
, JobConfigurationLabels
, jobConfigurationLabels
, jclAddtional
-- * DataSet
, DataSet
, dataSet
, dsCreationTime
, dsAccess
, dsEtag
, dsLocation
, dsFriendlyName
, dsKind
, dsLastModifiedTime
, dsDataSetReference
, dsSelfLink
, dsId
, dsLabels
, dsDefaultTableExpirationMs
, dsDescription
-- * BigtableOptions
, BigtableOptions
, bigtableOptions
, boReadRowkeyAsString
, boIgnoreUnspecifiedColumnFamilies
, boColumnFamilies
-- * ExternalDataConfiguration
, ExternalDataConfiguration
, externalDataConfiguration
, edcBigtableOptions
, edcIgnoreUnknownValues
, edcCompression
, edcSourceFormat
, edcSchema
, edcMaxBadRecords
, edcGoogleSheetsOptions
, edcAutodetect
, edcSourceURIs
, edcCSVOptions
-- * TableReference
, TableReference
, tableReference
, trDataSetId
, trProjectId
, trTableId
-- * TableFieldSchema
, TableFieldSchema
, tableFieldSchema
, tfsMode
, tfsName
, tfsType
, tfsDescription
, tfsFields
-- * GetQueryResultsResponse
, GetQueryResultsResponse
, getQueryResultsResponse
, gqrrJobReference
, gqrrEtag
, gqrrKind
, gqrrSchema
, gqrrTotalBytesProcessed
, gqrrRows
, gqrrPageToken
, gqrrNumDmlAffectedRows
, gqrrTotalRows
, gqrrErrors
, gqrrJobComplete
, gqrrCacheHit
-- * DataSetList
, DataSetList
, dataSetList
, dslEtag
, dslNextPageToken
, dslKind
, dslDataSets
-- * QueryRequest
, QueryRequest
, queryRequest
, qrUseQueryCache
, qrPreserveNulls
, qrKind
, qrQueryParameters
, qrQuery
, qrParameterMode
, qrTimeoutMs
, qrUseLegacySQL
, qrDryRun
, qrMaxResults
, qrDefaultDataSet
-- * JobsListProjection
, JobsListProjection (..)
-- * QueryParameter
, QueryParameter
, queryParameter
, qpParameterValue
, qpParameterType
, qpName
-- * JobStatistics4
, JobStatistics4
, jobStatistics4
, jsDestinationURIFileCounts
-- * ProjectReference
, ProjectReference
, projectReference
, prProjectId
-- * ExplainQueryStage
, ExplainQueryStage
, explainQueryStage
, eqsStatus
, eqsWaitRatioMax
, eqsRecordsWritten
, eqsSteps
, eqsWriteRatioAvg
, eqsRecordsRead
, eqsComputeRatioAvg
, eqsName
, eqsReadRatioMax
, eqsWaitRatioAvg
, eqsId
, eqsComputeRatioMax
, eqsWriteRatioMax
, eqsReadRatioAvg
-- * JobConfigurationLoad
, JobConfigurationLoad
, jobConfigurationLoad
, jclSkipLeadingRows
, jclProjectionFields
, jclDestinationTable
, jclWriteDisPosition
, jclAllowJaggedRows
, jclSchemaInline
, jclIgnoreUnknownValues
, jclSchemaUpdateOptions
, jclCreateDisPosition
, jclSchemaInlineFormat
, jclAllowQuotedNewlines
, jclSourceFormat
, jclSchema
, jclQuote
, jclMaxBadRecords
, jclAutodetect
, jclSourceURIs
, jclEncoding
, jclFieldDelimiter
, jclNullMarker
-- * JobsListStateFilter
, JobsListStateFilter (..)
-- * DataSetReference
, DataSetReference
, dataSetReference
, dsrDataSetId
, dsrProjectId
-- * TableDataInsertAllRequest
, TableDataInsertAllRequest
, tableDataInsertAllRequest
, tdiarKind
, tdiarIgnoreUnknownValues
, tdiarRows
, tdiarTemplateSuffix
, tdiarSkipInvalidRows
-- * ProjectListProjectsItem
, ProjectListProjectsItem
, projectListProjectsItem
, plpiFriendlyName
, plpiKind
, plpiProjectReference
, plpiId
, plpiNumericId
-- * BigtableColumn
, BigtableColumn
, bigtableColumn
, bcQualifierEncoded
, bcFieldName
, bcQualifierString
, bcOnlyReadLatest
, bcType
, bcEncoding
-- * Streamingbuffer
, Streamingbuffer
, streamingbuffer
, sEstimatedBytes
, sOldestEntryTime
, sEstimatedRows
-- * TableRow
, TableRow
, tableRow
, trF
-- * JobListJobsItem
, JobListJobsItem
, jobListJobsItem
, jljiJobReference
, jljiStatus
, jljiState
, jljiUserEmail
, jljiKind
, jljiErrorResult
, jljiId
, jljiStatistics
, jljiConfiguration
-- * TimePartitioning
, TimePartitioning
, timePartitioning
, tpExpirationMs
, tpType
-- * QueryParameterValueStructValues
, QueryParameterValueStructValues
, queryParameterValueStructValues
, qpvsvAddtional
-- * DataSetLabels
, DataSetLabels
, dataSetLabels
, dslAddtional
-- * JobConfiguration
, JobConfiguration
, jobConfiguration
, jcCopy
, jcLoad
, jcQuery
, jcExtract
, jcLabels
, jcDryRun
-- * Job
, Job
, job
, jJobReference
, jStatus
, jEtag
, jUserEmail
, jKind
, jSelfLink
, jId
, jStatistics
, jConfiguration
-- * TableDataInsertAllResponseInsertErrorsItem
, TableDataInsertAllResponseInsertErrorsItem
, tableDataInsertAllResponseInsertErrorsItem
, tdiarieiErrors
, tdiarieiIndex
-- * JobConfigurationExtract
, JobConfigurationExtract
, jobConfigurationExtract
, jceDestinationFormat
, jceSourceTable
, jcePrintHeader
, jceCompression
, jceDestinationURIs
, jceDestinationURI
, jceFieldDelimiter
-- * JobCancelResponse
, JobCancelResponse
, jobCancelResponse
, jcrKind
, jcrJob
-- * JSONObject
, JSONObject
, jsonObject
, joAddtional
-- * JobConfigurationQuery
, JobConfigurationQuery
, jobConfigurationQuery
, jcqDestinationTable
, jcqWriteDisPosition
, jcqPriority
, jcqUseQueryCache
, jcqPreserveNulls
, jcqTableDefinitions
, jcqQueryParameters
, jcqSchemaUpdateOptions
, jcqMaximumBytesBilled
, jcqCreateDisPosition
, jcqUserDefinedFunctionResources
, jcqAllowLargeResults
, jcqMaximumBillingTier
, jcqQuery
, jcqFlattenResults
, jcqParameterMode
, jcqUseLegacySQL
, jcqDefaultDataSet
-- * GoogleSheetsOptions
, GoogleSheetsOptions
, googleSheetsOptions
, gsoSkipLeadingRows
-- * TableDataInsertAllRequestRowsItem
, TableDataInsertAllRequestRowsItem
, tableDataInsertAllRequestRowsItem
, tdiarriJSON
, tdiarriInsertId
-- * JobList
, JobList
, jobList
, jlEtag
, jlNextPageToken
, jlKind
, jlJobs
-- * JobConfigurationQueryTableDefinitions
, JobConfigurationQueryTableDefinitions
, jobConfigurationQueryTableDefinitions
, jcqtdAddtional
-- * TableCell
, TableCell
, tableCell
, tcV
-- * QueryParameterValue
, QueryParameterValue
, queryParameterValue
, qpvStructValues
, qpvValue
, qpvArrayValues
-- * ViewDefinition
, ViewDefinition
, viewDefinition
, vdUserDefinedFunctionResources
, vdQuery
, vdUseLegacySQL
-- * UserDefinedFunctionResource
, UserDefinedFunctionResource
, userDefinedFunctionResource
, udfrResourceURI
, udfrInlineCode
-- * JobStatistics2
, JobStatistics2
, jobStatistics2
, jSchema
, jTotalBytesProcessed
, jBillingTier
, jUndeclaredQueryParameters
, jReferencedTables
, jStatementType
, jNumDmlAffectedRows
, jQueryPlan
, jCacheHit
, jTotalBytesBilled
-- * JobStatus
, JobStatus
, jobStatus
, jsState
, jsErrorResult
, jsErrors
-- * TableLabels
, TableLabels
, tableLabels
, tlAddtional
-- * DataSetAccessItem
, DataSetAccessItem
, dataSetAccessItem
, dsaiGroupByEmail
, dsaiDomain
, dsaiSpecialGroup
, dsaiRole
, dsaiView
, dsaiUserByEmail
-- * TableDataInsertAllResponse
, TableDataInsertAllResponse
, tableDataInsertAllResponse
, tKind
, tInsertErrors
-- * QueryParameterType
, QueryParameterType
, queryParameterType
, qptStructTypes
, qptType
, qptArrayType
-- * Table
, Table
, table
, tabCreationTime
, tabEtag
, tabNumBytes
, tabExternalDataConfiguration
, tabLocation
, tabTableReference
, tabFriendlyName
, tabKind
, tabLastModifiedTime
, tabSchema
, tabStreamingBuffer
, tabSelfLink
, tabTimePartitioning
, tabNumRows
, tabView
, tabId
, tabLabels
, tabType
, tabNumLongTermBytes
, tabExpirationTime
, tabDescription
-- * ErrorProto
, ErrorProto
, errorProto
, epDebugInfo
, epLocation
, epReason
, epMessage
-- * CSVOptions
, CSVOptions
, csvOptions
, coSkipLeadingRows
, coAllowJaggedRows
, coAllowQuotedNewlines
, coQuote
, coEncoding
, coFieldDelimiter
-- * JobStatistics3
, JobStatistics3
, jobStatistics3
, jsInputFiles
, jsOutputRows
, jsOutputBytes
, jsInputFileBytes
-- * QueryResponse
, QueryResponse
, queryResponse
, qJobReference
, qKind
, qSchema
, qTotalBytesProcessed
, qRows
, qPageToken
, qNumDmlAffectedRows
, qTotalRows
, qErrors
, qJobComplete
, qCacheHit
-- * DataSetListDataSetsItemLabels
, DataSetListDataSetsItemLabels
, dataSetListDataSetsItemLabels
, dsldsilAddtional
-- * TableListTablesItemLabels
, TableListTablesItemLabels
, tableListTablesItemLabels
, tltilAddtional
) where
import Network.Google.BigQuery.Types.Product
import Network.Google.BigQuery.Types.Sum
import Network.Google.Prelude
-- | Default request referring to version 'v2' of the BigQuery API. This contains the host and root path used as a starting point for constructing service requests.
bigQueryService :: ServiceConfig
bigQueryService
= defaultService (ServiceId "bigquery:v2")
"www.googleapis.com"
-- | View your data across Google Cloud Platform services
cloudPlatformReadOnlyScope :: Proxy '["https://www.googleapis.com/auth/cloud-platform.read-only"]
cloudPlatformReadOnlyScope = Proxy;
-- | View and manage your data across Google Cloud Platform services
cloudPlatformScope :: Proxy '["https://www.googleapis.com/auth/cloud-platform"]
cloudPlatformScope = Proxy;
-- | View your data in Google Cloud Storage
storageReadOnlyScope :: Proxy '["https://www.googleapis.com/auth/devstorage.read_only"]
storageReadOnlyScope = Proxy;
-- | Insert data into Google BigQuery
bigQueryInsertDataScope :: Proxy '["https://www.googleapis.com/auth/bigquery.insertdata"]
bigQueryInsertDataScope = Proxy;
-- | Manage your data in Google Cloud Storage
storageReadWriteScope :: Proxy '["https://www.googleapis.com/auth/devstorage.read_write"]
storageReadWriteScope = Proxy;
-- | View and manage your data in Google BigQuery
bigQueryScope :: Proxy '["https://www.googleapis.com/auth/bigquery"]
bigQueryScope = Proxy;
-- | Manage your data and permissions in Google Cloud Storage
storageFullControlScope :: Proxy '["https://www.googleapis.com/auth/devstorage.full_control"]
storageFullControlScope = Proxy;
| rueshyna/gogol | gogol-bigquery/gen/Network/Google/BigQuery/Types.hs | mpl-2.0 | 13,914 | 0 | 7 | 3,622 | 1,664 | 1,127 | 537 | 489 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.ServiceUsage.Services.BatchEnable
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Enable multiple services on a project. The operation is atomic: if
-- enabling any service fails, then the entire batch fails, and no state
-- changes occur. To enable a single service, use the \`EnableService\`
-- method instead.
--
-- /See:/ <https://cloud.google.com/service-usage/ Service Usage API Reference> for @serviceusage.services.batchEnable@.
module Network.Google.Resource.ServiceUsage.Services.BatchEnable
(
-- * REST Resource
ServicesBatchEnableResource
-- * Creating a Request
, servicesBatchEnable
, ServicesBatchEnable
-- * Request Lenses
, sbeParent
, sbeXgafv
, sbeUploadProtocol
, sbeAccessToken
, sbeUploadType
, sbePayload
, sbeCallback
) where
import Network.Google.Prelude
import Network.Google.ServiceUsage.Types
-- | A resource alias for @serviceusage.services.batchEnable@ method which the
-- 'ServicesBatchEnable' request conforms to.
type ServicesBatchEnableResource =
"v1" :>
Capture "parent" Text :>
"services:batchEnable" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] BatchEnableServicesRequest :>
Post '[JSON] Operation
-- | Enable multiple services on a project. The operation is atomic: if
-- enabling any service fails, then the entire batch fails, and no state
-- changes occur. To enable a single service, use the \`EnableService\`
-- method instead.
--
-- /See:/ 'servicesBatchEnable' smart constructor.
data ServicesBatchEnable =
ServicesBatchEnable'
{ _sbeParent :: !Text
, _sbeXgafv :: !(Maybe Xgafv)
, _sbeUploadProtocol :: !(Maybe Text)
, _sbeAccessToken :: !(Maybe Text)
, _sbeUploadType :: !(Maybe Text)
, _sbePayload :: !BatchEnableServicesRequest
, _sbeCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ServicesBatchEnable' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'sbeParent'
--
-- * 'sbeXgafv'
--
-- * 'sbeUploadProtocol'
--
-- * 'sbeAccessToken'
--
-- * 'sbeUploadType'
--
-- * 'sbePayload'
--
-- * 'sbeCallback'
servicesBatchEnable
:: Text -- ^ 'sbeParent'
-> BatchEnableServicesRequest -- ^ 'sbePayload'
-> ServicesBatchEnable
servicesBatchEnable pSbeParent_ pSbePayload_ =
ServicesBatchEnable'
{ _sbeParent = pSbeParent_
, _sbeXgafv = Nothing
, _sbeUploadProtocol = Nothing
, _sbeAccessToken = Nothing
, _sbeUploadType = Nothing
, _sbePayload = pSbePayload_
, _sbeCallback = Nothing
}
-- | Parent to enable services on. An example name would be:
-- \`projects\/123\` where \`123\` is the project number. The
-- \`BatchEnableServices\` method currently only supports projects.
sbeParent :: Lens' ServicesBatchEnable Text
sbeParent
= lens _sbeParent (\ s a -> s{_sbeParent = a})
-- | V1 error format.
sbeXgafv :: Lens' ServicesBatchEnable (Maybe Xgafv)
sbeXgafv = lens _sbeXgafv (\ s a -> s{_sbeXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
sbeUploadProtocol :: Lens' ServicesBatchEnable (Maybe Text)
sbeUploadProtocol
= lens _sbeUploadProtocol
(\ s a -> s{_sbeUploadProtocol = a})
-- | OAuth access token.
sbeAccessToken :: Lens' ServicesBatchEnable (Maybe Text)
sbeAccessToken
= lens _sbeAccessToken
(\ s a -> s{_sbeAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
sbeUploadType :: Lens' ServicesBatchEnable (Maybe Text)
sbeUploadType
= lens _sbeUploadType
(\ s a -> s{_sbeUploadType = a})
-- | Multipart request metadata.
sbePayload :: Lens' ServicesBatchEnable BatchEnableServicesRequest
sbePayload
= lens _sbePayload (\ s a -> s{_sbePayload = a})
-- | JSONP
sbeCallback :: Lens' ServicesBatchEnable (Maybe Text)
sbeCallback
= lens _sbeCallback (\ s a -> s{_sbeCallback = a})
instance GoogleRequest ServicesBatchEnable where
type Rs ServicesBatchEnable = Operation
type Scopes ServicesBatchEnable =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/service.management"]
requestClient ServicesBatchEnable'{..}
= go _sbeParent _sbeXgafv _sbeUploadProtocol
_sbeAccessToken
_sbeUploadType
_sbeCallback
(Just AltJSON)
_sbePayload
serviceUsageService
where go
= buildClient
(Proxy :: Proxy ServicesBatchEnableResource)
mempty
| brendanhay/gogol | gogol-serviceusage/gen/Network/Google/Resource/ServiceUsage/Services/BatchEnable.hs | mpl-2.0 | 5,639 | 0 | 17 | 1,276 | 789 | 463 | 326 | 115 | 1 |
-- | Parsing of structs.
module Data.GI.GIR.Struct
( Struct(..)
, parseStruct
) where
import Data.Text (Text)
import Data.GI.GIR.Allocation (AllocationInfo(..), unknownAllocationInfo)
import Data.GI.GIR.Field (Field, parseFields)
import Data.GI.GIR.Method (Method, MethodType(..), parseMethod)
import Data.GI.GIR.Parser
data Struct = Struct {
structIsBoxed :: Bool,
structAllocationInfo :: AllocationInfo,
structTypeInit :: Maybe Text,
structSize :: Int,
gtypeStructFor :: Maybe Name,
-- https://bugzilla.gnome.org/show_bug.cgi?id=560248
structIsDisguised :: Bool,
structFields :: [Field],
structMethods :: [Method],
structDeprecated :: Maybe DeprecationInfo,
structDocumentation :: Maybe Documentation }
deriving Show
parseStruct :: Parser (Name, Struct)
parseStruct = do
name <- parseName
deprecated <- parseDeprecation
doc <- parseDocumentation
structFor <- queryAttrWithNamespace GLibGIRNS "is-gtype-struct-for" >>= \case
Just t -> (fmap Just . qualifyName) t
Nothing -> return Nothing
typeInit <- queryAttrWithNamespace GLibGIRNS "get-type"
disguised <- optionalAttr "disguised" False parseBool
fields <- parseFields
constructors <- parseChildrenWithLocalName "constructor" (parseMethod Constructor)
methods <- parseChildrenWithLocalName "method" (parseMethod OrdinaryMethod)
functions <- parseChildrenWithLocalName "function" (parseMethod MemberFunction)
return (name,
Struct {
structIsBoxed = error ("[boxed] unfixed struct " ++ show name)
, structAllocationInfo = unknownAllocationInfo
, structTypeInit = typeInit
, structSize = error ("[size] unfixed struct " ++ show name)
, gtypeStructFor = structFor
, structIsDisguised = disguised
, structFields = fields
, structMethods = constructors ++ methods ++ functions
, structDeprecated = deprecated
, structDocumentation = doc
})
| hamishmack/haskell-gi | lib/Data/GI/GIR/Struct.hs | lgpl-2.1 | 2,024 | 0 | 15 | 443 | 469 | 263 | 206 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Lens
import Control.Monad.IO.Class
import qualified Control.Monad.RWS.Strict as RWS
------------------------------------------------------------------------------
import qualified LogData as L
import qualified RetrofitDebug as L
------------------------------------------------------------------------------
import Raft hiding (server)
import Types
main :: IO ()
main =
let (s, e) = mkSpecStateEnv
in RWS.void $ RWS.evalRWST server e s
server :: Raft IO a ()
server = do
r <- RWS.ask
liftIO (putStrLn "")
L.debugInfo L.exampleLogList'
liftIO (putStrLn "")
L.debugInfo $ L.debugUnexpectedS [L.TXT "internal error, there should be a next log entry"]
let me = r^.cfg.nodeId
liftIO (putStrLn "")
L.debugRaftHandler AE [L.NID me, L.TXT "sandbagging"] -- , show ae
return ()
| haroldcarr/learn-haskell-coq-ml-etc | haskell/topic/logging/structured-log-lines/app/Main.hs | unlicense | 952 | 0 | 11 | 219 | 247 | 130 | 117 | -1 | -1 |
rev = foldl (flip (:)) []
main = do
inputdata <- getContents
mapM_ putStrLn $ map show $ rev $ map (read :: String -> Int) $ lines inputdata
| itsbruce/hackerrank | func/intro/reverse.hs | unlicense | 148 | 0 | 11 | 36 | 74 | 36 | 38 | 4 | 1 |
module Utils.Step where
import Data.Const
import Data.World
import Data.Monster
import Data.Define
import Utils.Changes
import Utils.Monsters
import Utils.Stuff
import Utils.HealDamage
import Utils.Items
import Items.Stuff
import Monsters.Wave
import Monsters.Parts
import IO.Colors
import IO.Texts
import System.Random (StdGen, randomR)
import Data.List (sort)
import Data.Maybe (mapMaybe)
import qualified Data.Map as M
import qualified Data.Array as A
import Data.Functor ((<$>))
-- | return pair with minimum second element
minSnd :: (Ord b) => (a,b) -> (a,b) -> (a,b)
minSnd x y = if snd x > snd y then y else x
-- | find pair with minimum second element in the 'Map'
minValue :: (Ord k, Ord a) => M.Map k a -> (k, a)
minValue m = foldr1 minSnd $ M.toList m
-- | find the key and original value of the entry whose value is minimal under the given projection function
minimumOn :: (Ord b, Ord k) => (a -> b) -> M.Map k a -> (k, a)
minimumOn f m = (k, m M.! k) where
k = fst $ minValue $ f <$> m
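-- | Illustrative example (added; the literal map below is made up for
-- demonstration): the entry whose value is smallest under 'id'.
exampleMinimumOn :: (Int, Int)
exampleMinimumOn = minimumOn id $ M.fromList [(1, 10), (2, 3)]
-- == (2, 3)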
-- | return the monster's 'time' if it is alive and 0 otherwise
almostTime :: Monster -> Int
almostTime mon = if alive mon then time mon else 0
-- | find next monster (with minimum 'almostTime')
updateFirst :: World -> World
updateFirst w = w {units' = newUnits} where
newUnits = (units' w) {
xF = x,
yF = y,
getFirst' = monNew
}
((x, y), monNew) = minimumOn almostTime $ units w
-- | spawn a new wave if needed
newWaveIf :: World -> World
newWaveIf world =
if not (isPlayerNow world) ||
levelW world * 3 > wave world then newWorld
else callUpon world
where newWorld = cycleWorld world
-- | advance to the next monster and update the world accordingly
cycleWorld :: World -> World
cycleWorld w = rotAll $ tempFirst $ actTrapFirst $ regFirst $ cleanFirst
$ (addMessages (msgCleanParts monNew) newWorld) {units' = newUnits} where
newUnits = (units' newWorld) {
xF = x,
yF = y,
getFirst' = monNew
}
((x, y), monNew) = minimumOn almostTime $ units newWorld
newWorld = tickFirst w
-- | remove lost parts and add partial corpses
cleanFirst :: World -> World
cleanFirst w = changeMon (cleanParts $ getFirst w) $ dropPartialCorpse w
-- | remove first monster
remFirst :: World -> World
remFirst world = updateFirst $ world {action = Move, units' =
deleteU (xFirst world, yFirst world) $ units' world}
-- | find closest monster with ai == 'You'
closestPlayerChar :: Int -> Int -> World -> Maybe (Int, Int)
closestPlayerChar x y w =
if M.null yous || abs (x - xP) > xSight || abs (y - yP) > ySight
then Nothing
else Just (xP, yP)
where
yous = M.filter (\q -> case ai q of
You -> True
_ -> False) $ units w
closest (x1,y1) (x2,y2) =
if max (abs $ x1 - x) (abs $ y1 - y) >
max (abs $ x2 - x) (abs $ y2 - y)
then (x2, y2)
else (x1, y1)
(xP, yP) = foldr1 closest $ M.keys yous
-- | decrease the temporary effects of the first monster
tempFirst :: World -> World
tempFirst w = changeMon newMon w where
mon = getFirst w
newMon = mon {temp = decMaybe <$> temp mon}
-- | decrease value in Maybe Int
decMaybe :: Maybe Int -> Maybe Int
decMaybe Nothing = Nothing
decMaybe (Just n) = if n <= 0 then Nothing else Just (n - 1)
-- | add death drop and corpse to the monster inventory
addDeathDrop :: Monster -> StdGen -> (Monster, StdGen)
addDeathDrop mon g = (mon {inv = addCorpse
$ M.union (inv mon) newDrop}, newGen) where
(newDrop, newGen) = deathDrop (idM mon) g
corpse = corpseFromMon mon
addCorpse = if idM mon `elem` noCorpses
then id
else case nutrition corpse of
0 -> id
_ -> M.insert (head notAlphabet) (corpse, 1)
-- | update 'time' for first monster
tickFirst :: World -> World
tickFirst w = changeMon (tickFirstMon $ getFirst w) w where
tickFirstMon :: Monster -> Monster
tickFirstMon m = m {time = effectiveSlowness m + time m}
-- | sorted list of inventory letters of items satisfying the given predicate
listOfValidChars :: (Object -> Bool) -> World -> String
listOfValidChars f world = sort $ M.keys
$ M.filter (f . fst) $ inv $ getFirst world
-- | continue with the resulting world, spawning a new wave only if the action was correct
doIfCorrect :: (World, Bool) -> Either World a
doIfCorrect (rez, correct) =
if correct
then Left $ newWaveIf rez
else Left rez
-- | apply the trap on the current cell to the first monster
actTrapFirst :: World -> World
actTrapFirst w = addMessage (newMsg, red) $ changeMon newMon w {stdgen = g} where
x = xFirst w
y = yFirst w
mon = getFirst w
trap = terrain $ worldmap w A.! (x,y)
fireTrapped = (dmgRandomElem Fire (Just 8) mon $ stdgen w,
if name mon == "You"
then msgFireYou
else name mon ++ msgFire)
magicTrapped = ((newMon', g''), msgWand (title obj) (name mon)) where
(ind, g') = randomR (0, length wands - 1) $ stdgen w
obj = wands !! ind
(newMon', g'') = act obj (mon, g')
((newMon, g), newMsg) = case trap of
Water -> if isFlying mon then ((mon, stdgen w), "")
else (dmgRandom (Just 100) mon (stdgen w),
if name mon == "You"
then msgDrownYou
else name mon ++ msgDrown)
FireTrap -> fireTrapped
Bonfire -> fireTrapped
PoisonTrap -> (randTemp Poison (5, 15) (mon, stdgen w),
if name mon == "You"
then msgPoisonYou
else name mon ++ msgPoison)
MagicTrap -> magicTrapped
MagicNatural -> magicTrapped
_ -> ((mon, stdgen w), "")
-- | call upon the new wave
callUpon :: World -> World
callUpon w = addMessage (msgLanding (wave w) , red)
$ newWave $ cycleWorld w {action = Move}
-- | drop partial corpses to cells near the monster
dropPartialCorpse :: World -> World
dropPartialCorpse w =
if idM mon `elem` noCorpses then w
else (foldr ((.) . addItem . wrap . corpseFromPart mon) id
$ filter (not . aliveP) $ parts mon) w {stdgen = g'} where
mindx = if xFirst w == 0 then 0 else -1
maxdx = if xFirst w == maxX then 0 else 1
mindy = if yFirst w == 0 then 0 else -1
maxdy = if yFirst w == maxY then 0 else 1
(dx, g ) = randomR (mindx, maxdx) $ stdgen w
(dy, g') = randomR (mindy, maxdy) g
wrap obj = (xFirst w + dx, yFirst w + dy, obj, 1)
mon = getFirst w
-- | rot all corpses (both in inventory and on the ground)
rotAll :: World -> World
rotAll w = w {items = newItems, units' = (units' w) {list = newMons}} where
newMons = rotInv <$> units w
newItems = mapMaybe rotItemOnGround $ items w
rotItemOnGround arg@(x, y, obj, n)
| not $ isFood obj = Just arg
| rotRate obj >= rotTime obj = Nothing
| otherwise = Just (x, y, obj {rotTime = rotTime obj - rotRate obj}, n)
-- | rot all corpses in the inventory
rotInv :: Monster -> Monster
rotInv mon = mon {inv = M.mapMaybe rotItem $ inv mon} where
rotItem arg@(obj, n)
| not $ isFood obj = Just arg
| rotRate obj >= rotTime obj = Nothing
| otherwise = Just (obj {rotTime = rotTime obj - rotRate obj}, n)
| green-orange/trapHack | src/Utils/Step.hs | unlicense | 6,604 | 72 | 14 | 1,419 | 2,641 | 1,416 | 1,225 | 159 | 11 |
{-# OPTIONS_GHC -fno-warn-missing-methods #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE MonoLocalBinds #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
module Q where
{-
https://aphyr.com/posts/342-typing-the-technical-interview
Typing the technical interview 2017/04/10
-}
data Nil
data Cons x xs
class First list x | list -> x
instance First Nil Nil
instance First (Cons x more) x
class ListConcat a b c | a b -> c
instance ListConcat Nil x x
instance (ListConcat as bs cs)
=> ListConcat (Cons a as) bs (Cons a cs)
-- Concatenate all lists in a list
class ListConcatAll ls l | ls -> l
instance ListConcatAll Nil Nil
instance (ListConcat chunk acc result, ListConcatAll rest acc)
=> ListConcatAll (Cons chunk rest) result
data True
data False
-- Is any element of this list True?
class AnyTrue list t | list -> t
instance AnyTrue Nil False
instance AnyTrue (Cons True more) True
instance (AnyTrue list t)
=> AnyTrue (Cons False list) t
class Not b1 b | b1 -> b
instance Not False True
instance Not True False
class Or b1 b2 b | b1 b2 -> b
instance Or True True True
instance Or True False True
instance Or False True True
instance Or False False False
data Z
data S n
type N0 = Z
type N1 = S N0
type N2 = S N1
type N3 = S N2
type N4 = S N3
type N5 = S N4
type N6 = S N5
type N7 = S N6
type N8 = S N7
-- Equality
class PeanoEqual a b t | a b -> t
instance PeanoEqual Z Z True
instance PeanoEqual (S a) Z False
instance PeanoEqual Z (S b) False
instance (PeanoEqual a b t)
=> PeanoEqual (S a) (S b) t
-- Comparison (<)
class PeanoLT a b t | a b -> t
instance PeanoLT Z Z False
instance PeanoLT (S a) Z False
instance PeanoLT Z (S b) True
instance (PeanoLT a b t)
=> PeanoLT (S a) (S b) t
-- Absolute difference
class PeanoAbsDiff a b c | a b -> c
instance PeanoAbsDiff Z Z Z
instance PeanoAbsDiff Z (S b) (S b)
instance PeanoAbsDiff (S a) Z (S a)
instance (PeanoAbsDiff a b c)
=> PeanoAbsDiff (S a) (S b) c
-- Integers from n to 0
class Range n xs | n -> xs
instance Range Z Nil
instance (Range n xs)
=> Range (S n) (Cons n xs)
class LegalCompare t | -> t where
legalCompare :: t
instance (PeanoEqual (S Z) (S Z) t)
=> LegalCompare t
{-
:t legalCompare
=> :: True
-}
class IllegalCompare t | -> t where
illegalCompare :: t
instance (PeanoEqual True (Cons Z False) t)
=> IllegalCompare t
{-
:t illegalCompare
=> :: PeanoEqual True (Cons Z False) t => t
-}
class Apply f a r | f a -> r
data Conj1 list
instance Apply (Conj1 list) x (Cons x list)
-- Map f over a list
class Map f xs ys | f xs -> ys
instance Map f Nil Nil
instance (Apply f x y, Map f xs ys)
=> Map f (Cons x xs) (Cons y ys)
-- Map f over list and concatenate results together
class MapCat f xs zs | f xs -> zs
instance MapCat f Nil Nil
instance (Map f xs chunks, ListConcatAll chunks ys)
=> MapCat f xs ys
-- Filter a list with an Apply-able predicate function
class AppendIf pred x ys zs | pred x ys -> zs
instance AppendIf True x ys (Cons x ys)
instance AppendIf False x ys ys
class Filter f xs ys | f xs -> ys
instance Filter f Nil Nil
instance (Apply f x t,
Filter f xs ys,
AppendIf t x ys zs)
=> Filter f (Cons x xs) zs
data Queen x y
data Queen1 x
instance Apply (Queen1 x) y (Queen x y)
-- A list of queens in row x with y from 0 to n.
class QueensInRow n x queens | n x -> queens
instance (Range n ys, Map (Queen1 x) ys queens)
=> QueensInRow n x queens
-- Does queen a threaten queen b?
class Threatens a b t | a b -> t
instance (PeanoEqual ax bx xeq,
PeanoEqual ay by yeq,
Or xeq yeq xyeq,
PeanoAbsDiff ax bx dx,
PeanoAbsDiff ay by dy,
PeanoEqual dx dy deq,
Or xyeq deq res)
=> Threatens (Queen ax ay) (Queen bx by) res
-- Partial application of Threatens
data Threatens1 a
instance (Threatens a b t)
=> Apply (Threatens1 a) b t
-- Is queen b compatible with all queen as?
class Safe config queen t | config queen -> t
instance (Map (Threatens1 queen) config m1,
AnyTrue m1 t1,
Not t1 t2)
=> Safe config queen t2
data Safe1 config
instance (Safe config queen t)
=> Apply (Safe1 config) queen t
-- Add a queen with the given x coordinate to a legal configuration, returning
-- a set of legal configurations.
class AddQueen n x c cs | n x c -> cs
instance (QueensInRow n x candidates,
Filter (Safe1 c) candidates filtered,
Map (Conj1 c) filtered cs)
=> AddQueen n x c cs
data AddQueen2 n x
instance (AddQueen n x c cs)
=> Apply (AddQueen2 n x) c cs
-- Add a queen at x to every configuration, returning a set of legal
-- configurations.
class AddQueenToAll n x cs cs' | n x cs -> cs'
instance (MapCat (AddQueen2 n x) cs cs')
=> AddQueenToAll n x cs cs'
-- Add queens recursively
class AddQueensIf pred n x cs cs' | pred n x cs -> cs'
instance AddQueensIf False n x cs cs
instance (AddQueenToAll n x cs cs2,
AddQueens n (S x) cs2 cs')
=> AddQueensIf True n x cs cs'
class AddQueens n x cs cs' | n x cs -> cs'
instance (PeanoLT x n pred,
AddQueensIf pred n x cs cs')
=> AddQueens n x cs cs'
-- Solve
class Solution n c | n -> c where
solution :: n -> c
instance (AddQueens n Z (Cons Nil Nil) cs, First cs c)
=> Solution n c where solution = undefined
{-
:t solution (undefined :: N6)
=>:: Cons (Queen (S (S (S (S (S Z))))) (S Z))
(Cons (Queen (S (S (S (S Z)))) (S (S (S Z))))
(Cons (Queen (S (S (S Z))) (S (S (S (S (S Z))))))
(Cons (Queen (S (S Z)) Z)
(Cons (Queen (S Z) (S (S Z)))
(Cons (Queen Z (S (S (S (S Z)))))
Nil)))))
:t solution (undefined :: N7)
:: Cons
(Queen (S (S (S (S (S (S Z)))))) (S N0))
(Cons
(Queen (S (S (S (S (S Z))))) (S N2))
(Cons
(Queen (S (S (S (S Z)))) (S N4))
(Cons
(Queen (S (S (S Z))) Z)
(Cons
(Queen (S (S Z)) (S N1))
(Cons (Queen (S Z) (S N3)) (Cons (Queen Z (S N5)) Nil))))))
-}
| haroldcarr/learn-haskell-coq-ml-etc | haskell/topic/type-level/2017-04-kyle-kingsbury-n-queens/Q.hs | unlicense | 6,385 | 0 | 8 | 1,890 | 1,993 | 1,040 | 953 | -1 | -1 |
{-# LANGUAGE
NoImplicitPrelude,
NoMonomorphismRestriction,
FlexibleInstances,
MultiParamTypeClasses,
TypeOperators,
TypeApplications,
ScopedTypeVariables
#-}
module DDF.Dual (module DDF.Dual, module DDF.Prod) where
import DDF.Prod
import qualified DDF.Meta.Dual as M
class Prod r => Dual r where
dual :: r h ((x, y) -> M.Dual x y)
runDual :: r h (M.Dual x y -> (x, y))
mkDual :: r h (x -> y -> M.Dual x y)
mkDual = curry1 dual
dualOrig :: r h (M.Dual x y -> x)
dualOrig = zro `com2` runDual
dualDiff :: r h (M.Dual x y -> y)
dualDiff = fst `com2` runDual
dualCmp :: r h (Cmp x -> Cmp (M.Dual x y))
dualCmp = cmpMap1 dualOrig
dualGetOrdC :: Ord r x :- OrdC r (M.Dual x y)
dual1 = app dual
mkDual2 = app2 mkDual
dualOrig1 = app dualOrig
dualDiff1 = app dualDiff
runDual1 = app1 runDual
instance (Ord r x, Dual r) => Ord r (M.Dual x y) where
cmp = app dualCmp cmp
getOrdC _ = Dict \\ dualGetOrdC @r @x @y
| ThoughtWorksInc/DeepDarkFantasy | DDF/Dual.hs | apache-2.0 | 947 | 0 | 13 | 212 | 399 | 209 | 190 | 31 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE FlexibleContexts #-}
-- A number of standard aggregation functions.
module Spark.Core.Internal.Groups(
GroupData,
LogicalGroupData,
-- Typed functions
groupByKey,
mapGroup,
aggKey,
groupAsDS
-- Developer
) where
import qualified Data.Text as T
import qualified Data.Vector as V
import Formatting
import Debug.Trace(trace)
import Spark.Core.Internal.DatasetStructures
import Spark.Core.Internal.ColumnStructures
import Spark.Core.Internal.ColumnFunctions(untypedCol, colType, colOp, iUntypedColData, colOrigin, castTypeCol, dropColReference, genColOp)
import Spark.Core.Internal.DatasetFunctions
import Spark.Core.Internal.LocalDataFunctions()
import Spark.Core.Internal.FunctionsInternals
import Spark.Core.Internal.TypesFunctions(tupleType, structTypeFromFields)
import Spark.Core.Internal.OpStructures
import Spark.Core.Internal.Projections
import Spark.Core.Internal.TypesStructures
import Spark.Core.Internal.Utilities
import Spark.Core.Internal.RowStructures(Cell)
import Spark.Core.Try
import Spark.Core.StructuresInternal
import Spark.Core.Internal.CanRename
{-| A dataset that has been partitioned according to some given field.
-}
data GroupData key val = GroupData {
-- The dataset of reference for this group
_gdRef :: !UntypedDataset,
-- The columns used to partition the data by keys.
_gdKey :: !GroupColumn,
-- The columns that contain the values.
_gdValue :: !GroupColumn
}
type LogicalGroupData = Try UntypedGroupData
-- A column in a group, that can be used either for key or for values.
-- It is different from the column data, because it does not include
-- broadcast data.
data GroupColumn = GroupColumn {
_gcType :: !DataType,
_gcOp :: !ColOp,
_gcRefName :: !(Maybe FieldName)
} deriving (Eq, Show)
{-| (developer)
A group data type with no typing information.
-}
type UntypedGroupData = GroupData Cell Cell
-- type GroupTry a = Either T.Text a
-- A useful type when chaining operations within groups.
data PipedTrans =
PipedError !T.Text
| PipedDataset !UntypedDataset
| PipedGroup !UntypedGroupData
deriving (Show)
{-| Performs a logical group of data based on a key.
-}
groupByKey :: (HasCallStack) => Column ref key -> Column ref val -> GroupData key val
groupByKey keys vals = forceRight $ _castGroup (colType keys) (colType vals) =<< _groupByKey (iUntypedColData keys) (iUntypedColData vals)
{-| Transforms the values in a group.
-}
-- This only allows direct transforms, so it is probably valid in all cases.
mapGroup :: GroupData key val -> (forall ref. Column ref val -> Column ref val') -> GroupData key val'
mapGroup g f =
let c = _valueCol g
c' = f (_unsafeCastColData c)
-- Assume for now that there is no broadcast.
-- TODO: deal with broadcast eventually
gVals = forceRight $ _groupCol c'
in g { _gdValue = gVals }
{-| The generalized value transform.
This generalizes mapGroup to allow more complex transforms involving joins,
groups, etc.
-}
-- TODO: this can fail
-- magGroupGen :: (forall ref. Column ref val -> Dataset val') -> GroupData key val -> GroupData key val'
-- magGroupGen _ _ = undefined
{-| Given a group and an aggregation function, aggregates the data.
Note: not all the reduction functions may be used in this case. The analyzer
will fail if the function is not universal.
-}
-- TODO: it should be a try, this can fail
aggKey :: (HasCallStack) => GroupData key val -> (forall ref. Column ref val -> LocalData val') -> Dataset (key, val')
aggKey gd f = trace "aggKey" $
let ugd = _untypedGroup gd
keyt = traceHint "aggKey: keyt: " $ mapGroupKeys gd colType
valt = traceHint "aggKey: valt: " $ mapGroupValues gd colType
-- We call the function twice: the first one to recover the type info,
-- and the second time to perform the unrolling.
-- TODO we should be able to do it in one pass instead.
fOut = traceHint "aggKey: fOut: " $ f (mapGroupValues gd dropColReference)
valt' = traceHint "aggKey: valt': " $ nodeType fOut
t = traceHint "aggKey: t: " $ tupleType keyt valt'
f' c = untypedLocalData . f <$> castTypeCol valt c
tud = traceHint "aggKey: tud: " $ _aggKey ugd f'
res = castType' t tud
in forceRight res
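{-| A convenience sketch added for illustration (not part of the original API):
group a dataset by a key column and immediately aggregate the values with the
given aggregation function. It simply composes 'groupByKey' and 'aggKey'.
-}
groupByKeyAndAgg :: (HasCallStack) => Column ref key -> Column ref val -> (forall ref'. Column ref' val -> LocalData val') -> Dataset (key, val')
groupByKeyAndAgg keys vals f = aggKey (groupByKey keys vals) f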
{-| Creates a group by 'expanding' a value into a potentially large collection.
Note on performance: this function is optimized to work at any scale and may not
be the most efficient when the generated collections are small (a few elements).
-}
-- TODO: it should be a try, this can fail
-- expand :: Column ref key -> Column ref val -> (LocalData val -> Dataset val') -> GroupData key val'
-- expand = undefined
{-| Builds groups within groups.
This function allows groups to be constructed from each collections inside a
group.
This function is usually not used directly by the user, but rather as part of
more complex pipelines that may involve multiple levels of nesting.
-}
-- groupInGroup :: GroupData key val -> (forall ref. Column ref val -> GroupData key' val') -> GroupData (key', key) val'
-- groupInGroup _ _ = undefined
{-| Reduces a group in group into a single group.
-}
-- aggGroup :: GroupData (key, key') val -> (forall ref. LocalData key -> Column ref val -> LocalData val') -> GroupData key val
-- aggGroup _ _ = undefined
{-| Returns the collapsed representation of a grouped dataset, discarding group
information.
-}
groupAsDS :: forall key val. GroupData key val -> Dataset (key, val)
groupAsDS g = pack s where
c1 = _unsafeCastColData (_keyCol g) :: Column UnknownReference key
c2 = _unsafeCastColData (_valueCol g) :: Column UnknownReference val
s = struct (c1, c2) :: Column UnknownReference (key, val)
mapGroupKeys :: GroupData key val -> (forall ref. Column ref key -> a) -> a
mapGroupKeys gd f =
f (_unsafeCastColData (_keyCol gd))
mapGroupValues :: GroupData key val -> (forall ref. Column ref val -> a) -> a
mapGroupValues gd f =
f (_unsafeCastColData (_valueCol gd))
-- ******** INSTANCES ***********
instance Show (GroupData key val) where
show gd = T.unpack s where
s = sformat ("GroupData[key="%sh%", val="%sh%"]") (_gdKey gd) (_gdValue gd)
-- ******** PRIVATE METHODS ********
_keyCol :: GroupData key val -> UntypedColumnData
_keyCol gd = ColumnData {
_cOrigin = _gdRef gd,
_cType = _gcType (_gdKey gd),
_cOp = genColOp . _gcOp . _gdKey $ gd,
_cReferingPath = _gcRefName . _gdKey $ gd
}
_valueCol :: GroupData key val -> UntypedColumnData
_valueCol gd = ColumnData {
_cOrigin = _gdRef gd,
_cType = _gcType (_gdValue gd),
_cOp = genColOp . _gcOp . _gdValue $ gd,
_cReferingPath = _gcRefName . _gdValue $ gd
}
_pError :: T.Text -> PipedTrans
_pError = PipedError
_unrollTransform :: PipedTrans -> NodeId -> UntypedNode -> PipedTrans
_unrollTransform start nid un | nodeId un == nid = start
_unrollTransform start nid un = case nodeParents un of
[p] ->
let pt' = _unrollTransform start nid p in _unrollStep pt' un
_ ->
_pError $ sformat (sh%": operations with multiple parents cannot be used in groups yet.") un
_unrollStep :: PipedTrans -> UntypedNode -> PipedTrans
_unrollStep pt un = traceHint ("_unrollStep: pt=" <> show' pt <> " un=" <> show' un <> " res=") $
let op = nodeOp un
dt = unSQLType (nodeType un) in case nodeParents un of
[p] ->
case (pt, op) of
(PipedError e, _) -> PipedError e
(PipedDataset ds, NodeStructuredTransform _) ->
      -- This is simply doing a DS -> DS transform.
-- TODO: this breaks the encapsulation of ComputeNode
let ds' = updateNode un (\un' -> un' { _cnParents = V.singleton (untyped ds)})
in PipedDataset ds'
(PipedGroup g, NodeStructuredTransform co) ->
_unrollGroupTrans g co
(PipedGroup g, NodeAggregatorReduction uao) ->
case uaoInitialOuter uao of
OpaqueAggTransform x -> _pError $ sformat ("Cannot apply opaque transform in the context of an aggregation: "%sh) x
InnerAggOp ao ->
PipedDataset $ _applyAggOp dt ao g
_ -> _pError $ sformat (sh%": Operation not supported with trans="%sh%" and parents="%sh) op pt p
l -> _pError $ sformat (sh%": expected one parent but got "%sh) un l
-- dt: output type of the aggregation op
_applyAggOp :: (HasCallStack) => DataType -> AggOp -> UntypedGroupData -> UntypedDataset
_applyAggOp dt ao ugd = traceHint ("_applyAggOp dt=" <> show' dt <> " ao=" <> show' ao <> " ugd=" <> show' ugd <> " res=") $
  -- Reset the names to make sure there are no collisions.
let c1 = untypedCol (_keyCol ugd) @@ T.unpack "_1"
c2 = untypedCol (_valueCol ugd) @@ T.unpack "_2"
s = struct' [c1, c2]
p = pack1 <$> s
ds = forceRight p
-- The structure of the result dataframe
keyDt = unSQLType (colType (_keyCol ugd))
st' = structTypeFromFields [(unsafeFieldName "key", keyDt), (unsafeFieldName "agg", dt)]
  -- The keys are different, so we know this operation is legit:
st = forceRight st'
resDt = SQLType . StrictType . Struct $ st
ds2 = emptyDataset (NodeGroupedReduction ao) resDt `parents` [untyped ds]
in ds2
_unrollGroupTrans :: UntypedGroupData -> ColOp -> PipedTrans
_unrollGroupTrans ugd co =
let gco = colOp (_valueCol ugd) in case colOpNoBroadcast gco of
Left x -> _pError $ "_unrollGroupTrans (1): using unimplemented feature:" <> show' x
Right co' -> case _combineColOp co' co of
      -- TODO: this is ugly, we are losing the error structure.
Left x -> _pError $ "_unrollGroupTrans (2): failure with " <> show' x
Right co'' -> case _groupCol $ _transformCol co'' (_valueCol ugd) of
Left x -> _pError $ "_unrollGroupTrans (3): failure with " <> show' x
Right g -> PipedGroup $ ugd { _gdValue = g }
-- TODO: this should be moved to ColumnFunctions
_transformCol :: ColOp -> UntypedColumnData -> UntypedColumnData
-- TODO: at this point, it should be checked for correctness (the fields
-- being extracted should exist)
_transformCol co ucd = ucd { _cOp = genColOp co }
-- Takes a column operation and chain it with another column operation.
_combineColOp :: ColOp -> ColOp -> Try ColOp
_combineColOp _ (x @ (ColLit _ _)) = pure x
_combineColOp x (ColFunction fn v) =
ColFunction fn <$> sequence (_combineColOp x <$> v)
_combineColOp x (ColExtraction fp) = _extractColOp x (V.toList (unFieldPath fp))
_combineColOp x (ColStruct v) =
ColStruct <$> sequence (f <$> v) where
f (TransformField n val) = TransformField n <$> _combineColOp x val
_extractColOp :: ColOp -> [FieldName] -> Try ColOp
_extractColOp x [] = pure x
_extractColOp (ColStruct s) (fn : t) =
case V.find (\x -> tfName x == fn) s of
Just (TransformField _ co) ->
_extractColOp co t
Nothing ->
tryError $ sformat ("Expected to find field "%sh%" in structure "%sh) fn s
_extractColOp x y =
tryError $ sformat ("Cannot perform extraction "%sh%" on column operation "%sh) y x
_aggKey :: UntypedGroupData -> (UntypedColumnData -> Try UntypedLocalData) -> Try UntypedDataset
_aggKey ugd f =
let inputDt = unSQLType . colType . _valueCol $ ugd
p = placeholder inputDt :: UntypedDataset
startNid = nodeId p in do
uld <- f (_unsafeCastColData (asCol p))
case _unrollTransform (PipedGroup ugd) startNid (untyped uld) of
PipedError t -> tryError t
PipedGroup g ->
-- This is a programming error
tryError $ sformat ("Expected a dataframe at the output but got a group: "%sh) g
PipedDataset ds -> pure ds
_unsafeCastColData :: Column ref a -> Column ref' a'
_unsafeCastColData c = c { _cType = _cType c }
{-| Checks that the group can be cast.
-}
_castGroup ::
SQLType key -> SQLType val -> UntypedGroupData -> Try (GroupData key val)
_castGroup (SQLType keyType) (SQLType valType) ugd =
let keyType' = unSQLType . colType . _keyCol $ ugd
valType' = unSQLType . colType . _valueCol $ ugd in
if keyType == keyType'
then if valType == valType'
then
pure ugd { _gdRef = _gdRef ugd }
else
tryError $ sformat ("The value column (of type "%sh%") cannot be cast to type "%sh) valType' valType
else
      tryError $ sformat ("The key column (of type "%sh%") cannot be cast to type "%sh) keyType' keyType
_untypedGroup :: GroupData key val -> UntypedGroupData
_untypedGroup gd = gd { _gdRef = _gdRef gd }
_groupByKey :: UntypedColumnData -> UntypedColumnData -> LogicalGroupData
_groupByKey keys vals =
if nodeId (colOrigin keys) == nodeId (colOrigin vals)
then
-- Get the latest data (packed)
-- TODO: put a scoping
let s = struct (keys, vals) :: Column UnknownReference (Cell, Cell)
ds = pack1 s
keys' = ds // _1
vals' = ds // _2
in do
gKeys <- _groupCol keys'
gVals <- _groupCol vals'
return GroupData {
_gdRef = colOrigin keys',
_gdKey = gKeys,
_gdValue = gVals
}
else
tryError $ sformat ("The columns have different origin: "%sh%" and "%sh) keys vals
_groupCol :: Column ref a -> Try GroupColumn
_groupCol c = do
co <- colOpNoBroadcast (colOp c)
return GroupColumn {
_gcType = unSQLType $ colType c,
_gcOp = co,
_gcRefName = Nothing
}
| krapsh/kraps-haskell | src/Spark/Core/Internal/Groups.hs | apache-2.0 | 13,399 | 1 | 25 | 2,878 | 3,282 | 1,706 | 1,576 | 227 | 7 |
import Control.Applicative
import Control.Monad
import Control.Monad.Loops
import Control.Monad.Primitive
import Control.Concurrent
import Control.Concurrent.Async
import Control.Concurrent.STM
import Criterion.Main
import Criterion.Measurement
import Data.Map (Map)
import Data.Monoid
import Data.Monoid.Statistics.Numeric hiding (Min, Max)
import Data.Semigroup
import Network.HTTP
import System.Random.MWC
import qualified Data.Map as Map
numberOfThreads = 4
numberOfRequests = 100 * 1000
numberOfRequestsPerThread = div numberOfRequests numberOfThreads
data Event = Event Int ResponseCode | Kill deriving Show
type Counts = Map ResponseCode Integer
main :: IO ()
main = withSystemRandom $ \gen -> do
shared <- atomically $ (newTQueue :: STM (TQueue Event))
runners <- replicateM numberOfThreads $ async (replicateM numberOfRequestsPerThread . nfIO $ action shared gen)
worker <- async (stats shared (\x -> (Min x, Max x, Mean 1 (fromIntegral x))))
_ <- waitAll runners
_ <- atomically $ writeTQueue shared Kill
count <- wait worker
_ <- putStrLn . show $ count
return () where
action :: TQueue Event -> Gen (PrimState IO) -> IO ()
action shared gen = do
path <- genString gen
(duration, code) <- time $ simpleHTTP (getRequest ("http://localhost/" ++ path)) >>= getResponseCode
_ <- atomically . writeTQueue shared $ Event (round (duration * 1000)) code
return ()
stats :: Monoid a => TQueue Event -> (Int -> a) -> IO (a, Counts)
stats shared f = stats_ shared f mempty Map.empty
stats_ :: Monoid a => TQueue Event -> (Int -> a) -> a -> Counts -> IO (a, Counts)
stats_ shared f m counts = do
event <- atomically $ readTQueue shared
case event of
Event duration code -> stats_ shared f (mappend m (f duration)) (Map.insert code ((Map.findWithDefault 0 code counts) + 1) counts)
Kill -> return (m, counts)
{-- TODO Move to async --}
waitAll :: [Async a] -> IO [(Async a, a)]
waitAll asyncs =
atomically . sequence $ map (\a -> do r <- waitSTM a; return (a, r)) asyncs
{-- TODO Move to commons --}
genString :: Gen (PrimState IO) -> IO String
genString gen = do
xs <- replicateM 4 $ uniformR (start, end) gen
return $ map toEnum xs where
start = fromEnum 'a'
end = fromEnum 'z'
| aloiscochard/influo | src/Main.hs | apache-2.0 | 2,388 | 0 | 19 | 558 | 877 | 446 | 431 | 54 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-|
Module : Network.NSQ.Identify
Description : The metadata component for formatting and parsing the metadata sent to nsqd as part of the feature negotiation done upon connection establish.
-}
module Network.NSQ.Identify
( defaultIdentify
, defaultUserAgent
, encodeMetadata
) where
import Prelude hiding (take)
import Data.Maybe
import qualified Data.Map.Strict as Map
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text as T
import Data.Aeson ((.=))
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as A
import Network.NSQ.Types
-- | Build a sensible default 'IdentifyMetadata': it simply sets the client
-- 'Identification' and leaves the rest of the settings up to the server
-- to determine.
defaultIdentify :: T.Text -> T.Text -> IdentifyMetadata
defaultIdentify cid host = IdentifyMetadata
{ ident = Identification cid host Nothing Nothing Nothing
, tls = Nothing
, compression = Nothing
, heartbeatInterval = Nothing
, outputBufferSize = Nothing
, outputBufferTimeout = Nothing
, sampleRate = Nothing
, custom = Nothing
, customNegotiation = False
}
-- | The default user agent to send, for identifying what client library is
-- connecting to the nsqd.
defaultUserAgent :: T.Text
defaultUserAgent = "hsnsq/0.1.2.0" -- TODO: find out how to identify this in the build step
-- | Generate a collection of Aeson pairs to insert into the json
-- that is being sent to the server as part of the metadata negotiation.
featureNegotiation :: IdentifyMetadata -> [A.Pair]
featureNegotiation im = catMaybes
(
tlsSettings (tls im)
++
[ optionalSettings "heartbeat_interval" (-1) $ heartbeatInterval im
, optionalSettings "output_buffer_size" (-1) $ outputBufferSize im
, optionalSettings "output_buffer_timeout" (-1) $ outputBufferTimeout im
, optionalSettings "sample_rate" 0 $ sampleRate im
]
++
optionalCompression (compression im)
)
-- | Take an optional setting and render an Aeson pair.
optionalSettings :: T.Text -> Int -> Maybe OptionalSetting -> Maybe A.Pair
optionalSettings _ _ Nothing = Nothing
optionalSettings name def (Just Disabled) = Just (name, A.toJSON def)
optionalSettings name _ (Just (Custom val)) = Just (name, A.toJSON val)
-- | Render the Aeson pairs for optional compression
optionalCompression :: Maybe Compression -> [Maybe A.Pair]
optionalCompression Nothing = []
optionalCompression (Just NoCompression) = Just `fmap` [ "snappy" .= False, "deflate" .= False ]
optionalCompression (Just Snappy) = Just `fmap` [ "snappy" .= True, "deflate" .= False ]
optionalCompression (Just (Deflate l)) = Just `fmap` [ "snappy" .= False, "deflate" .= True, "deflate_level" .= l ]
-- | Take the custom settings out of the custom map and render Aeson pairs
customMetadata :: Maybe (Map.Map T.Text T.Text) -> [A.Pair]
customMetadata Nothing = []
customMetadata (Just val) = Map.foldrWithKey (\k v xs -> (k .= v):xs) [] val
-- | TLS settings
tlsSettings :: Maybe TLS -> [Maybe A.Pair]
tlsSettings Nothing = []
tlsSettings (Just NoTLS) = [Just $ "tls_v1" .= False]
tlsSettings (Just TLSV1) = [Just $ "tls_v1" .= True]
-- TODO: This is an Orphan instance because the type is in types.hs, need to fix this
instance A.ToJSON IdentifyMetadata where
toJSON im@(IdentifyMetadata{ident=i}) = A.object
(
-- Identification section
[ "client_id" .= clientId i
, "hostname" .= hostname i
, "short_id" .= fromMaybe (clientId i) (shortId i)
, "long_id" .= fromMaybe (hostname i) (longId i)
, "user_agent" .= fromMaybe defaultUserAgent (userAgent i)
-- Feature Negotiation section
, "feature_negotiation" .= (not (null $ featureNegotiation im) || customNegotiation im)
]
++
featureNegotiation im
++
customMetadata (custom im)
)
-- | Encode the metadata from 'IdentifyMetadata' into a 'ByteString' for
-- feeding the 'Identify' 'Command' for sending the metadata to the nsq
-- daemon as part of the feature negotiation.
encodeMetadata :: IdentifyMetadata -> BL.ByteString
encodeMetadata = A.encode
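-- | A minimal usage sketch added for illustration (the client id and hostname
-- below are made-up values): build the default metadata and render the JSON
-- payload that would be sent to nsqd during feature negotiation.
examplePayload :: BL.ByteString
examplePayload = encodeMetadata (defaultIdentify "my-client" "my-hostname")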
| pharaun/hsnsq | src/Network/NSQ/Identify.hs | apache-2.0 | 4,377 | 0 | 17 | 980 | 966 | 532 | 434 | 69 | 1 |
{-# LANGUAGE PackageImports #-}
import "fff" Application (develMain)
import Prelude (IO)
main :: IO ()
main = develMain
| Drezil/FFF | app/devel.hs | apache-2.0 | 121 | 0 | 6 | 19 | 34 | 20 | 14 | 5 | 1 |
module Network.Server.MultiLevelDB.Const where
import Network.Server.MultiLevelDB.Util
import qualified Data.ByteString.Lazy.Char8 as B
import Data.Binary.Put (runPut, putWord8)
keyPrefix = B.head $ runPut $ putWord8 1
indexPrefix = B.head $ runPut $ putWord8 2
freePrefix = B.head $ runPut $ putWord8 3
lastPrimaryKey = put2Words 0 1
lastIndexKey = put2Words 0 2
lastFreeKey = put2Words 0 3
| wmoss/multileveldb | src/Network/Server/MultiLevelDB/Const.hs | bsd-2-clause | 397 | 0 | 7 | 58 | 123 | 70 | 53 | 10 | 1 |
module ExampleModel where
import qualified Data.Map as DataMap
import MonadServ
data ExampleReturnObject =
ExampleReturnObject
{ retCode :: String
, retValue :: Int
, labels :: [String]
}
instance JSONObject ExampleReturnObject where
toJSON ( ExampleReturnObject retCode retValue labels) =
Object (DataMap.fromList [("retCode", String retCode )
,("retValue", Number ((fromIntegral $ retValue)::Double))
,("labels", Array ( map (String) labels))
])
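-- Illustrative example value (added; the field contents are made up): it
-- serializes via 'toJSON' to an object with "retCode", "retValue" and
-- "labels" fields.
exampleResult :: ExampleReturnObject
exampleResult = ExampleReturnObject "OK" 42 ["alpha", "beta"]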
| jcaldwell/monadserv | src/ExampleModel.hs | bsd-2-clause | 628 | 0 | 14 | 236 | 142 | 83 | 59 | 13 | 0 |
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
-- | 'Patch'es on 'Map' that can insert, delete, and move values from one key to
-- another
module Reflex.Patch.MapWithMove where
import Reflex.Patch.Class
import Control.Arrow
import Control.Monad.State
import Data.Foldable
import Data.Function
import Data.List
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Semigroup (Semigroup (..), (<>))
import qualified Data.Set as Set
import Data.These (These(..))
import Data.Tuple
-- | Patch a 'Map' with additions, deletions, and moves. Invariant: If key @k1@
-- is coming from @From_Move k2@, then key @k2@ should be going to @Just k1@,
-- and vice versa. There should never be any unpaired From/To keys.
newtype PatchMapWithMove k v = PatchMapWithMove (Map k (NodeInfo k v)) deriving (Show, Eq, Ord, Functor, Foldable, Traversable)
-- | Holds the information about each key: where its new value should come from,
-- and where its old value should go to
data NodeInfo k v = NodeInfo
{ _nodeInfo_from :: !(From k v)
-- ^ Where do we get the new value for this key?
, _nodeInfo_to :: !(To k)
-- ^ If the old value is being kept (i.e. moved rather than deleted or
-- replaced), where is it going?
}
deriving (Show, Read, Eq, Ord, Functor, Foldable, Traversable)
-- | Describe how a key's new value should be produced
data From k v
= From_Insert v -- ^ Insert the given value here
| From_Delete -- ^ Delete the existing value, if any, from here
| From_Move !k -- ^ Move the value here from the given key
deriving (Show, Read, Eq, Ord, Functor, Foldable, Traversable)
-- | Describe where a key's old value will go. If this is 'Just', that means
-- the key's old value will be moved to the given other key; if it is 'Nothing',
-- that means it will be deleted.
type To = Maybe
-- | Create a 'PatchMapWithMove', validating it
patchMapWithMove :: Ord k => Map k (NodeInfo k v) -> Maybe (PatchMapWithMove k v)
patchMapWithMove m = if valid then Just $ PatchMapWithMove m else Nothing
where valid = forwardLinks == backwardLinks
forwardLinks = Map.mapMaybe _nodeInfo_to m
backwardLinks = Map.fromList $ catMaybes $ flip fmap (Map.toList m) $ \(to, v) ->
case _nodeInfo_from v of
From_Move from -> Just (from, to)
_ -> Nothing
-- | Create a 'PatchMapWithMove' that inserts everything in the given 'Map'
patchMapWithMoveInsertAll :: Map k v -> PatchMapWithMove k v
patchMapWithMoveInsertAll m = PatchMapWithMove $ flip fmap m $ \v -> NodeInfo
{ _nodeInfo_from = From_Insert v
, _nodeInfo_to = Nothing
}
-- | Extract the internal representation of the 'PatchMapWithMove'
unPatchMapWithMove :: PatchMapWithMove k v -> Map k (NodeInfo k v)
unPatchMapWithMove (PatchMapWithMove p) = p
-- | Make a @'PatchMapWithMove' k v@ which has the effect of inserting or updating a value @v@ to the given key @k@, like 'Map.insert'.
insertMapKey :: k -> v -> PatchMapWithMove k v
insertMapKey k v = PatchMapWithMove . Map.singleton k $ NodeInfo (From_Insert v) Nothing
-- |Make a @'PatchMapWithMove' k v@ which has the effect of moving the value from the first key @k@ to the second key @k@, equivalent to:
--
-- @
-- 'Map.delete' src (maybe map ('Map.insert' dst) (Map.lookup src map))
-- @
moveMapKey :: Ord k => k -> k -> PatchMapWithMove k v
moveMapKey src dst
| src == dst = mempty
| otherwise =
PatchMapWithMove $ Map.fromList
[ (dst, NodeInfo (From_Move src) Nothing)
, (src, NodeInfo From_Delete (Just dst))
]
-- |Make a @'PatchMapWithMove' k v@ which has the effect of swapping two keys in the mapping, equivalent to:
--
-- @
-- let aMay = Map.lookup a map
-- bMay = Map.lookup b map
-- in maybe id (Map.insert a) (bMay `mplus` aMay)
-- . maybe id (Map.insert b) (aMay `mplus` bMay)
-- . Map.delete a . Map.delete b $ map
-- @
swapMapKey :: Ord k => k -> k -> PatchMapWithMove k v
swapMapKey src dst
| src == dst = mempty
| otherwise =
PatchMapWithMove $ Map.fromList
[ (dst, NodeInfo (From_Move src) (Just src))
, (src, NodeInfo (From_Move dst) (Just dst))
]
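-- | A small sketch added for illustration (the example map below is made up):
-- swapping the values stored at keys 1 and 2.
exampleSwapMapKey :: Bool
exampleSwapMapKey =
  apply (swapMapKey 1 2) (Map.fromList [(1 :: Int, 'a'), (2, 'b')])
    == Just (Map.fromList [(1, 'b'), (2, 'a')])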
-- |Make a @'PatchMapWithMove' k v@ which has the effect of deleting a key in the mapping, equivalent to 'Map.delete'.
deleteMapKey :: k -> PatchMapWithMove k v
deleteMapKey k = PatchMapWithMove . Map.singleton k $ NodeInfo From_Delete Nothing
-- | Wrap a @'Map' k (NodeInfo k v)@ representing patch changes into a @'PatchMapWithMove' k v@, without checking any invariants.
--
-- __Warning:__ when using this function, you must ensure that the invariants of 'PatchMapWithMove' are preserved; they will not be checked.
unsafePatchMapWithMove :: Map k (NodeInfo k v) -> PatchMapWithMove k v
unsafePatchMapWithMove = PatchMapWithMove
-- | Apply the insertions, deletions, and moves to a given 'Map'
instance Ord k => Patch (PatchMapWithMove k v) where
type PatchTarget (PatchMapWithMove k v) = Map k v
apply (PatchMapWithMove p) old = Just $! insertions `Map.union` (old `Map.difference` deletions) --TODO: return Nothing sometimes --Note: the strict application here is critical to ensuring that incremental merges don't hold onto all their prerequisite events forever; can we make this more robust?
where insertions = flip Map.mapMaybeWithKey p $ \_ ni -> case _nodeInfo_from ni of
From_Insert v -> Just v
From_Move k -> Map.lookup k old
From_Delete -> Nothing
deletions = flip Map.mapMaybeWithKey p $ \_ ni -> case _nodeInfo_from ni of
From_Delete -> Just ()
_ -> Nothing
-- | Returns all the new elements that will be added to the 'Map'.
patchMapWithMoveNewElements :: PatchMapWithMove k v -> [v]
patchMapWithMoveNewElements = Map.elems . patchMapWithMoveNewElementsMap
-- | Return a @'Map' k v@ with all the inserts/updates from the given @'PatchMapWithMove' k v@.
patchMapWithMoveNewElementsMap :: PatchMapWithMove k v -> Map k v
patchMapWithMoveNewElementsMap (PatchMapWithMove p) = Map.mapMaybe f p
where f ni = case _nodeInfo_from ni of
From_Insert v -> Just v
From_Move _ -> Nothing
From_Delete -> Nothing
-- | Create a 'PatchMapWithMove' that, if applied to the given 'Map', will sort
-- its values using the given ordering function. The set keys of the 'Map' is
-- not changed.
patchThatSortsMapWith :: Ord k => (v -> v -> Ordering) -> Map k v -> PatchMapWithMove k v
patchThatSortsMapWith cmp m = PatchMapWithMove $ Map.fromList $ catMaybes $ zipWith g unsorted sorted
where unsorted = Map.toList m
sorted = sortBy (cmp `on` snd) unsorted
f (to, _) (from, _) = if to == from then Nothing else
Just (from, to)
reverseMapping = Map.fromList $ catMaybes $ zipWith f unsorted sorted
g (to, _) (from, _) = if to == from then Nothing else
let Just movingTo = Map.lookup from reverseMapping
in Just (to, NodeInfo (From_Move from) $ Just movingTo)
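-- | A small sketch added for illustration (the example map is made up):
-- sorting the values with 'compare' keeps the key set but permutes the values.
examplePatchThatSortsMapWith :: Bool
examplePatchThatSortsMapWith =
  let m = Map.fromList [(1 :: Int, 'b'), (2, 'a')]
  in apply (patchThatSortsMapWith compare m) m
       == Just (Map.fromList [(1, 'a'), (2, 'b')])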
-- | Create a 'PatchMapWithMove' that, if applied to the first 'Map' provided,
-- will produce a 'Map' with the same values as the second 'Map' but with the
-- values sorted with the given ordering function.
patchThatChangesAndSortsMapWith :: forall k v. (Ord k, Ord v) => (v -> v -> Ordering) -> Map k v -> Map k v -> PatchMapWithMove k v
patchThatChangesAndSortsMapWith cmp oldByIndex newByIndexUnsorted = patchThatChangesMap oldByIndex newByIndex
where newList = Map.toList newByIndexUnsorted
newByIndex = Map.fromList $ zip (fst <$> newList) $ sortBy cmp $ snd <$> newList
-- | Create a 'PatchMapWithMove' that, if applied to the first 'Map' provided,
-- will produce the second 'Map'.
patchThatChangesMap :: (Ord k, Ord v) => Map k v -> Map k v -> PatchMapWithMove k v
patchThatChangesMap oldByIndex newByIndex = patch
where oldByValue = Map.fromListWith Set.union $ swap . first Set.singleton <$> Map.toList oldByIndex
(insertsAndMoves, unusedValuesByValue) = flip runState oldByValue $ do
let f k v = do
remainingValues <- get
let putRemainingKeys remainingKeys = put $ if Set.null remainingKeys
then Map.delete v remainingValues
else Map.insert v remainingKeys remainingValues
case Map.lookup v remainingValues of
Nothing -> return $ NodeInfo (From_Insert v) $ Just undefined -- There's no existing value we can take
Just fromKs ->
if k `Set.member` fromKs
then do
putRemainingKeys $ Set.delete k fromKs
return $ NodeInfo (From_Move k) $ Just undefined -- There's an existing value, and it's here, so no patch necessary
else do
(fromK, remainingKeys) <- return . fromJust $ Set.minView fromKs -- There's an existing value, but it's not here; move it here
putRemainingKeys remainingKeys
return $ NodeInfo (From_Move fromK) $ Just undefined
Map.traverseWithKey f newByIndex
unusedOldKeys = fold unusedValuesByValue
pointlessMove k = \case
From_Move k' | k == k' -> True
_ -> False
keyWasMoved k = if k `Map.member` oldByIndex && not (k `Set.member` unusedOldKeys)
then Just undefined
else Nothing
patch = unsafePatchMapWithMove $ Map.filterWithKey (\k -> not . pointlessMove k . _nodeInfo_from) $ Map.mergeWithKey (\k a _ -> Just $ nodeInfoSetTo (keyWasMoved k) a) (Map.mapWithKey $ \k -> nodeInfoSetTo $ keyWasMoved k) (Map.mapWithKey $ \k _ -> NodeInfo From_Delete $ keyWasMoved k) insertsAndMoves oldByIndex
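-- | A small sketch added for illustration (the example maps are made up):
-- applying the computed patch to the first 'Map' yields the second one.
examplePatchThatChangesMap :: Bool
examplePatchThatChangesMap =
  let old = Map.fromList [(1 :: Int, 'a'), (2, 'b')]
      new = Map.fromList [(2, 'a'), (3, 'c')]
  in apply (patchThatChangesMap old new) old == Just new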
-- | Change the 'From' value of a 'NodeInfo'
nodeInfoMapFrom :: (From k v -> From k v) -> NodeInfo k v -> NodeInfo k v
nodeInfoMapFrom f ni = ni { _nodeInfo_from = f $ _nodeInfo_from ni }
-- | Change the 'From' value of a 'NodeInfo', using a 'Functor' (or
-- 'Applicative', 'Monad', etc.) action to get the new value
nodeInfoMapMFrom :: Functor f => (From k v -> f (From k v)) -> NodeInfo k v -> f (NodeInfo k v)
nodeInfoMapMFrom f ni = fmap (\result -> ni { _nodeInfo_from = result }) $ f $ _nodeInfo_from ni
-- | Set the 'To' field of a 'NodeInfo'
nodeInfoSetTo :: To k -> NodeInfo k v -> NodeInfo k v
nodeInfoSetTo to ni = ni { _nodeInfo_to = to }
-- |Helper data structure used for composing patches using the monoid instance.
data Fixup k v
= Fixup_Delete
| Fixup_Update (These (From k v) (To k))
-- |Compose patches having the same effect as applying the patches in turn: @'applyAlways' (p <> q) == 'applyAlways' p . 'applyAlways' q@
instance Ord k => Semigroup (PatchMapWithMove k v) where
PatchMapWithMove ma <> PatchMapWithMove mb = PatchMapWithMove m
where
connections = Map.toList $ Map.intersectionWithKey (\_ a b -> (_nodeInfo_to a, _nodeInfo_from b)) ma mb
h :: (k, (Maybe k, From k v)) -> [(k, Fixup k v)]
h (_, (mToAfter, editBefore)) = case (mToAfter, editBefore) of
(Just toAfter, From_Move fromBefore)
| fromBefore == toAfter
-> [(toAfter, Fixup_Delete)]
| otherwise
-> [ (toAfter, Fixup_Update (This editBefore))
, (fromBefore, Fixup_Update (That mToAfter))
]
(Nothing, From_Move fromBefore) -> [(fromBefore, Fixup_Update (That mToAfter))] -- The item is destroyed in the second patch, so indicate that it is destroyed in the source map
(Just toAfter, _) -> [(toAfter, Fixup_Update (This editBefore))]
(Nothing, _) -> []
mergeFixups _ Fixup_Delete Fixup_Delete = Fixup_Delete
mergeFixups _ (Fixup_Update a) (Fixup_Update b)
| This x <- a, That y <- b
= Fixup_Update $ These x y
| That y <- a, This x <- b
= Fixup_Update $ These x y
mergeFixups _ _ _ = error "PatchMapWithMove: incompatible fixups"
fixups = Map.fromListWithKey mergeFixups $ concatMap h connections
combineNodeInfos _ nia nib = NodeInfo
{ _nodeInfo_from = _nodeInfo_from nia
, _nodeInfo_to = _nodeInfo_to nib
}
applyFixup _ ni = \case
Fixup_Delete -> Nothing
Fixup_Update u -> Just $ NodeInfo
{ _nodeInfo_from = fromMaybe (_nodeInfo_from ni) $ getHere u
, _nodeInfo_to = fromMaybe (_nodeInfo_to ni) $ getThere u
}
m = Map.differenceWithKey applyFixup (Map.unionWithKey combineNodeInfos ma mb) fixups
getHere :: These a b -> Maybe a
getHere = \case
This a -> Just a
These a _ -> Just a
That _ -> Nothing
getThere :: These a b -> Maybe b
getThere = \case
This _ -> Nothing
These _ b -> Just b
That b -> Just b
--TODO: Figure out how to implement this in terms of PatchDMapWithMove rather than duplicating it here
-- |Compose patches having the same effect as applying the patches in turn: @'applyAlways' (p <> q) == 'applyAlways' p . 'applyAlways' q@
instance Ord k => Monoid (PatchMapWithMove k v) where
mempty = PatchMapWithMove mempty
mappend = (<>)
| Saulzar/reflex | src/Reflex/Patch/MapWithMove.hs | bsd-3-clause | 13,335 | 0 | 25 | 3,165 | 3,204 | 1,658 | 1,546 | 187 | 6 |
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
{-| Binary instances for the core datatypes -}
module Idris.Core.Binary where
import Control.Applicative ((<*>), (<$>))
import Control.Monad (liftM2)
import Control.DeepSeq (($!!))
import Data.Binary
import Data.Vector.Binary
import qualified Data.Text as T
import qualified Data.Text.Encoding as E
import Idris.Core.TT
instance Binary ErrorReportPart where
put (TextPart msg) = do putWord8 0 ; put msg
put (NamePart n) = do putWord8 1 ; put n
put (TermPart t) = do putWord8 2 ; put t
put (SubReport ps) = do putWord8 3 ; put ps
put (RawPart r) = do putWord8 4 ; put r
get = do i <- getWord8
case i of
0 -> fmap TextPart get
1 -> fmap NamePart get
2 -> fmap TermPart get
3 -> fmap SubReport get
4 -> fmap RawPart get
_ -> error "Corrupted binary data for ErrorReportPart"
instance Binary Provenance where
put ExpectedType = putWord8 0
put (SourceTerm t) = do putWord8 1
put t
put InferredVal = putWord8 2
put GivenVal = putWord8 3
put (TooManyArgs t) = do putWord8 4
put t
get = do i <- getWord8
case i of
0 -> return ExpectedType
1 -> do x1 <- get
return (SourceTerm x1)
2 -> return InferredVal
3 -> return GivenVal
4 -> do x1 <- get
return (TooManyArgs x1)
_ -> error "Corrupted binary data for Provenance"
instance Binary UConstraint where
put (ULT x1 x2) = putWord8 0 >> put x1 >> put x2
put (ULE x1 x2) = putWord8 1 >> put x1 >> put x2
get = do i <- getWord8
case i of
0 -> ULT <$> get <*> get
1 -> ULE <$> get <*> get
_ -> error "Corrupted binary data for UConstraint"
instance Binary ConstraintFC where
put (ConstraintFC x1 x2) = putWord8 0 >> put x1 >> put x2
get = do i <- getWord8
case i of
0 -> liftM2 ConstraintFC get get
_ -> error "Corrupted binary data for ConstraintFC"
instance Binary a => Binary (Err' a) where
put (Msg str) = do putWord8 0
put str
put (InternalMsg str) = do putWord8 1
put str
put (CantUnify x y z e ctxt i) = do putWord8 2
put x
put y
put z
put e
put ctxt
put i
put (InfiniteUnify n t ctxt) = do putWord8 3
put n
put t
put ctxt
put (CantConvert x y ctxt) = do putWord8 4
put x
put y
put ctxt
put (CantSolveGoal x ctxt) = do putWord8 5
put x
put ctxt
put (UnifyScope n1 n2 x ctxt) = do putWord8 6
put n1
put n2
put x
put ctxt
put (CantInferType str) = do putWord8 7
put str
put (NonFunctionType t1 t2) = do putWord8 8
put t1
put t2
put (NotEquality t1 t2) = do putWord8 9
put t1
put t2
put (TooManyArguments n) = do putWord8 10
put n
put (CantIntroduce t) = do putWord8 11
put t
put (NoSuchVariable n) = do putWord8 12
put n
put (NoTypeDecl n) = do putWord8 13
put n
put (NotInjective x y z) = do putWord8 14
put x
put y
put z
put (CantResolve _ t) = do putWord8 15
put t
put (CantResolveAlts ns) = do putWord8 16
put ns
put (IncompleteTerm t) = do putWord8 17
put t
put (UniverseError x1 x2 x3 x4 x5) = do putWord8 18
put x1
put x2
put x3
put x4
put x5
put (UniqueError u n) = do putWord8 19
put u
put n
put (UniqueKindError u n) = do putWord8 20
put u
put n
put ProgramLineComment = putWord8 21
put (Inaccessible n) = do putWord8 22
put n
put (NonCollapsiblePostulate n) = do putWord8 23
put n
put (AlreadyDefined n) = do putWord8 24
put n
put (ProofSearchFail e) = do putWord8 25
put e
put (NoRewriting t) = do putWord8 26
put t
put (At fc e) = do putWord8 27
put fc
put e
put (Elaborating str n e) = do putWord8 28
put str
put n
put e
put (ElaboratingArg n1 n2 ns e) = do putWord8 29
put n1
put n2
put ns
put e
put (ProviderError str) = do putWord8 30
put str
put (LoadingFailed str e) = do putWord8 31
put str
put e
put (ReflectionError parts e) = do putWord8 32
put parts
put e
put (ReflectionFailed str e) = do putWord8 33
put str
put e
put (WithFnType t) = do putWord8 34
put t
put (CantMatch t) = do putWord8 35
put t
put (ElabScriptDebug x1 x2 x3) = do putWord8 36
put x1
put x2
put x3
put (NoEliminator s t) = do putWord8 37
put s
put t
put (InvalidTCArg n t) = do putWord8 38
put n
put t
put (ElabScriptStuck x1) = do putWord8 39
put x1
put (UnknownImplicit n f) = do putWord8 40
put n
put f
get = do i <- getWord8
case i of
0 -> fmap Msg get
1 -> fmap InternalMsg get
2 -> do x <- get ; y <- get ; z <- get ; e <- get ; ctxt <- get ; i <- get
return $ CantUnify x y z e ctxt i
3 -> do x <- get ; y <- get ; z <- get
return $ InfiniteUnify x y z
4 -> do x <- get ; y <- get ; z <- get
return $ CantConvert x y z
5 -> do x <- get ; y <- get
return $ CantSolveGoal x y
6 -> do w <- get ; x <- get ; y <- get ; z <- get
return $ UnifyScope w x y z
7 -> fmap CantInferType get
8 -> do x <- get ; y <- get
return $ NonFunctionType x y
9 -> do x <- get ; y <- get
return $ NotEquality x y
10 -> fmap TooManyArguments get
11 -> fmap CantIntroduce get
12 -> fmap NoSuchVariable get
13 -> fmap NoTypeDecl get
14 -> do x <- get ; y <- get ; z <- get
return $ NotInjective x y z
15 -> fmap (CantResolve False) get
16 -> fmap CantResolveAlts get
17 -> fmap IncompleteTerm get
18 -> UniverseError <$> get <*> get <*> get <*> get <*> get
19 -> do x <- get ; y <- get
return $ UniqueError x y
20 -> do x <- get ; y <- get
return $ UniqueKindError x y
21 -> return ProgramLineComment
22 -> fmap Inaccessible get
23 -> fmap NonCollapsiblePostulate get
24 -> fmap AlreadyDefined get
25 -> fmap ProofSearchFail get
26 -> fmap NoRewriting get
27 -> do x <- get ; y <- get
return $ At x y
28 -> do x <- get ; y <- get ; z <- get
return $ Elaborating x y z
29 -> do w <- get ; x <- get ; y <- get ; z <- get
return $ ElaboratingArg w x y z
30 -> fmap ProviderError get
31 -> do x <- get ; y <- get
return $ LoadingFailed x y
32 -> do x <- get ; y <- get
return $ ReflectionError x y
33 -> do x <- get ; y <- get
return $ ReflectionFailed x y
34 -> fmap WithFnType get
35 -> fmap CantMatch get
36 -> do x1 <- get
x2 <- get
x3 <- get
return (ElabScriptDebug x1 x2 x3)
37 -> do x1 <- get
x2 <- get
return (NoEliminator x1 x2)
38 -> do x1 <- get
x2 <- get
return (InvalidTCArg x1 x2)
39 -> do x1 <- get
return (ElabScriptStuck x1)
40 -> do x <- get ; y <- get
return $ UnknownImplicit x y
_ -> error "Corrupted binary data for Err'"
----- Generated by 'derive'
instance Binary FC where
put x =
case x of
(FC x1 (x2, x3) (x4, x5)) -> do putWord8 0
put x1
put (x2 * 65536 + x3)
put (x4 * 65536 + x5)
NoFC -> putWord8 1
FileFC x1 -> do putWord8 2
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2x3 <- get
x4x5 <- get
return (FC x1 (x2x3 `div` 65536, x2x3 `mod` 65536) (x4x5 `div` 65536, x4x5 `mod` 65536))
1 -> return NoFC
2 -> do x1 <- get
return (FileFC x1)
_ -> error "Corrupted binary data for FC"
instance Binary Name where
put x
= case x of
UN x1 -> do putWord8 0
put x1
NS x1 x2 -> do putWord8 1
put x1
put x2
MN x1 x2 -> do putWord8 2
put x1
put x2
NErased -> putWord8 3
SN x1 -> do putWord8 4
put x1
SymRef x1 -> do putWord8 5
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (UN x1)
1 -> do x1 <- get
x2 <- get
return (NS x1 x2)
2 -> do x1 <- get
x2 <- get
return (MN x1 x2)
3 -> return NErased
4 -> do x1 <- get
return (SN x1)
5 -> do x1 <- get
return (SymRef x1)
_ -> error "Corrupted binary data for Name"
instance Binary SpecialName where
put x
= case x of
WhereN x1 x2 x3 -> do putWord8 0
put x1
put x2
put x3
InstanceN x1 x2 -> do putWord8 1
put x1
put x2
ParentN x1 x2 -> do putWord8 2
put x1
put x2
MethodN x1 -> do putWord8 3
put x1
CaseN x1 -> do putWord8 4; put x1
ElimN x1 -> do putWord8 5; put x1
InstanceCtorN x1 -> do putWord8 6; put x1
WithN x1 x2 -> do putWord8 7
put x1
put x2
MetaN x1 x2 -> do putWord8 8
put x1
put x2
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
return (WhereN x1 x2 x3)
1 -> do x1 <- get
x2 <- get
return (InstanceN x1 x2)
2 -> do x1 <- get
x2 <- get
return (ParentN x1 x2)
3 -> do x1 <- get
return (MethodN x1)
4 -> do x1 <- get
return (CaseN x1)
5 -> do x1 <- get
return (ElimN x1)
6 -> do x1 <- get
return (InstanceCtorN x1)
7 -> do x1 <- get
x2 <- get
return (WithN x1 x2)
8 -> do x1 <- get
x2 <- get
return (MetaN x1 x2)
_ -> error "Corrupted binary data for SpecialName"
instance Binary Const where
put x
= case x of
I x1 -> do putWord8 0
put x1
BI x1 -> do putWord8 1
put x1
Fl x1 -> do putWord8 2
put x1
Ch x1 -> do putWord8 3
put x1
Str x1 -> do putWord8 4
put x1
B8 x1 -> putWord8 5 >> put x1
B16 x1 -> putWord8 6 >> put x1
B32 x1 -> putWord8 7 >> put x1
B64 x1 -> putWord8 8 >> put x1
(AType (ATInt ITNative)) -> putWord8 9
(AType (ATInt ITBig)) -> putWord8 10
(AType ATFloat) -> putWord8 11
(AType (ATInt ITChar)) -> putWord8 12
StrType -> putWord8 13
Forgot -> putWord8 15
(AType (ATInt (ITFixed ity))) -> putWord8 (fromIntegral (16 + fromEnum ity)) -- 16-19 inclusive
VoidType -> putWord8 27
WorldType -> putWord8 28
TheWorld -> putWord8 29
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (I x1)
1 -> do x1 <- get
return (BI x1)
2 -> do x1 <- get
return (Fl x1)
3 -> do x1 <- get
return (Ch x1)
4 -> do x1 <- get
return (Str x1)
5 -> fmap B8 get
6 -> fmap B16 get
7 -> fmap B32 get
8 -> fmap B64 get
9 -> return (AType (ATInt ITNative))
10 -> return (AType (ATInt ITBig))
11 -> return (AType ATFloat)
12 -> return (AType (ATInt ITChar))
13 -> return StrType
15 -> return Forgot
16 -> return (AType (ATInt (ITFixed IT8)))
17 -> return (AType (ATInt (ITFixed IT16)))
18 -> return (AType (ATInt (ITFixed IT32)))
19 -> return (AType (ATInt (ITFixed IT64)))
27 -> return VoidType
28 -> return WorldType
29 -> return TheWorld
_ -> error "Corrupted binary data for Const"
instance Binary Raw where
put x
= case x of
Var x1 -> do putWord8 0
put x1
RBind x1 x2 x3 -> do putWord8 1
put x1
put x2
put x3
RApp x1 x2 -> do putWord8 2
put x1
put x2
RType -> putWord8 3
RConstant x1 -> do putWord8 4
put x1
RForce x1 -> do putWord8 5
put x1
RUType x1 -> do putWord8 6
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Var x1)
1 -> do x1 <- get
x2 <- get
x3 <- get
return (RBind x1 x2 x3)
2 -> do x1 <- get
x2 <- get
return (RApp x1 x2)
3 -> return RType
4 -> do x1 <- get
return (RConstant x1)
5 -> do x1 <- get
return (RForce x1)
6 -> do x1 <- get
return (RUType x1)
_ -> error "Corrupted binary data for Raw"
instance Binary ImplicitInfo where
put x
= case x of
Impl x1 -> put x1
get
= do x1 <- get
return (Impl x1)
instance (Binary b) => Binary (Binder b) where
put x
= case x of
Lam x1 -> do putWord8 0
put x1
Pi x1 x2 x3 -> do putWord8 1
put x1
put x2
put x3
Let x1 x2 -> do putWord8 2
put x1
put x2
NLet x1 x2 -> do putWord8 3
put x1
put x2
Hole x1 -> do putWord8 4
put x1
GHole x1 x2 -> do putWord8 5
put x1
put x2
Guess x1 x2 -> do putWord8 6
put x1
put x2
PVar x1 -> do putWord8 7
put x1
PVTy x1 -> do putWord8 8
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Lam x1)
1 -> do x1 <- get
x2 <- get
x3 <- get
return (Pi x1 x2 x3)
2 -> do x1 <- get
x2 <- get
return (Let x1 x2)
3 -> do x1 <- get
x2 <- get
return (NLet x1 x2)
4 -> do x1 <- get
return (Hole x1)
5 -> do x1 <- get
x2 <- get
return (GHole x1 x2)
6 -> do x1 <- get
x2 <- get
return (Guess x1 x2)
7 -> do x1 <- get
return (PVar x1)
8 -> do x1 <- get
return (PVTy x1)
_ -> error "Corrupted binary data for Binder"
instance Binary Universe where
put x = case x of
UniqueType -> putWord8 0
AllTypes -> putWord8 1
NullType -> putWord8 2
get = do i <- getWord8
case i of
0 -> return UniqueType
1 -> return AllTypes
2 -> return NullType
_ -> error "Corrupted binary data for Universe"
instance Binary NameType where
put x
= case x of
Bound -> putWord8 0
Ref -> putWord8 1
DCon x1 x2 x3 -> do putWord8 2
put (x1 * 65536 + x2)
put x3
TCon x1 x2 -> do putWord8 3
put (x1 * 65536 + x2)
get
= do i <- getWord8
case i of
0 -> return Bound
1 -> return Ref
2 -> do x1x2 <- get
x3 <- get
return (DCon (x1x2 `div` 65536) (x1x2 `mod` 65536) x3)
3 -> do x1x2 <- get
return (TCon (x1x2 `div` 65536) (x1x2 `mod` 65536))
_ -> error "Corrupted binary data for NameType"
-- record concrete levels only, for now
instance Binary UExp where
put x = case x of
UVar t -> do putWord8 0
put ((-1) :: Int) -- TMP HACK!
UVal t -> do putWord8 1
put t
get = do i <- getWord8
case i of
0 -> do x1 <- get
return (UVar x1)
1 -> do x1 <- get
return (UVal x1)
_ -> error "Corrupted binary data for UExp"
instance {- (Binary n) => -} Binary (TT Name) where
put x
= {-# SCC "putTT" #-}
case x of
P x1 x2 x3 -> do putWord8 0
put x1
put x2
-- put x3
                V x1 -> if (x1 >= 0 && x1 < 255) -- not 256: the byte written below is x1 + 1
then do putWord8 1
putWord8 (toEnum (x1 + 1))
else do putWord8 9
put x1
Bind x1 x2 x3 -> do putWord8 2
put x1
put x2
put x3
App _ x1 x2 -> do putWord8 3
put x1
put x2
Constant x1 -> do putWord8 4
put x1
Proj x1 x2 -> do putWord8 5
put x1
putWord8 (toEnum (x2 + 1))
Erased -> putWord8 6
TType x1 -> do putWord8 7
put x1
Impossible -> putWord8 8
UType x1 -> do putWord8 10
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
-- x3 <- get
return (P x1 x2 Erased)
1 -> do x1 <- getWord8
return (V ((fromEnum x1) - 1))
2 -> do x1 <- get
x2 <- get
x3 <- get
return (Bind x1 x2 x3)
3 -> do x1 <- get
x2 <- get
return (App Complete x1 x2)
4 -> do x1 <- get
return (Constant x1)
5 -> do x1 <- get
x2 <- getWord8
return (Proj x1 ((fromEnum x2)-1))
6 -> return Erased
7 -> do x1 <- get
return (TType x1)
8 -> return Impossible
9 -> do x1 <- get
return (V x1)
10 -> do x1 <- get
return (UType x1)
_ -> error "Corrupted binary data for TT"
| bkoropoff/Idris-dev | src/Idris/Core/Binary.hs | bsd-3-clause | 25,159 | 0 | 19 | 14,956 | 7,340 | 3,248 | 4,092 | 613 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
module Network.Kafka.Primitive.GroupCoordinator where
import qualified Data.Vector as V
import Control.Lens
import Network.Kafka.Exports
import Network.Kafka.Types
newtype GroupCoordinatorRequestV0 = GroupCoordinatorRequestV0
{ groupCoordinatorRequestV0ConsumerGroup :: Utf8
} deriving (Show, Eq, Generic)
instance Binary GroupCoordinatorRequestV0
instance ByteSize GroupCoordinatorRequestV0 where
byteSize = byteSize . groupCoordinatorRequestV0ConsumerGroup
data GroupCoordinatorResponseV0 = GroupCoordinatorResponseV0
{ groupCoordinatorResponseV0ErrorCode :: !ErrorCode
, groupCoordinatorResponseV0CoordinatorId :: !CoordinatorId
, groupCoordinatorResponseV0CoordinatorHost :: !Utf8
, groupCoordinatorResponseV0CoordinatorPort :: !Int32
} deriving (Show, Eq, Generic)
instance Binary GroupCoordinatorResponseV0
instance ByteSize GroupCoordinatorResponseV0 where
byteSize r = byteSize (groupCoordinatorResponseV0ErrorCode r) +
byteSize (groupCoordinatorResponseV0CoordinatorId r) +
byteSize (groupCoordinatorResponseV0CoordinatorHost r) +
byteSize (groupCoordinatorResponseV0CoordinatorPort r)
instance RequestApiKey GroupCoordinatorRequestV0 where
apiKey = theApiKey 10
instance RequestApiVersion GroupCoordinatorRequestV0 where
apiVersion = const 0
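-- A small usage sketch (not part of the original module): by the instances
-- above, a request's wire size is just the size of its single field,
--
-- > byteSize req == byteSize (groupCoordinatorRequestV0ConsumerGroup req)
--
-- and a response's size is the sum of the sizes of its four fields.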
| iand675/hs-kafka | src/Network/Kafka/Primitive/GroupCoordinator.hs | bsd-3-clause | 1,562 | 0 | 11 | 273 | 245 | 135 | 110 | 40 | 0 |
-- The @FamInst@ type: family instance heads
{-# LANGUAGE CPP, GADTs, ViewPatterns #-}
module FamInst (
FamInstEnvs, tcGetFamInstEnvs,
checkFamInstConsistency, tcExtendLocalFamInstEnv,
tcLookupDataFamInst, tcLookupDataFamInst_maybe,
tcInstNewTyCon_maybe, tcTopNormaliseNewTypeTF_maybe,
newFamInst,
-- * Injectivity
reportInjectivityErrors, reportConflictingInjectivityErrs
) where
import GhcPrelude
import HscTypes
import FamInstEnv
import InstEnv( roughMatchTcs )
import Coercion
import CoreLint
import TcEvidence
import GHC.Iface.Load
import TcRnMonad
import SrcLoc
import TyCon
import TcType
import CoAxiom
import DynFlags
import Module
import Outputable
import Util
import RdrName
import DataCon ( dataConName )
import Maybes
import TyCoRep
import TyCoFVs
import TyCoPpr ( pprWithExplicitKindsWhen )
import TcMType
import Name
import Panic
import VarSet
import FV
import Bag( Bag, unionBags, unitBag )
import Control.Monad
import Data.List.NonEmpty ( NonEmpty(..) )
import qualified GHC.LanguageExtensions as LangExt
#include "HsVersions.h"
{- Note [The type family instance consistency story]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To preserve type safety we must ensure that for any given module, all
the type family instances used either in that module or in any module
it directly or indirectly imports are consistent. For example, consider
module F where
type family F a
module A where
import F( F )
type instance F Int = Bool
f :: F Int -> Bool
f x = x
module B where
import F( F )
type instance F Int = Char
g :: Char -> F Int
g x = x
module Bad where
import A( f )
import B( g )
bad :: Char -> Int
bad c = f (g c)
Even though module Bad never mentions the type family F at all, by
combining the functions f and g that were type checked in contradictory
type family instance environments, the function bad is able to coerce
from one type to another. So when we type check Bad we must verify that
the type family instances defined in module A are consistent with those
defined in module B.
How do we ensure that we maintain the necessary consistency?
* Call a module which defines at least one type family instance a
"family instance module". This flag `mi_finsts` is recorded in the
interface file.
* For every module we calculate the set of all of its direct and
indirect dependencies that are family instance modules. This list
`dep_finsts` is also recorded in the interface file so we can compute
this list for a module from the lists for its direct dependencies.
* When type checking a module M we check consistency of all the type
family instances that are either provided by its `dep_finsts` or
defined in the module M itself. This is a pairwise check, i.e., for
every pair of instances we must check that they are consistent.
- For family instances coming from `dep_finsts`, this is checked in
checkFamInstConsistency, called from tcRnImports. See Note
[Checking family instance consistency] for details on this check
(and in particular how we avoid having to do all these checks for
every module we compile).
- That leaves checking the family instances defined in M itself
against instances defined in either M or its `dep_finsts`. This is
checked in `tcExtendLocalFamInstEnv'.
There are four subtle points in this scheme which have not been
addressed yet.
* We have checked consistency of the family instances *defined* by M
or its imports, but this is not by definition the same thing as the
family instances *used* by M or its imports. Specifically, we need to
ensure when we use a type family instance while compiling M that this
instance was really defined from either M or one of its imports,
rather than being an instance that we happened to know about from
reading an interface file in the course of compiling an unrelated
module. Otherwise, we'll end up with no record of the fact that M
depends on this family instance and type safety will be compromised.
See #13102.
* It can also happen that M uses a function defined in another module
which is not transitively imported by M. Examples include the
desugaring of various overloaded constructs, and references inserted
by Template Haskell splices. If that function's definition makes use
of type family instances which are not checked against those visible
from M, type safety can again be compromised. See #13251.
* When a module C imports a boot module B.hs-boot, we check that C's
type family instances are compatible with those visible from
B.hs-boot. However, C will eventually be linked against a different
module B.hs, which might define additional type family instances which
are inconsistent with C's. This can also lead to loss of type safety.
See #9562.
 * The call to checkFamInstConsistency for imported functions occurs very
early (in tcRnImports) and that causes problems if the imported
instances use type declared in the module being compiled.
See Note [Loading your own hi-boot file] in GHC.Iface.Load.
-}
{-
************************************************************************
* *
Making a FamInst
* *
************************************************************************
-}
-- All type variables in a FamInst must be fresh. This function
-- creates the fresh variables and applies the necessary substitution
-- It is defined here to avoid a dependency from FamInstEnv on the monad
-- code.
newFamInst :: FamFlavor -> CoAxiom Unbranched -> TcM FamInst
-- Freshen the type variables of the FamInst branches
newFamInst flavor axiom@(CoAxiom { co_ax_tc = fam_tc })
= ASSERT2( tyCoVarsOfTypes lhs `subVarSet` tcv_set, text "lhs" <+> pp_ax )
ASSERT2( lhs_kind `eqType` rhs_kind, text "kind" <+> pp_ax $$ ppr lhs_kind $$ ppr rhs_kind )
-- We used to have an assertion that the tyvars of the RHS were bound
-- by tcv_set, but in error situations like F Int = a that isn't
-- true; a later check in checkValidFamInst rejects it
do { (subst, tvs') <- freshenTyVarBndrs tvs
; (subst, cvs') <- freshenCoVarBndrsX subst cvs
; dflags <- getDynFlags
; let lhs' = substTys subst lhs
rhs' = substTy subst rhs
tcvs' = tvs' ++ cvs'
; ifErrsM (return ()) $ -- Don't lint when there are errors, because
-- errors might mean TcTyCons.
-- See Note [Recover from validity error] in TcTyClsDecls
when (gopt Opt_DoCoreLinting dflags) $
-- Check that the types involved in this instance are well formed.
-- Do /not/ expand type synonyms, for the reasons discussed in
-- Note [Linting type synonym applications].
case lintTypes dflags tcvs' (rhs':lhs') of
Nothing -> pure ()
Just fail_msg -> pprPanic "Core Lint error" (vcat [ fail_msg
, ppr fam_tc
, ppr subst
, ppr tvs'
, ppr cvs'
, ppr lhs'
, ppr rhs' ])
; return (FamInst { fi_fam = tyConName fam_tc
, fi_flavor = flavor
, fi_tcs = roughMatchTcs lhs
, fi_tvs = tvs'
, fi_cvs = cvs'
, fi_tys = lhs'
, fi_rhs = rhs'
, fi_axiom = axiom }) }
where
lhs_kind = tcTypeKind (mkTyConApp fam_tc lhs)
rhs_kind = tcTypeKind rhs
tcv_set = mkVarSet (tvs ++ cvs)
pp_ax = pprCoAxiom axiom
CoAxBranch { cab_tvs = tvs
, cab_cvs = cvs
, cab_lhs = lhs
, cab_rhs = rhs } = coAxiomSingleBranch axiom
{-
************************************************************************
* *
Optimised overlap checking for family instances
* *
************************************************************************
Note [Checking family instance consistency]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For any two family instance modules that we import directly or indirectly, we
check whether the instances in the two modules are consistent, *unless* we can
be certain that the instances of the two modules have already been checked for
consistency during the compilation of modules that we import.
Why do we need to check? Consider
module X1 where module X2 where
data T1 data T2
type instance F T1 b = Int type instance F a T2 = Char
f1 :: F T1 a -> Int f2 :: Char -> F a T2
f1 x = x f2 x = x
Now if we import both X1 and X2 we could make (f2 . f1) :: Int -> Char.
Notice that neither instance is an orphan.
How do we know which pairs of modules have already been checked? For each
module M we directly import, we look up the family instance modules that M
imports (directly or indirectly), say F1, ..., FN. For any two modules
among M, F1, ..., FN, we know that the family instances defined in those
two modules are consistent--because we checked that when we compiled M.
For every other pair of family instance modules we import (directly or
indirectly), we check that they are consistent now. (So that we can be
certain that the modules in our `HscTypes.dep_finsts' are consistent.)
There is some fancy footwork regarding hs-boot module loops, see
Note [Don't check hs-boot type family instances too early]
Note [Checking family instance optimization]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
As explained in Note [Checking family instance consistency]
we need to ensure that every pair of transitive imports that define type family
instances is consistent.
Let's define df(A) = transitive imports of A that define type family instances
+ A, if A defines type family instances
Then for every direct import A, df(A) is already consistent.
Let's name the current module M.
We want to make sure that df(M) is consistent.
df(M) = df(D_1) U df(D_2) U ... U df(D_i) where D_1 .. D_i are direct imports.
We perform the check iteratively, maintaining a set of consistent modules 'C'
and trying to add df(D_i) to it.
The key part is how to ensure that the union C U df(D_i) is consistent.
Let's consider two modules: A and B from C U df(D_i).
There are nine possible ways to choose A and B from C U df(D_i):
| A in C only | A in C and B in df(D_i) | A in df(D_i) only
--------------------------------------------------------------------------------
B in C only | Already checked | Already checked | Needs to be checked
| when checking C | when checking C |
--------------------------------------------------------------------------------
B in C and | Already checked | Already checked | Already checked when
B in df(D_i) | when checking C | when checking C | checking df(D_i)
--------------------------------------------------------------------------------
B in df(D_i) | Needs to be | Already checked | Already checked when
only | checked | when checking df(D_i) | checking df(D_i)
That means to ensure that C U df(D_i) is consistent we need to check every
module from C - df(D_i) against every module from df(D_i) - C and
every module from df(D_i) - C against every module from C - df(D_i).
But since the checks are symmetric it suffices to pick A from C - df(D_i)
and B from df(D_i) - C.
In other words these are the modules we need to check:
[ (m1, m2) | m1 <- C, m1 not in df(D_i)
, m2 <- df(D_i), m2 not in C ]
One final thing to note here is that if there's lot of overlap between
subsequent df(D_i)'s then we expect those set differences to be small.
That situation should be pretty common in practice, there's usually
a set of utility modules that every module imports directly or indirectly.
This is basically the idea from #13092, comment:14.
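For example, suppose M directly imports D_1 and D_2, with df(D_1) = {A, B}
and df(D_2) = {B, E}.  Processing D_1 first gives C = {A, B}, whose elements
are already mutually consistent.  For D_2 we then have

    to_check_from_mod        = df(D_2) - C = {E}
    to_check_from_consistent = C - df(D_2) = {A}

so the only new pairwise check is (E, A); the pairs involving B were already
covered when D_1 and D_2 were themselves compiled.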
-}
-- This function doesn't check ALL instances for consistency,
-- only ones that aren't involved in recursive knot-tying
-- loops; see Note [Don't check hs-boot type family instances too early].
-- We don't need to check the current module, this is done in
-- tcExtendLocalFamInstEnv.
-- See Note [The type family instance consistency story].
checkFamInstConsistency :: [Module] -> TcM ()
checkFamInstConsistency directlyImpMods
= do { (eps, hpt) <- getEpsAndHpt
; traceTc "checkFamInstConsistency" (ppr directlyImpMods)
; let { -- Fetch the iface of a given module. Must succeed as
-- all directly imported modules must already have been loaded.
modIface mod =
case lookupIfaceByModule hpt (eps_PIT eps) mod of
Nothing -> panicDoc "FamInst.checkFamInstConsistency"
(ppr mod $$ pprHPT hpt)
Just iface -> iface
-- Which family instance modules were checked for consistency
-- when we compiled `mod`?
-- Itself (if a family instance module) and its dep_finsts.
-- This is df(D_i) from
-- Note [Checking family instance optimization]
; modConsistent :: Module -> [Module]
; modConsistent mod =
if mi_finsts (mi_final_exts (modIface mod)) then mod:deps else deps
where
deps = dep_finsts . mi_deps . modIface $ mod
; hmiModule = mi_module . hm_iface
; hmiFamInstEnv = extendFamInstEnvList emptyFamInstEnv
. md_fam_insts . hm_details
; hpt_fam_insts = mkModuleEnv [ (hmiModule hmi, hmiFamInstEnv hmi)
| hmi <- eltsHpt hpt]
}
; checkMany hpt_fam_insts modConsistent directlyImpMods
}
where
-- See Note [Checking family instance optimization]
checkMany
:: ModuleEnv FamInstEnv -- home package family instances
-> (Module -> [Module]) -- given A, modules checked when A was checked
-> [Module] -- modules to process
-> TcM ()
checkMany hpt_fam_insts modConsistent mods = go [] emptyModuleSet mods
where
go :: [Module] -- list of consistent modules
-> ModuleSet -- set of consistent modules, same elements as the
-- list above
-> [Module] -- modules to process
-> TcM ()
go _ _ [] = return ()
go consistent consistent_set (mod:mods) = do
sequence_
[ check hpt_fam_insts m1 m2
| m1 <- to_check_from_mod
          -- loop over to_check_from_mod first, it's usually smaller,
-- it may even be empty
, m2 <- to_check_from_consistent
]
go consistent' consistent_set' mods
where
mod_deps_consistent = modConsistent mod
mod_deps_consistent_set = mkModuleSet mod_deps_consistent
consistent' = to_check_from_mod ++ consistent
consistent_set' =
extendModuleSetList consistent_set to_check_from_mod
to_check_from_consistent =
filterOut (`elemModuleSet` mod_deps_consistent_set) consistent
to_check_from_mod =
filterOut (`elemModuleSet` consistent_set) mod_deps_consistent
-- Why don't we just minusModuleSet here?
-- We could, but doing so means one of two things:
--
-- 1. When looping over the cartesian product we convert
          --       a set into a non-deterministically ordered list. Which
-- happens to be fine for interface file determinism
-- in this case, today, because the order only
-- determines the order of deferred checks. But such
-- invariants are hard to keep.
--
-- 2. When looping over the cartesian product we convert
-- a set into a deterministically ordered list - this
-- adds some additional cost of sorting for every
-- direct import.
--
-- That also explains why we need to keep both 'consistent'
          -- and 'consistent_set'.
--
-- See also Note [ModuleEnv performance and determinism].
check hpt_fam_insts m1 m2
= do { env1' <- getFamInsts hpt_fam_insts m1
; env2' <- getFamInsts hpt_fam_insts m2
-- We're checking each element of env1 against env2.
-- The cost of that is dominated by the size of env1, because
-- for each instance in env1 we look it up in the type family
-- environment env2, and lookup is cheap.
-- The code below ensures that env1 is the smaller environment.
; let sizeE1 = famInstEnvSize env1'
sizeE2 = famInstEnvSize env2'
(env1, env2) = if sizeE1 < sizeE2 then (env1', env2')
else (env2', env1')
-- Note [Don't check hs-boot type family instances too early]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Family instance consistency checking involves checking that
-- the family instances of our imported modules are consistent with
-- one another; this might lead you to think that this process
-- has nothing to do with the module we are about to typecheck.
-- Not so! Consider the following case:
--
-- -- A.hs-boot
-- type family F a
--
-- -- B.hs
-- import {-# SOURCE #-} A
-- type instance F Int = Bool
--
-- -- A.hs
-- import B
-- type family F a
--
-- When typechecking A, we are NOT allowed to poke the TyThing
-- for F until we have typechecked the family. Thus, we
-- can't do consistency checking for the instance in B
-- (checkFamInstConsistency is called during renaming).
           -- Failing to defer the consistency check led to #11062.
--
-- Additionally, we should also defer consistency checking when
           -- a type from the hs-boot file of the current module occurs on
-- the left hand side, as we will poke its TyThing when checking
-- for overlap.
--
-- -- F.hs
-- type family F a
--
-- -- A.hs-boot
-- import F
-- data T
--
-- -- B.hs
-- import {-# SOURCE #-} A
-- import F
-- type instance F T = Int
--
-- -- A.hs
-- import B
-- data T = MkT
--
-- In fact, it is even necessary to defer for occurrences in
           -- the RHS, because we may test for *compatibility* in the event
-- of an overlap.
--
-- Why don't we defer ALL of the checks to later? Well, many
-- instances aren't involved in the recursive loop at all. So
-- we might as well check them immediately; and there isn't
-- a good time to check them later in any case: every time
-- we finish kind-checking a type declaration and add it to
-- a context, we *then* consistency check all of the instances
-- which mentioned that type. We DO want to check instances
-- as quickly as possible, so that we aren't typechecking
-- values with inconsistent axioms in scope.
--
-- See also Note [Tying the knot]
-- for why we are doing this at all.
; let check_now = famInstEnvElts env1
; mapM_ (checkForConflicts (emptyFamInstEnv, env2)) check_now
; mapM_ (checkForInjectivityConflicts (emptyFamInstEnv,env2)) check_now
}
getFamInsts :: ModuleEnv FamInstEnv -> Module -> TcM FamInstEnv
getFamInsts hpt_fam_insts mod
| Just env <- lookupModuleEnv hpt_fam_insts mod = return env
| otherwise = do { _ <- initIfaceTcRn (loadSysInterface doc mod)
; eps <- getEps
; return (expectJust "checkFamInstConsistency" $
lookupModuleEnv (eps_mod_fam_inst_env eps) mod) }
where
doc = ppr mod <+> text "is a family-instance module"
{-
************************************************************************
* *
Lookup
* *
************************************************************************
-}
-- | If @co :: T ts ~ rep_ty@ then:
--
-- > instNewTyCon_maybe T ts = Just (rep_ty, co)
--
-- Checks for a newtype, and for being saturated
-- Just like Coercion.instNewTyCon_maybe, but returns a TcCoercion
tcInstNewTyCon_maybe :: TyCon -> [TcType] -> Maybe (TcType, TcCoercion)
tcInstNewTyCon_maybe = instNewTyCon_maybe
-- | Like 'tcLookupDataFamInst_maybe', but returns the arguments back if
-- there is no data family to unwrap.
-- Returns a Representational coercion
tcLookupDataFamInst :: FamInstEnvs -> TyCon -> [TcType]
-> (TyCon, [TcType], Coercion)
tcLookupDataFamInst fam_inst_envs tc tc_args
| Just (rep_tc, rep_args, co)
<- tcLookupDataFamInst_maybe fam_inst_envs tc tc_args
= (rep_tc, rep_args, co)
| otherwise
= (tc, tc_args, mkRepReflCo (mkTyConApp tc tc_args))
tcLookupDataFamInst_maybe :: FamInstEnvs -> TyCon -> [TcType]
-> Maybe (TyCon, [TcType], Coercion)
-- ^ Converts a data family type (eg F [a]) to its representation type (eg FList a)
-- and returns a coercion between the two: co :: F [a] ~R FList a.
tcLookupDataFamInst_maybe fam_inst_envs tc tc_args
| isDataFamilyTyCon tc
, match : _ <- lookupFamInstEnv fam_inst_envs tc tc_args
, FamInstMatch { fim_instance = rep_fam@(FamInst { fi_axiom = ax
, fi_cvs = cvs })
, fim_tys = rep_args
, fim_cos = rep_cos } <- match
, let rep_tc = dataFamInstRepTyCon rep_fam
co = mkUnbranchedAxInstCo Representational ax rep_args
(mkCoVarCos cvs)
= ASSERT( null rep_cos ) -- See Note [Constrained family instances] in FamInstEnv
Just (rep_tc, rep_args, co)
| otherwise
= Nothing
-- | 'tcTopNormaliseNewTypeTF_maybe' gets rid of top-level newtypes,
-- potentially looking through newtype /instances/.
--
-- It is only used by the type inference engine (specifically, when
-- solving representational equality), and hence it is careful to unwrap
-- only if the relevant data constructor is in scope. That's why
-- it gets a GlobalRdrEnv argument.
--
-- It is careful not to unwrap data/newtype instances if it can't
-- continue unwrapping. Such care is necessary for proper error
-- messages.
--
-- It does not look through type families.
-- It does not normalise arguments to a tycon.
--
-- If the result is Just ((gres, co), rep_ty), then
-- co : ty ~R rep_ty
-- gres are the GREs for the data constructors that
-- had to be in scope
tcTopNormaliseNewTypeTF_maybe :: FamInstEnvs
-> GlobalRdrEnv
-> Type
-> Maybe ((Bag GlobalRdrElt, TcCoercion), Type)
tcTopNormaliseNewTypeTF_maybe faminsts rdr_env ty
-- cf. FamInstEnv.topNormaliseType_maybe and Coercion.topNormaliseNewType_maybe
= topNormaliseTypeX stepper plus ty
where
plus :: (Bag GlobalRdrElt, TcCoercion) -> (Bag GlobalRdrElt, TcCoercion)
-> (Bag GlobalRdrElt, TcCoercion)
plus (gres1, co1) (gres2, co2) = ( gres1 `unionBags` gres2
, co1 `mkTransCo` co2 )
stepper :: NormaliseStepper (Bag GlobalRdrElt, TcCoercion)
stepper = unwrap_newtype `composeSteppers` unwrap_newtype_instance
-- For newtype instances we take a double step or nothing, so that
-- we don't return the representation type of the newtype instance,
-- which would lead to terrible error messages
unwrap_newtype_instance rec_nts tc tys
| Just (tc', tys', co) <- tcLookupDataFamInst_maybe faminsts tc tys
= mapStepResult (\(gres, co1) -> (gres, co `mkTransCo` co1)) $
unwrap_newtype rec_nts tc' tys'
| otherwise = NS_Done
unwrap_newtype rec_nts tc tys
| Just con <- newTyConDataCon_maybe tc
, Just gre <- lookupGRE_Name rdr_env (dataConName con)
-- This is where we check that the
-- data constructor is in scope
= mapStepResult (\co -> (unitBag gre, co)) $
unwrapNewTypeStepper rec_nts tc tys
| otherwise
= NS_Done
{-
************************************************************************
* *
Extending the family instance environment
* *
************************************************************************
-}
-- Add new locally-defined family instances, checking consistency with
-- previous locally-defined family instances as well as all instances
-- available from imported modules. This requires loading all of our
-- imports that define family instances (if we haven't loaded them already).
tcExtendLocalFamInstEnv :: [FamInst] -> TcM a -> TcM a
-- If we weren't actually given any instances to add, then we don't want
-- to go to the bother of loading family instance module dependencies.
tcExtendLocalFamInstEnv [] thing_inside = thing_inside
-- Otherwise proceed...
tcExtendLocalFamInstEnv fam_insts thing_inside
= do { -- Load family-instance modules "below" this module, so that
         -- addLocalFamInst can check for consistency with them
-- See Note [The type family instance consistency story]
loadDependentFamInstModules fam_insts
-- Now add the instances one by one
; env <- getGblEnv
; (inst_env', fam_insts') <- foldlM addLocalFamInst
(tcg_fam_inst_env env, tcg_fam_insts env)
fam_insts
; let env' = env { tcg_fam_insts = fam_insts'
, tcg_fam_inst_env = inst_env' }
; setGblEnv env' thing_inside
}
loadDependentFamInstModules :: [FamInst] -> TcM ()
-- Load family-instance modules "below" this module, so that
-- addLocalFamInst can check for consistency with them
-- See Note [The type family instance consistency story]
loadDependentFamInstModules fam_insts
= do { env <- getGblEnv
; let this_mod = tcg_mod env
imports = tcg_imports env
want_module mod -- See Note [Home package family instances]
| mod == this_mod = False
| home_fams_only = moduleUnitId mod == moduleUnitId this_mod
| otherwise = True
home_fams_only = all (nameIsHomePackage this_mod . fi_fam) fam_insts
; loadModuleInterfaces (text "Loading family-instance modules") $
filter want_module (imp_finsts imports) }
{- Note [Home package family instances]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Optimization: If we're only defining type family instances
for type families *defined in the home package*, then we
only have to load interface files that belong to the home
package. The reason is that there's no recursion between
packages, so modules in other packages can't possibly define
instances for our type families.
(Within the home package, we could import a module M that
imports us via an hs-boot file, and thereby defines an
instance of a type family defined in this module. So we can't
apply the same logic to avoid reading any interface files at
all, when we define an instance for a type family defined in
the current module.)
-}
-- Check that the proposed new instance is OK,
-- and then add it to the home inst env
-- This must be lazy in the fam_inst arguments, see Note [Lazy axiom match]
-- in FamInstEnv.hs
addLocalFamInst :: (FamInstEnv,[FamInst])
-> FamInst
-> TcM (FamInstEnv, [FamInst])
addLocalFamInst (home_fie, my_fis) fam_inst
-- home_fie includes home package and this module
        -- my_fis is just the ones from this module
= do { traceTc "addLocalFamInst" (ppr fam_inst)
-- Unlike the case of class instances, don't override existing
-- instances in GHCi; it's unsound. See #7102.
; mod <- getModule
; traceTc "alfi" (ppr mod)
-- Fetch imported instances, so that we report
-- overlaps correctly.
-- Really we ought to only check consistency with
-- those instances which are transitively imported
-- by the current module, rather than every instance
-- we've ever seen. Fixing this is part of #13102.
; eps <- getEps
; let inst_envs = (eps_fam_inst_env eps, home_fie)
home_fie' = extendFamInstEnv home_fie fam_inst
-- Check for conflicting instance decls and injectivity violations
; ((), no_errs) <- askNoErrs $
do { checkForConflicts inst_envs fam_inst
; checkForInjectivityConflicts inst_envs fam_inst
; checkInjectiveEquation fam_inst
}
; if no_errs then
return (home_fie', fam_inst : my_fis)
else
return (home_fie, my_fis) }
{-
************************************************************************
* *
Checking an instance against conflicts with an instance env
* *
************************************************************************
Check whether a single family instance conflicts with those in two instance
environments (one for the EPS and one for the HPT).
-}
-- | Checks to make sure no two family instances overlap.
checkForConflicts :: FamInstEnvs -> FamInst -> TcM ()
checkForConflicts inst_envs fam_inst
= do { let conflicts = lookupFamInstEnvConflicts inst_envs fam_inst
; traceTc "checkForConflicts" $
vcat [ ppr (map fim_instance conflicts)
, ppr fam_inst
-- , ppr inst_envs
]
; reportConflictInstErr fam_inst conflicts }
checkForInjectivityConflicts :: FamInstEnvs -> FamInst -> TcM ()
-- see Note [Verifying injectivity annotation] in FamInstEnv, check 1B1.
checkForInjectivityConflicts instEnvs famInst
| isTypeFamilyTyCon tycon -- as opposed to data family tycon
, Injective inj <- tyConInjectivityInfo tycon
= let conflicts = lookupFamInstEnvInjectivityConflicts inj instEnvs famInst in
reportConflictingInjectivityErrs tycon conflicts (coAxiomSingleBranch (fi_axiom famInst))
| otherwise
= return ()
where tycon = famInstTyCon famInst
-- | Check whether a new open type family equation can be added without
-- violating the injectivity annotation supplied by the user, reporting an
-- error if it cannot. This looks only at the one equation; it does not look for
-- interaction between equations. Use checkForInjectivityConflicts for that.
-- Does checks (2)-(4) of Note [Verifying injectivity annotation] in FamInstEnv.
checkInjectiveEquation :: FamInst -> TcM ()
checkInjectiveEquation famInst
| isTypeFamilyTyCon tycon
-- type family is injective in at least one argument
, Injective inj <- tyConInjectivityInfo tycon = do
{ dflags <- getDynFlags
; let axiom = coAxiomSingleBranch fi_ax
-- see Note [Verifying injectivity annotation] in FamInstEnv
; reportInjectivityErrors dflags fi_ax axiom inj
}
-- if there was no injectivity annotation or tycon does not represent a
-- type family we report no conflicts
| otherwise
= return ()
where tycon = famInstTyCon famInst
fi_ax = fi_axiom famInst
-- | Report a list of injectivity errors together with their source locations.
-- Looks only at one equation; does not look for conflicts *among* equations.
reportInjectivityErrors
:: DynFlags
-> CoAxiom br -- ^ Type family for which we generate errors
-> CoAxBranch -- ^ Currently checked equation (represented by axiom)
-> [Bool] -- ^ Injectivity annotation
-> TcM ()
reportInjectivityErrors dflags fi_ax axiom inj
= ASSERT2( any id inj, text "No injective type variables" )
do let lhs = coAxBranchLHS axiom
rhs = coAxBranchRHS axiom
fam_tc = coAxiomTyCon fi_ax
(unused_inj_tvs, unused_vis, undec_inst_flag)
= unusedInjTvsInRHS dflags fam_tc lhs rhs
inj_tvs_unused = not $ isEmptyVarSet unused_inj_tvs
tf_headed = isTFHeaded rhs
bare_variables = bareTvInRHSViolated lhs rhs
wrong_bare_rhs = not $ null bare_variables
when inj_tvs_unused $ reportUnusedInjectiveVarsErr fam_tc unused_inj_tvs
unused_vis undec_inst_flag axiom
when tf_headed $ reportTfHeadedErr fam_tc axiom
when wrong_bare_rhs $ reportBareVariableInRHSErr fam_tc bare_variables axiom
-- | Is type headed by a type family application?
isTFHeaded :: Type -> Bool
-- See Note [Verifying injectivity annotation], case 3.
isTFHeaded ty | Just ty' <- coreView ty
= isTFHeaded ty'
isTFHeaded ty | (TyConApp tc args) <- ty
, isTypeFamilyTyCon tc
= args `lengthIs` tyConArity tc
isTFHeaded _ = False
-- | If the RHS is a bare type variable, return the list of LHS patterns that
-- are not themselves bare type variables.
bareTvInRHSViolated :: [Type] -> Type -> [Type]
-- See Note [Verifying injectivity annotation], case 2.
bareTvInRHSViolated pats rhs | isTyVarTy rhs
= filter (not . isTyVarTy) pats
bareTvInRHSViolated _ _ = []
------------------------------------------------------------------
-- Checking for the coverage condition for injective type families
------------------------------------------------------------------
{-
Note [Coverage condition for injective type families]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The Injective Type Families paper describes how we can tell whether
or not a type family equation upholds the injectivity condition.
Briefly, consider the following:
type family F a b = r | r -> a -- NB: b is not injective
type instance F ty1 ty2 = ty3
We need to make sure that all variables mentioned in ty1 are mentioned in ty3
-- that's how we know that knowing ty3 determines ty1. But they can't be
mentioned just anywhere in ty3: they must be in *injective* positions in ty3.
For example:
type instance F a Int = Maybe (G a)
This is no good, if G is not injective. However, if G is indeed injective,
then this would appear to meet our needs. There is a trap here, though: while
knowing G a does indeed determine a, trying to compute a from G a might not
terminate. This is precisely the same problem that we have with functional
dependencies and their liberal coverage condition. Here is the test case:
type family G a = r | r -> a
type instance G [a] = [G a]
[W] G alpha ~ [alpha]
We see that the equation given applies, because G alpha equals a list. So we
learn that alpha must be [beta] for some beta. We then have
[W] G [beta] ~ [[beta]]
This can reduce to
[W] [G beta] ~ [[beta]]
which then decomposes to
[W] G beta ~ [beta]
right where we started. The equation G [a] = [G a] thus is dangerous: while
it does not violate the injectivity assumption, it might throw us into a loop,
with a particularly dastardly Wanted.
We thus do what functional dependencies do: require -XUndecidableInstances to
accept this.
Checking the coverage condition is not terribly hard, but we also want to produce
a nice error message. A nice error message has at least two properties:
1. If any of the variables involved are invisible or are used in an invisible context,
we want to print invisible arguments (as -fprint-explicit-kinds does).
2. If we fail to accept the equation because we're worried about non-termination,
we want to suggest UndecidableInstances.
To gather the right information, we can talk about the *usage* of a variable. Every
variable is used either visibly or invisibly, and it is either not used at all,
in a context where acceptance requires UndecidableInstances, or in a context that
does not require UndecidableInstances. If a variable is used both visibly and
invisibly, then we want to remember the fact that it was used invisibly: printing
out invisibles will be helpful for the user to understand what is going on.
If a variable is used where we need -XUndecidableInstances and where we don't,
we can similarly just remember the latter.
We thus define Visibility and NeedsUndecInstFlag below. These enumerations are
*ordered*, and we use their Ord instances. We then define VarUsage, which is just a pair
of a Visibility and a NeedsUndecInstFlag. (The visibility is irrelevant when a
variable is NotPresent, but this extra slack in the representation causes no
harm.) We finally define VarUsages as a mapping from variables to VarUsage.
Its Monoid instance combines two maps, using the Semigroup instance of VarUsage
to combine elements that are represented in both maps. In this way, we can
compositionally analyze types (and portions thereof).
To do the injectivity check:
1. We build VarUsages that represent the LHS (rather, the portion of the LHS
that is flagged as injective); each usage on the LHS is NotPresent, because we
have not yet looked at the RHS.
2. We also build a VarUsage for the RHS, done by injTyVarUsages.
3. We then combine these maps. Now, every variable in the injective components of the LHS
will be mapped to its correct usage (either NotPresent or perhaps needing
-XUndecidableInstances in order to be seen as injective).
4. We look up each var used in an injective argument on the LHS in
the map, making a list of tvs that should be determined by the RHS
but aren't.
5. We then return the set of bad variables, whether any of the bad
ones were used invisibly, and whether any bad ones need -XUndecidableInstances.
If -XUndecidableInstances is enabled, then a var that needs the flag
won't be bad, so it won't appear in this list.
6. We use all this information to produce a nice error message, (a) switching
on -fprint-explicit-kinds if appropriate and (b) telling the user about
-XUndecidableInstances if appropriate.
-}
-- | Return the set of type variables that a type family equation is
-- expected to be injective in but is not. Suppose we have @type family
-- F a b = r | r -> a@. Then any variables that appear free in the first
-- argument to F in an equation must be fixed by that equation's RHS.
-- This function returns all such variables that are not indeed fixed.
-- It also returns whether any of these variables appear invisibly
-- and whether -XUndecidableInstances would help.
-- See Note [Coverage condition for injective type families].
unusedInjTvsInRHS :: DynFlags
-> TyCon -- type family
-> [Type] -- LHS arguments
-> Type -- the RHS
-> ( TyVarSet
, Bool -- True <=> one or more variable is used invisibly
, Bool ) -- True <=> suggest -XUndecidableInstances
-- See Note [Verifying injectivity annotation] in FamInstEnv.
-- This function implements check (4) described there, further
-- described in Note [Coverage condition for injective type families].
-- In theory (and modulo the -XUndecidableInstances wrinkle),
-- instead of implementing this whole check in this way, we could
-- attempt to unify equation with itself. We would reject exactly the same
-- equations but this method gives us more precise error messages by returning
-- precise names of variables that are not mentioned in the RHS.
unusedInjTvsInRHS dflags tycon@(tyConInjectivityInfo -> Injective inj_list) lhs rhs =
-- Note [Coverage condition for injective type families], step 5
(bad_vars, any_invisible, suggest_undec)
where
undec_inst = xopt LangExt.UndecidableInstances dflags
inj_lhs = filterByList inj_list lhs
lhs_vars = tyCoVarsOfTypes inj_lhs
rhs_inj_vars = fvVarSet $ injectiveVarsOfType undec_inst rhs
bad_vars = lhs_vars `minusVarSet` rhs_inj_vars
any_bad = not $ isEmptyVarSet bad_vars
invis_vars = fvVarSet $ invisibleVarsOfTypes [mkTyConApp tycon lhs, rhs]
any_invisible = any_bad && (bad_vars `intersectsVarSet` invis_vars)
suggest_undec = any_bad &&
not undec_inst &&
(lhs_vars `subVarSet` fvVarSet (injectiveVarsOfType True rhs))
-- When the type family is not injective in any arguments
unusedInjTvsInRHS _ _ _ _ = (emptyVarSet, False, False)
---------------------------------------
-- Producing injectivity error messages
---------------------------------------
-- | Report error message for a pair of equations violating an injectivity
-- annotation. No error message if there are no branches.
reportConflictingInjectivityErrs :: TyCon -> [CoAxBranch] -> CoAxBranch -> TcM ()
reportConflictingInjectivityErrs _ [] _ = return ()
reportConflictingInjectivityErrs fam_tc (confEqn1:_) tyfamEqn
= addErrs [buildInjectivityError fam_tc herald (confEqn1 :| [tyfamEqn])]
where
herald = text "Type family equation right-hand sides overlap; this violates" $$
text "the family's injectivity annotation:"
-- | Injectivity error herald common to all injectivity errors.
injectivityErrorHerald :: SDoc
injectivityErrorHerald =
text "Type family equation violates the family's injectivity annotation."
-- | Report error message for equation with injective type variables unused in
-- the RHS. Note [Coverage condition for injective type families], step 6
reportUnusedInjectiveVarsErr :: TyCon
-> TyVarSet
-> Bool -- True <=> print invisible arguments
-> Bool -- True <=> suggest -XUndecidableInstances
-> CoAxBranch
-> TcM ()
reportUnusedInjectiveVarsErr fam_tc tvs has_kinds undec_inst tyfamEqn
= let (loc, doc) = buildInjectivityError fam_tc
(injectivityErrorHerald $$
herald $$
text "In the type family equation:")
(tyfamEqn :| [])
in addErrAt loc (pprWithExplicitKindsWhen has_kinds doc)
where
herald = sep [ what <+> text "variable" <>
pluralVarSet tvs <+> pprVarSet tvs (pprQuotedList . scopedSort)
, text "cannot be inferred from the right-hand side." ]
$$ extra
what | has_kinds = text "Type/kind"
| otherwise = text "Type"
extra | undec_inst = text "Using UndecidableInstances might help"
| otherwise = empty
-- | Report error message for equation that has a type family call at the top
-- level of RHS
reportTfHeadedErr :: TyCon -> CoAxBranch -> TcM ()
reportTfHeadedErr fam_tc branch
= addErrs [buildInjectivityError fam_tc
(injectivityErrorHerald $$
text "RHS of injective type family equation cannot" <+>
text "be a type family:")
(branch :| [])]
-- | Report error message for equation that has a bare type variable in the RHS
-- but LHS pattern is not a bare type variable.
reportBareVariableInRHSErr :: TyCon -> [Type] -> CoAxBranch -> TcM ()
reportBareVariableInRHSErr fam_tc tys branch
= addErrs [buildInjectivityError fam_tc
(injectivityErrorHerald $$
text "RHS of injective type family equation is a bare" <+>
text "type variable" $$
text "but these LHS type and kind patterns are not bare" <+>
text "variables:" <+> pprQuotedList tys)
(branch :| [])]
buildInjectivityError :: TyCon -> SDoc -> NonEmpty CoAxBranch -> (SrcSpan, SDoc)
buildInjectivityError fam_tc herald (eqn1 :| rest_eqns)
= ( coAxBranchSpan eqn1
, hang herald
2 (vcat (map (pprCoAxBranchUser fam_tc) (eqn1 : rest_eqns))) )
reportConflictInstErr :: FamInst -> [FamInstMatch] -> TcRn ()
reportConflictInstErr _ []
= return () -- No conflicts
reportConflictInstErr fam_inst (match1 : _)
| FamInstMatch { fim_instance = conf_inst } <- match1
, let sorted = sortWith getSpan [fam_inst, conf_inst]
fi1 = head sorted
span = coAxBranchSpan (coAxiomSingleBranch (famInstAxiom fi1))
= setSrcSpan span $ addErr $
hang (text "Conflicting family instance declarations:")
2 (vcat [ pprCoAxBranchUser (coAxiomTyCon ax) (coAxiomSingleBranch ax)
| fi <- sorted
, let ax = famInstAxiom fi ])
where
getSpan = getSrcLoc . famInstAxiom
-- The sortWith just arranges that instances are displayed in order
    -- of source location, which reduces wobbling in error messages,
-- and is better for users
tcGetFamInstEnvs :: TcM FamInstEnvs
-- Gets both the external-package inst-env
-- and the home-pkg inst env (includes module being compiled)
tcGetFamInstEnvs
= do { eps <- getEps; env <- getGblEnv
; return (eps_fam_inst_env eps, tcg_fam_inst_env env) }
| sdiehl/ghc | compiler/typecheck/FamInst.hs | bsd-3-clause | 46,445 | 0 | 16 | 12,903 | 4,594 | 2,496 | 2,098 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables, DoAndIfThenElse #-}
module TastierMachine.Bytecode where
import qualified TastierMachine.Instructions as Instructions
import qualified Data.Binary.Get as G
import qualified Data.Binary.Put as P
load :: G.Get [Instructions.InstructionWord]
load = do
empty <- G.isEmpty
if empty then
return []
else do
opcode <- G.getWord8
let inst = toEnum $ fromIntegral $ fromEnum $ opcode
let arity = Instructions.arguments inst
case arity of
0 -> do
rest <- load
return ((Instructions.Nullary inst) : rest)
1 -> do
arg <- G.getWord16be
rest <- load
return ((Instructions.Unary inst
(toEnum $ fromIntegral $ fromEnum $ arg)) : rest)
2 -> do
arg0 <- G.getWord16be
arg1 <- G.getWord16be
rest <- load
return ((Instructions.Binary inst
(toEnum $ fromIntegral $ fromEnum $ arg0)
(toEnum $ fromIntegral $ fromEnum $ arg1)) : rest)
save :: [Instructions.InstructionWord] -> P.Put
save [] = return ()
save (i:rest) = do
case i of
(Instructions.Nullary inst) -> do
P.putWord8 $ fromInteger $ fromIntegral $ fromEnum inst
save rest
(Instructions.Unary inst a) -> do
P.putWord8 $ fromInteger $ fromIntegral $ fromEnum inst
P.putWord16be $ fromInteger $ fromIntegral $ fromEnum a
save rest
(Instructions.Binary inst a b) -> do
P.putWord8 $ fromInteger $ fromIntegral $ fromEnum inst
P.putWord16be $ fromInteger $ fromIntegral $ fromEnum a
P.putWord16be $ fromInteger $ fromIntegral $ fromEnum b
save rest
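-- A usage sketch (not part of the original module): 'load' and 'save' are
-- meant to be driven by the standard Data.Binary.Get/Put runners, e.g.
--
-- > import qualified Data.ByteString.Lazy as B
-- >
-- > readBytecode :: FilePath -> IO [Instructions.InstructionWord]
-- > readBytecode path = G.runGet load `fmap` B.readFile path
-- >
-- > writeBytecode :: FilePath -> [Instructions.InstructionWord] -> IO ()
-- > writeBytecode path = B.writeFile path . P.runPut . save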
| houli/TastierMachine | src/TastierMachine/Bytecode.hs | bsd-3-clause | 1,650 | 0 | 23 | 448 | 548 | 269 | 279 | 46 | 4 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeApplications #-}
module Main where
import Criterion.Main
import Data.BigWord
import Data.Proxy
import GHC.TypeLits
main :: IO ()
main = defaultMain
[
bgroup "BigWord-Integer comparison"
[
bigWordIntegerTest 0 $ Proxy @(BigWord 0),
bigWordIntegerTest 1 $ Proxy @(BigWord 1),
bigWordIntegerTest 2 $ Proxy @(BigWord 2),
bigWordIntegerTest 3 $ Proxy @(BigWord 3),
bigWordIntegerTest 4 $ Proxy @(BigWord 4),
bigWordIntegerTest 5 $ Proxy @(BigWordStep 4 1),
bigWordIntegerTest 6 $ Proxy @(BigWordStep 4 2)
]
]
bigWordIntegerTest :: forall t. (Num t) => Integer -> Proxy t -> Benchmark
bigWordIntegerTest n _proxy =
env (twoNumbers n) $ \ ~(x, y) -> bgroup ("BigWord " ++ show n)
[
bench "BigWord" $ whnf (uncurry (+)) (fromInteger x :: t, fromInteger y :: t),
bench "Integer" $ whnf (uncurry (+)) (x, y)
]
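-- 'twoNumbers n' yields the same value twice: 2^(64 * 2^n) - 1, i.e. the
-- largest value representable in 64 * 2^n bits (presumably the width of the
-- corresponding BigWord), so each benchmark adds two maximal operands.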
twoNumbers :: Integer -> IO (Integer, Integer)
twoNumbers n = return (x,x)
where x = 2^(64 * 2^n) - 1
| ryanreich/bigint | test/Performance.hs | bsd-3-clause | 1,152 | 0 | 13 | 270 | 417 | 217 | 200 | 31 | 1 |
{-# OPTIONS -fglasgow-exts #-}
module TraversalLib (
module Data.Generics,
innermost
) where
import Data.Generics
import Control.Monad
import Prelude hiding (repeat)
-- Traversal schemes defined on top of Data.Generics
bottomup :: Data x => (forall y. Data y => y -> y) -> x -> x
bottomup f = f . gmapT (bottomup f)
innermost :: GenericM Maybe -> GenericT
innermost f = repeat (oncebu f)
repeat :: GenericM Maybe -> GenericT
repeat f x = maybe x (repeat f) (f x)
oncebu :: GenericM Maybe -> GenericM Maybe
oncebu f x = gmapMo (oncebu f) x `mplus` f x
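-- A usage sketch (not part of the original module): 'innermost' repeatedly
-- applies a single monadic rewrite step, located bottom-up, until no step
-- applies anywhere.  For instance, with a hypothetical expression type
-- 'Expr' (deriving Data and Typeable) and Data.Generics' 'mkMp':
--
-- > simplify :: GenericM Maybe
-- > simplify = mkMp step
-- >   where
-- >     step (Add (Lit 0) e) = Just e
-- >     step _               = Nothing
-- >
-- > normalise :: Expr -> Expr
-- > normalise = innermost simplify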
| grammarware/slps | topics/fl/haskell/TraversalLib.hs | bsd-3-clause | 565 | 0 | 10 | 113 | 211 | 110 | 101 | -1 | -1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Type subsumption and unification
-}
{-# LANGUAGE CPP, MultiWayIf, TupleSections, ScopedTypeVariables #-}
module TcUnify (
-- Full-blown subsumption
tcWrapResult, tcWrapResultO, tcSkolemise, tcSkolemiseET,
tcSubTypeHR, tcSubTypeO, tcSubType_NC, tcSubTypeDS,
tcSubTypeDS_NC_O, tcSubTypeET,
checkConstraints, buildImplicationFor,
-- Various unifications
unifyType, unifyTheta, unifyKind,
uType, promoteTcType,
swapOverTyVars, canSolveByUnification,
--------------------------------
-- Holes
tcInferInst, tcInferNoInst,
matchExpectedListTy,
matchExpectedPArrTy,
matchExpectedTyConApp,
matchExpectedAppTy,
matchExpectedFunTys,
matchActualFunTys, matchActualFunTysPart,
matchExpectedFunKind,
occCheckExpand, metaTyVarUpdateOK,
occCheckForErrors, OccCheckResult(..)
) where
#include "HsVersions.h"
import GhcPrelude
import HsSyn
import TyCoRep
import TcMType
import TcRnMonad
import TcType
import Type
import Coercion
import TcEvidence
import Name ( isSystemName )
import Inst
import TyCon
import TysWiredIn
import TysPrim( tYPE )
import Var
import VarSet
import VarEnv
import ErrUtils
import DynFlags
import BasicTypes
import Bag
import Util
import Pair( pFst )
import qualified GHC.LanguageExtensions as LangExt
import Outputable
import Control.Monad
import Control.Arrow ( second )
{-
************************************************************************
* *
matchExpected functions
* *
************************************************************************
Note [Herald for matchExpectedFunTys]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The 'herald' always looks like:
"The equation(s) for 'f' have"
"The abstraction (\x.e) takes"
"The section (+ x) expects"
"The function 'f' is applied to"
This is used to construct a message of form
The abstraction `\Just 1 -> ...' takes two arguments
but its type `Maybe a -> a' has only one
The equation(s) for `f' have two arguments
but its type `Maybe a -> a' has only one
The section `(f 3)' requires 'f' to take two arguments
but its type `Int -> Int' has only one
The function 'f' is applied to two arguments
but its type `Int -> Int' has only one
When visible type applications (e.g., `f @Int 1 2`, as in #13902) enter the
picture, we have a choice in deciding whether to count the type applications as
proper arguments:
The function 'f' is applied to one visible type argument
and two value arguments
but its type `forall a. a -> a` has only one visible type argument
and one value argument
Or whether to include the type applications as part of the herald itself:
The expression 'f @Int' is applied to two arguments
but its type `Int -> Int` has only one
The latter is easier to implement and is arguably easier to understand, so we
choose to implement that option.
Note [matchExpectedFunTys]
~~~~~~~~~~~~~~~~~~~~~~~~~~
matchExpectedFunTys checks that a sigma has the form
of an n-ary function. It passes the decomposed type to the
thing_inside, and returns a wrapper to coerce between the two types
It's used wherever a language construct must have a functional type,
namely:
A lambda expression
A function definition
An operator section
This function must be written CPS'd because it needs to fill in the
ExpTypes produced for arguments before it can fill in the ExpType
passed in.
-}
-- Use this one when you have an "expected" type.
matchExpectedFunTys :: forall a.
SDoc -- See Note [Herald for matchExpectedFunTys]
-> Arity
-> ExpRhoType -- deeply skolemised
-> ([ExpSigmaType] -> ExpRhoType -> TcM a)
-- must fill in these ExpTypes here
-> TcM (a, HsWrapper)
-- If matchExpectedFunTys n ty = (_, wrap)
-- then wrap : (t1 -> ... -> tn -> ty_r) ~> ty,
-- where [t1, ..., tn], ty_r are passed to the thing_inside
matchExpectedFunTys herald arity orig_ty thing_inside
= case orig_ty of
Check ty -> go [] arity ty
_ -> defer [] arity orig_ty
where
go acc_arg_tys 0 ty
= do { result <- thing_inside (reverse acc_arg_tys) (mkCheckExpType ty)
; return (result, idHsWrapper) }
go acc_arg_tys n ty
| Just ty' <- tcView ty = go acc_arg_tys n ty'
go acc_arg_tys n (FunTy arg_ty res_ty)
= ASSERT( not (isPredTy arg_ty) )
do { (result, wrap_res) <- go (mkCheckExpType arg_ty : acc_arg_tys)
(n-1) res_ty
; return ( result
, mkWpFun idHsWrapper wrap_res arg_ty res_ty doc ) }
where
doc = text "When inferring the argument type of a function with type" <+>
quotes (ppr orig_ty)
go acc_arg_tys n ty@(TyVarTy tv)
| isMetaTyVar tv
= do { cts <- readMetaTyVar tv
; case cts of
Indirect ty' -> go acc_arg_tys n ty'
Flexi -> defer acc_arg_tys n (mkCheckExpType ty) }
-- In all other cases we bale out into ordinary unification
-- However unlike the meta-tyvar case, we are sure that the
-- number of arguments doesn't match arity of the original
-- type, so we can add a bit more context to the error message
-- (cf Trac #7869).
--
-- It is not always an error, because specialized type may have
-- different arity, for example:
--
-- > f1 = f2 'a'
-- > f2 :: Monad m => m Bool
-- > f2 = undefined
--
-- But in that case we add specialized type into error context
-- anyway, because it may be useful. See also Trac #9605.
go acc_arg_tys n ty = addErrCtxtM mk_ctxt $
defer acc_arg_tys n (mkCheckExpType ty)
------------
defer :: [ExpSigmaType] -> Arity -> ExpRhoType -> TcM (a, HsWrapper)
defer acc_arg_tys n fun_ty
= do { more_arg_tys <- replicateM n newInferExpTypeNoInst
; res_ty <- newInferExpTypeInst
; result <- thing_inside (reverse acc_arg_tys ++ more_arg_tys) res_ty
; more_arg_tys <- mapM readExpType more_arg_tys
; res_ty <- readExpType res_ty
; let unif_fun_ty = mkFunTys more_arg_tys res_ty
; wrap <- tcSubTypeDS AppOrigin GenSigCtxt unif_fun_ty fun_ty
-- Not a good origin at all :-(
; return (result, wrap) }
------------
mk_ctxt :: TidyEnv -> TcM (TidyEnv, MsgDoc)
mk_ctxt env = do { (env', ty) <- zonkTidyTcType env orig_tc_ty
; let (args, _) = tcSplitFunTys ty
n_actual = length args
(env'', orig_ty') = tidyOpenType env' orig_tc_ty
; return ( env''
, mk_fun_tys_msg orig_ty' ty n_actual arity herald) }
where
orig_tc_ty = checkingExpType "matchExpectedFunTys" orig_ty
-- this is safe b/c we're called from "go"
-- Like 'matchExpectedFunTys', but used when you have an "actual" type,
-- for example in function application
matchActualFunTys :: SDoc -- See Note [Herald for matchExpectedFunTys]
-> CtOrigin
-> Maybe (HsExpr GhcRn) -- the thing with type TcSigmaType
-> Arity
-> TcSigmaType
-> TcM (HsWrapper, [TcSigmaType], TcSigmaType)
-- If matchActualFunTys n ty = (wrap, [t1,..,tn], ty_r)
-- then wrap : ty ~> (t1 -> ... -> tn -> ty_r)
matchActualFunTys herald ct_orig mb_thing arity ty
= matchActualFunTysPart herald ct_orig mb_thing arity ty [] arity
-- | Variant of 'matchActualFunTys' that works when supplied only part
-- (that is, to the right of some arrows) of the full function type
matchActualFunTysPart :: SDoc -- See Note [Herald for matchExpectedFunTys]
-> CtOrigin
-> Maybe (HsExpr GhcRn) -- the thing with type TcSigmaType
-> Arity
-> TcSigmaType
-> [TcSigmaType] -- reversed args. See (*) below.
-> Arity -- overall arity of the function, for errs
-> TcM (HsWrapper, [TcSigmaType], TcSigmaType)
matchActualFunTysPart herald ct_orig mb_thing arity orig_ty
orig_old_args full_arity
= go arity orig_old_args orig_ty
-- Does not allocate unnecessary meta variables: if the input already is
-- a function, we just take it apart. Not only is this efficient,
-- it's important for higher rank: the argument might be of form
-- (forall a. ty) -> other
-- If allocated (fresh-meta-var1 -> fresh-meta-var2) and unified, we'd
-- hide the forall inside a meta-variable
-- (*) Sometimes it's necessary to call matchActualFunTys with only part
-- (that is, to the right of some arrows) of the type of the function in
-- question. (See TcExpr.tcArgs.) This argument is the reversed list of
-- arguments already seen (that is, not part of the TcSigmaType passed
-- in elsewhere).
where
-- This function has a bizarre mechanic: it accumulates arguments on
-- the way down and also builds an argument list on the way up. Why:
    -- 1. The returned args list and the accumulated args list might be different.
-- The accumulated args include all the arg types for the function,
-- including those from before this function was called. The returned
-- list should include only those arguments produced by this call of
-- matchActualFunTys
--
-- 2. The HsWrapper can be built only on the way up. It seems (more)
-- bizarre to build the HsWrapper but not the arg_tys.
--
-- Refactoring is welcome.
go :: Arity
-> [TcSigmaType] -- accumulator of arguments (reversed)
-> TcSigmaType -- the remainder of the type as we're processing
-> TcM (HsWrapper, [TcSigmaType], TcSigmaType)
go 0 _ ty = return (idHsWrapper, [], ty)
go n acc_args ty
| not (null tvs && null theta)
= do { (wrap1, rho) <- topInstantiate ct_orig ty
; (wrap2, arg_tys, res_ty) <- go n acc_args rho
; return (wrap2 <.> wrap1, arg_tys, res_ty) }
where
(tvs, theta, _) = tcSplitSigmaTy ty
go n acc_args ty
| Just ty' <- tcView ty = go n acc_args ty'
go n acc_args (FunTy arg_ty res_ty)
= ASSERT( not (isPredTy arg_ty) )
do { (wrap_res, tys, ty_r) <- go (n-1) (arg_ty : acc_args) res_ty
; return ( mkWpFun idHsWrapper wrap_res arg_ty ty_r doc
, arg_ty : tys, ty_r ) }
where
doc = text "When inferring the argument type of a function with type" <+>
quotes (ppr orig_ty)
go n acc_args ty@(TyVarTy tv)
| isMetaTyVar tv
= do { cts <- readMetaTyVar tv
; case cts of
Indirect ty' -> go n acc_args ty'
Flexi -> defer n ty }
-- In all other cases we bale out into ordinary unification
-- However unlike the meta-tyvar case, we are sure that the
-- number of arguments doesn't match arity of the original
-- type, so we can add a bit more context to the error message
-- (cf Trac #7869).
--
-- It is not always an error, because specialized type may have
-- different arity, for example:
--
-- > f1 = f2 'a'
-- > f2 :: Monad m => m Bool
-- > f2 = undefined
--
-- But in that case we add specialized type into error context
-- anyway, because it may be useful. See also Trac #9605.
go n acc_args ty = addErrCtxtM (mk_ctxt (reverse acc_args) ty) $
defer n ty
------------
defer n fun_ty
= do { arg_tys <- replicateM n newOpenFlexiTyVarTy
; res_ty <- newOpenFlexiTyVarTy
; let unif_fun_ty = mkFunTys arg_tys res_ty
; co <- unifyType mb_thing fun_ty unif_fun_ty
; return (mkWpCastN co, arg_tys, res_ty) }
------------
mk_ctxt :: [TcSigmaType] -> TcSigmaType -> TidyEnv -> TcM (TidyEnv, MsgDoc)
mk_ctxt arg_tys res_ty env
= do { let ty = mkFunTys arg_tys res_ty
; (env1, zonked) <- zonkTidyTcType env ty
-- zonking might change # of args
; let (zonked_args, _) = tcSplitFunTys zonked
n_actual = length zonked_args
(env2, unzonked) = tidyOpenType env1 ty
; return ( env2
, mk_fun_tys_msg unzonked zonked n_actual full_arity herald) }
mk_fun_tys_msg :: TcType -- the full type passed in (unzonked)
-> TcType -- the full type passed in (zonked)
-> Arity -- the # of args found
-> Arity -- the # of args wanted
-> SDoc -- overall herald
-> SDoc
mk_fun_tys_msg full_ty ty n_args full_arity herald
= herald <+> speakNOf full_arity (text "argument") <> comma $$
if n_args == full_arity
then text "its type is" <+> quotes (pprType full_ty) <>
comma $$
text "it is specialized to" <+> quotes (pprType ty)
else sep [text "but its type" <+> quotes (pprType ty),
if n_args == 0 then text "has none"
else text "has only" <+> speakN n_args]
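-- For illustration only (the exact wording depends on the herald; see
-- Note [Herald for matchExpectedFunTys]): with a herald like
-- "The function 'f' is applied to", full_arity = 2 and n_args = 1,
-- the message reads roughly
-- The function 'f' is applied to two arguments,
-- but its type 'Int -> Bool' has only one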
----------------------
matchExpectedListTy :: TcRhoType -> TcM (TcCoercionN, TcRhoType)
-- Special case for lists
matchExpectedListTy exp_ty
= do { (co, [elt_ty]) <- matchExpectedTyConApp listTyCon exp_ty
; return (co, elt_ty) }
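-- Illustrative (hypothetical) behaviour: applied to a flexi meta-tyvar alpha,
-- > matchExpectedListTy alpha
-- should produce a fresh element type elt together with a coercion
-- [elt] ~N alpha, unifying alpha with [elt] along the way.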
----------------------
matchExpectedPArrTy :: TcRhoType -> TcM (TcCoercionN, TcRhoType)
-- Special case for parrs
matchExpectedPArrTy exp_ty
= do { (co, [elt_ty]) <- matchExpectedTyConApp parrTyCon exp_ty
; return (co, elt_ty) }
---------------------
matchExpectedTyConApp :: TyCon -- T :: forall kv1 ... kvm. k1 -> ... -> kn -> *
-> TcRhoType -- orig_ty
-> TcM (TcCoercionN, -- T k1 k2 k3 a b c ~N orig_ty
[TcSigmaType]) -- Element types, k1 k2 k3 a b c
-- It's used for wired-in tycons, so we call checkWiredInTyCon
-- Precondition: never called with FunTyCon
-- Precondition: input type :: *
-- Postcondition: (T k1 k2 k3 a b c) is well-kinded
matchExpectedTyConApp tc orig_ty
= ASSERT(tc /= funTyCon) go orig_ty
where
go ty
| Just ty' <- tcView ty
= go ty'
go ty@(TyConApp tycon args)
| tc == tycon -- Common case
= return (mkTcNomReflCo ty, args)
go (TyVarTy tv)
| isMetaTyVar tv
= do { cts <- readMetaTyVar tv
; case cts of
Indirect ty -> go ty
Flexi -> defer }
go _ = defer
-- If the common case does not occur, instantiate a template
-- T k1 .. kn t1 .. tm, and unify with the original type
-- Doing it this way ensures that the types we return are
-- kind-compatible with T. For example, suppose we have
-- matchExpectedTyConApp T (f Maybe)
-- where data T a = MkT a
-- Then we don't want to instantiate T's data constructors with
-- (a::*) ~ Maybe
-- because that'll make types that are utterly ill-kinded.
-- This happened in Trac #7368
defer
= do { (_, arg_tvs) <- newMetaTyVars (tyConTyVars tc)
; traceTc "matchExpectedTyConApp" (ppr tc $$ ppr (tyConTyVars tc) $$ ppr arg_tvs)
; let args = mkTyVarTys arg_tvs
tc_template = mkTyConApp tc args
; co <- unifyType Nothing tc_template orig_ty
; return (co, args) }
----------------------
matchExpectedAppTy :: TcRhoType -- orig_ty
-> TcM (TcCoercion, -- m a ~N orig_ty
(TcSigmaType, TcSigmaType)) -- Returns m, a
-- If the incoming type is a mutable type variable of kind k, then
-- matchExpectedAppTy returns a new type variable (m: * -> k); note the *.
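-- Illustrative (hypothetical) behaviour:
-- > matchExpectedAppTy (Maybe Int)
-- should return (refl, (Maybe, Int)), because the incoming type already
-- splits as a type application; only the tyvar case needs to defer.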
matchExpectedAppTy orig_ty
= go orig_ty
where
go ty
| Just ty' <- tcView ty = go ty'
| Just (fun_ty, arg_ty) <- tcSplitAppTy_maybe ty
= return (mkTcNomReflCo orig_ty, (fun_ty, arg_ty))
go (TyVarTy tv)
| isMetaTyVar tv
= do { cts <- readMetaTyVar tv
; case cts of
Indirect ty -> go ty
Flexi -> defer }
go _ = defer
-- Defer splitting by generating an equality constraint
defer
= do { ty1 <- newFlexiTyVarTy kind1
; ty2 <- newFlexiTyVarTy kind2
; co <- unifyType Nothing (mkAppTy ty1 ty2) orig_ty
; return (co, (ty1, ty2)) }
orig_kind = typeKind orig_ty
kind1 = mkFunTy liftedTypeKind orig_kind
kind2 = liftedTypeKind -- m :: * -> k
-- arg type :: *
{-
************************************************************************
* *
Subsumption checking
* *
************************************************************************
Note [Subsumption checking: tcSubType]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
All the tcSubType calls have the form
tcSubType actual_ty expected_ty
which checks
actual_ty <= expected_ty
That is, that a value of type actual_ty is acceptable in
a place expecting a value of type expected_ty. I.e. that
actual_ty is more polymorphic than expected_ty
It returns a coercion function
co_fn :: actual_ty ~ expected_ty
which takes an HsExpr of type actual_ty into one of type
expected_ty.
These functions do not actually check for subsumption. They check if
expected_ty is an appropriate annotation to use for something of type
actual_ty. This difference matters when thinking about visible type
application. For example,
forall a. a -> forall b. b -> b
DOES NOT SUBSUME
forall a b. a -> b -> b
because the type arguments appear in a different order. (Neither does
it work the other way around.) BUT, these types are appropriate annotations
for one another. Because the user directs annotations, it's OK if some
arguments shuffle around -- after all, it's what the user wants.
Bottom line: none of this changes with visible type application.
There are a number of wrinkles (below).
Notice that Wrinkle 1 and 2 both require eta-expansion, which technically
may increase termination. We just put up with this, in exchange for getting
more predictable type inference.
Wrinkle 1: Note [Deep skolemisation]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want (forall a. Int -> a -> a) <= (Int -> forall a. a->a)
(see section 4.6 of "Practical type inference for higher rank types")
So we must deeply-skolemise the RHS before we instantiate the LHS.
That is why tc_sub_type starts with a call to tcSkolemise (which does the
deep skolemisation), and then calls the DS variant (which assumes
that expected_ty is deeply skolemised)
Wrinkle 2: Note [Co/contra-variance of subsumption checking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider g :: (Int -> Int) -> Int
f1 :: (forall a. a -> a) -> Int
f1 = g
f2 :: (forall a. a -> a) -> Int
f2 x = g x
f2 will typecheck, and it would be odd/fragile if f1 did not.
But f1 will only typecheck if we have that
(Int->Int) -> Int <= (forall a. a->a) -> Int
And that is only true if we do the full co/contravariant thing
in the subsumption check. That happens in the FunTy case of
tcSubTypeDS_NC_O, and is the sole reason for the WpFun form of
HsWrapper.
Another powerful reason for doing this co/contra stuff is visible
in Trac #9569, involving instantiation of constraint variables,
and again involving eta-expansion.
Wrinkle 3: Note [Higher rank types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider tc150:
f y = \ (x::forall a. a->a). blah
The following happens:
* We will infer the type of the RHS, ie with a res_ty = alpha.
* Then the lambda will split alpha := beta -> gamma.
* And then we'll check tcSubType IsSwapped beta (forall a. a->a)
So it's important that we unify beta := forall a. a->a, rather than
skolemising the type.
-}
-- | Call this variant when you are in a higher-rank situation and
-- you know the right-hand type is deeply skolemised.
tcSubTypeHR :: CtOrigin -- ^ of the actual type
-> Maybe (HsExpr GhcRn) -- ^ If present, it has type ty_actual
-> TcSigmaType -> ExpRhoType -> TcM HsWrapper
tcSubTypeHR orig = tcSubTypeDS_NC_O orig GenSigCtxt
------------------------
tcSubTypeET :: CtOrigin -> UserTypeCtxt
-> ExpSigmaType -> TcSigmaType -> TcM HsWrapper
-- If wrap = tc_sub_type_et t1 t2
-- => wrap :: t1 ~> t2
tcSubTypeET orig ctxt (Check ty_actual) ty_expected
= tc_sub_tc_type eq_orig orig ctxt ty_actual ty_expected
where
eq_orig = TypeEqOrigin { uo_actual = ty_expected
, uo_expected = ty_actual
, uo_thing = Nothing
, uo_visible = True }
tcSubTypeET _ _ (Infer inf_res) ty_expected
= ASSERT2( not (ir_inst inf_res), ppr inf_res $$ ppr ty_expected )
do { co <- fillInferResult ty_expected inf_res
; return (mkWpCastN (mkTcSymCo co)) }
------------------------
tcSubTypeO :: CtOrigin -- ^ of the actual type
-> UserTypeCtxt -- ^ of the expected type
-> TcSigmaType
-> ExpRhoType
-> TcM HsWrapper
tcSubTypeO orig ctxt ty_actual ty_expected
= addSubTypeCtxt ty_actual ty_expected $
do { traceTc "tcSubTypeDS_O" (vcat [ pprCtOrigin orig
, pprUserTypeCtxt ctxt
, ppr ty_actual
, ppr ty_expected ])
; tcSubTypeDS_NC_O orig ctxt Nothing ty_actual ty_expected }
addSubTypeCtxt :: TcType -> ExpType -> TcM a -> TcM a
addSubTypeCtxt ty_actual ty_expected thing_inside
| isRhoTy ty_actual -- If there is no polymorphism involved, the
, isRhoExpTy ty_expected -- TypeEqOrigin stuff (added by the _NC functions)
= thing_inside -- gives enough context by itself
| otherwise
= addErrCtxtM mk_msg thing_inside
where
mk_msg tidy_env
= do { (tidy_env, ty_actual) <- zonkTidyTcType tidy_env ty_actual
-- might not be filled if we're debugging. ugh.
; mb_ty_expected <- readExpType_maybe ty_expected
; (tidy_env, ty_expected) <- case mb_ty_expected of
Just ty -> second mkCheckExpType <$>
zonkTidyTcType tidy_env ty
Nothing -> return (tidy_env, ty_expected)
; ty_expected <- readExpType ty_expected
; (tidy_env, ty_expected) <- zonkTidyTcType tidy_env ty_expected
; let msg = vcat [ hang (text "When checking that:")
4 (ppr ty_actual)
, nest 2 (hang (text "is more polymorphic than:")
2 (ppr ty_expected)) ]
; return (tidy_env, msg) }
---------------
-- The "_NC" variants do not add a typechecker-error context;
-- the caller is assumed to do that
tcSubType_NC :: UserTypeCtxt -> TcSigmaType -> TcSigmaType -> TcM HsWrapper
-- Checks that actual <= expected
-- Returns HsWrapper :: actual ~ expected
tcSubType_NC ctxt ty_actual ty_expected
= do { traceTc "tcSubType_NC" (vcat [pprUserTypeCtxt ctxt, ppr ty_actual, ppr ty_expected])
; tc_sub_tc_type origin origin ctxt ty_actual ty_expected }
where
origin = TypeEqOrigin { uo_actual = ty_actual
, uo_expected = ty_expected
, uo_thing = Nothing
, uo_visible = True }
tcSubTypeDS :: CtOrigin -> UserTypeCtxt -> TcSigmaType -> ExpRhoType -> TcM HsWrapper
-- Just like tcSubType, but with the additional precondition that
-- ty_expected is deeply skolemised (hence "DS")
tcSubTypeDS orig ctxt ty_actual ty_expected
= addSubTypeCtxt ty_actual ty_expected $
do { traceTc "tcSubTypeDS_NC" (vcat [pprUserTypeCtxt ctxt, ppr ty_actual, ppr ty_expected])
; tcSubTypeDS_NC_O orig ctxt Nothing ty_actual ty_expected }
tcSubTypeDS_NC_O :: CtOrigin -- origin used for instantiation only
-> UserTypeCtxt
-> Maybe (HsExpr GhcRn)
-> TcSigmaType -> ExpRhoType -> TcM HsWrapper
-- Just like tcSubType, but with the additional precondition that
-- ty_expected is deeply skolemised
tcSubTypeDS_NC_O inst_orig ctxt m_thing ty_actual ty_expected
= case ty_expected of
Infer inf_res -> fillInferResult_Inst inst_orig ty_actual inf_res
Check ty -> tc_sub_type_ds eq_orig inst_orig ctxt ty_actual ty
where
eq_orig = TypeEqOrigin { uo_actual = ty_actual, uo_expected = ty
, uo_thing = ppr <$> m_thing
, uo_visible = True }
---------------
tc_sub_tc_type :: CtOrigin -- used when calling uType
-> CtOrigin -- used when instantiating
-> UserTypeCtxt -> TcSigmaType -> TcSigmaType -> TcM HsWrapper
-- If wrap = tc_sub_type t1 t2
-- => wrap :: t1 ~> t2
tc_sub_tc_type eq_orig inst_orig ctxt ty_actual ty_expected
| definitely_poly ty_expected -- See Note [Don't skolemise unnecessarily]
, not (possibly_poly ty_actual)
= do { traceTc "tc_sub_tc_type (drop to equality)" $
vcat [ text "ty_actual =" <+> ppr ty_actual
, text "ty_expected =" <+> ppr ty_expected ]
; mkWpCastN <$>
uType TypeLevel eq_orig ty_actual ty_expected }
| otherwise -- This is the general case
= do { traceTc "tc_sub_tc_type (general case)" $
vcat [ text "ty_actual =" <+> ppr ty_actual
, text "ty_expected =" <+> ppr ty_expected ]
; (sk_wrap, inner_wrap) <- tcSkolemise ctxt ty_expected $
\ _ sk_rho ->
tc_sub_type_ds eq_orig inst_orig ctxt
ty_actual sk_rho
; return (sk_wrap <.> inner_wrap) }
where
possibly_poly ty
| isForAllTy ty = True
| Just (_, res) <- splitFunTy_maybe ty = possibly_poly res
| otherwise = False
-- NB *not* tcSplitFunTy, because here we want
-- to decompose type-class arguments too
definitely_poly ty
| (tvs, theta, tau) <- tcSplitSigmaTy ty
, (tv:_) <- tvs
, null theta
, isInsolubleOccursCheck NomEq tv tau
= True
| otherwise
= False
{- Note [Don't skolemise unnecessarily]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we are trying to solve
(Char->Char) <= (forall a. a->a)
We could skolemise the 'forall a', and then complain
that (Char ~ a) is insoluble; but that's a pretty obscure
error. It's better to say that
(Char->Char) ~ (forall a. a->a)
fails.
So roughly:
* if the ty_expected has an outermost forall
(i.e. skolemisation is the next thing we'd do)
* and the ty_actual has no top-level polymorphism (but looking deeply)
then we can revert to simple equality. But we need to be careful.
These examples are all fine:
* (Char -> forall a. a->a) <= (forall a. Char -> a -> a)
Polymorphism is buried in ty_actual
* (Char->Char) <= (forall a. Char -> Char)
ty_expected isn't really polymorphic
* (Char->Char) <= (forall a. (a~Char) => a -> a)
ty_expected isn't really polymorphic
* (Char->Char) <= (forall a. F [a] Char -> Char)
where type instance F [x] t = t
ty_expected isn't really polymorphic
If we prematurely go to equality we'll reject a program we should
accept (e.g. Trac #13752). So the test (which is only to improve
error message) is very conservative:
* ty_actual is /definitely/ monomorphic
* ty_expected is /definitely/ polymorphic
-}
---------------
tc_sub_type_ds :: CtOrigin -- used when calling uType
-> CtOrigin -- used when instantiating
-> UserTypeCtxt -> TcSigmaType -> TcRhoType -> TcM HsWrapper
-- If wrap = tc_sub_type_ds t1 t2
-- => wrap :: t1 ~> t2
-- Here is where the work actually happens!
-- Precondition: ty_expected is deeply skolemised
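-- Illustrative (hypothetical) walk-through: checking
-- > (forall a. a -> a) <= (Int -> Int)
-- takes the sigma-type branch below: topInstantiate gives (alpha -> alpha),
-- the FunTy case then handles the argument contravariantly and the result
-- covariantly (unifying alpha with Int), and the returned wrapper records
-- the instantiation.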
tc_sub_type_ds eq_orig inst_orig ctxt ty_actual ty_expected
= do { traceTc "tc_sub_type_ds" $
vcat [ text "ty_actual =" <+> ppr ty_actual
, text "ty_expected =" <+> ppr ty_expected ]
; go ty_actual ty_expected }
where
go ty_a ty_e | Just ty_a' <- tcView ty_a = go ty_a' ty_e
| Just ty_e' <- tcView ty_e = go ty_a ty_e'
go (TyVarTy tv_a) ty_e
= do { lookup_res <- lookupTcTyVar tv_a
; case lookup_res of
Filled ty_a' ->
do { traceTc "tcSubTypeDS_NC_O following filled act meta-tyvar:"
(ppr tv_a <+> text "-->" <+> ppr ty_a')
; tc_sub_type_ds eq_orig inst_orig ctxt ty_a' ty_e }
Unfilled _ -> unify }
-- Historical note (Sept 16): there was a case here for
-- go ty_a (TyVarTy alpha)
-- which, in the impredicative case unified alpha := ty_a
-- where ty_a is a polytype. Not only is this probably bogus (we
-- simply do not have a decent story for impredicative types), but it
-- caused Trac #12616 because (also bizarrely) 'deriving' code had
-- -XImpredicativeTypes on. I deleted the entire case.
go (FunTy act_arg act_res) (FunTy exp_arg exp_res)
| not (isPredTy act_arg)
, not (isPredTy exp_arg)
= -- See Note [Co/contra-variance of subsumption checking]
do { res_wrap <- tc_sub_type_ds eq_orig inst_orig ctxt act_res exp_res
; arg_wrap <- tc_sub_tc_type eq_orig given_orig ctxt exp_arg act_arg
; return (mkWpFun arg_wrap res_wrap exp_arg exp_res doc) }
-- arg_wrap :: exp_arg ~> act_arg
-- res_wrap :: act-res ~> exp_res
where
given_orig = GivenOrigin (SigSkol GenSigCtxt exp_arg [])
doc = text "When checking that" <+> quotes (ppr ty_actual) <+>
text "is more polymorphic than" <+> quotes (ppr ty_expected)
go ty_a ty_e
| let (tvs, theta, _) = tcSplitSigmaTy ty_a
, not (null tvs && null theta)
= do { (in_wrap, in_rho) <- topInstantiate inst_orig ty_a
; body_wrap <- tc_sub_type_ds
(eq_orig { uo_actual = in_rho
, uo_expected = ty_expected })
inst_orig ctxt in_rho ty_e
; return (body_wrap <.> in_wrap) }
| otherwise -- Revert to unification
= inst_and_unify
-- It's still possible that ty_actual has nested foralls. Instantiate
-- these, as there's no way unification will succeed with them in.
-- See typecheck/should_compile/T11305 for an example of when this
-- is important. The problem is that we're checking something like
-- a -> forall b. b -> b <= alpha beta gamma
-- where we end up with alpha := (->)
inst_and_unify = do { (wrap, rho_a) <- deeplyInstantiate inst_orig ty_actual
-- if we haven't recurred through an arrow, then
-- the eq_orig will list ty_actual. In this case,
-- we want to update the origin to reflect the
-- instantiation. If we *have* recurred through
-- an arrow, it's better not to update.
; let eq_orig' = case eq_orig of
TypeEqOrigin { uo_actual = orig_ty_actual }
| orig_ty_actual `tcEqType` ty_actual
, not (isIdHsWrapper wrap)
-> eq_orig { uo_actual = rho_a }
_ -> eq_orig
; cow <- uType TypeLevel eq_orig' rho_a ty_expected
; return (mkWpCastN cow <.> wrap) }
-- use versions without synonyms expanded
unify = mkWpCastN <$> uType TypeLevel eq_orig ty_actual ty_expected
-----------------
-- needs both un-type-checked (for origins) and type-checked (for wrapping)
-- expressions
tcWrapResult :: HsExpr GhcRn -> HsExpr GhcTcId -> TcSigmaType -> ExpRhoType
-> TcM (HsExpr GhcTcId)
tcWrapResult rn_expr = tcWrapResultO (exprCtOrigin rn_expr) rn_expr
-- | Sometimes we don't have a @HsExpr Name@ to hand, and this is more
-- convenient.
tcWrapResultO :: CtOrigin -> HsExpr GhcRn -> HsExpr GhcTcId -> TcSigmaType -> ExpRhoType
-> TcM (HsExpr GhcTcId)
tcWrapResultO orig rn_expr expr actual_ty res_ty
= do { traceTc "tcWrapResult" (vcat [ text "Actual: " <+> ppr actual_ty
, text "Expected:" <+> ppr res_ty ])
; cow <- tcSubTypeDS_NC_O orig GenSigCtxt
(Just rn_expr) actual_ty res_ty
; return (mkHsWrap cow expr) }
{- **********************************************************************
%* *
ExpType functions: tcInfer, fillInferResult
%* *
%********************************************************************* -}
-- | Infer a type using a fresh ExpType
-- See also Note [ExpType] in TcMType
-- Does not attempt to instantiate the inferred type
tcInferNoInst :: (ExpSigmaType -> TcM a) -> TcM (a, TcSigmaType)
tcInferNoInst = tcInfer False
tcInferInst :: (ExpRhoType -> TcM a) -> TcM (a, TcRhoType)
tcInferInst = tcInfer True
tcInfer :: Bool -> (ExpSigmaType -> TcM a) -> TcM (a, TcSigmaType)
tcInfer instantiate tc_check
= do { res_ty <- newInferExpType instantiate
; result <- tc_check res_ty
; res_ty <- readExpType res_ty
; return (result, res_ty) }
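-- A typical (hypothetical) usage pattern, with 'tc_body' standing in for
-- whatever checker fills the ExpType:
-- > (body', body_ty) <- tcInferInst (\ exp_ty -> tc_body exp_ty)
-- The continuation writes into the ExpType; tcInfer then reads it back out.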
fillInferResult_Inst :: CtOrigin -> TcType -> InferResult -> TcM HsWrapper
-- If wrap = fillInferResult_Inst t1 t2
-- => wrap :: t1 ~> t2
-- See Note [Deep instantiation of InferResult]
fillInferResult_Inst orig ty inf_res@(IR { ir_inst = instantiate_me })
| instantiate_me
= do { (wrap, rho) <- deeplyInstantiate orig ty
; co <- fillInferResult rho inf_res
; return (mkWpCastN co <.> wrap) }
| otherwise
= do { co <- fillInferResult ty inf_res
; return (mkWpCastN co) }
fillInferResult :: TcType -> InferResult -> TcM TcCoercionN
-- If wrap = fillInferResult t1 t2
-- => wrap :: t1 ~> t2
fillInferResult orig_ty (IR { ir_uniq = u, ir_lvl = res_lvl
, ir_ref = ref })
= do { (ty_co, ty_to_fill_with) <- promoteTcType res_lvl orig_ty
; traceTc "Filling ExpType" $
ppr u <+> text ":=" <+> ppr ty_to_fill_with
; when debugIsOn (check_hole ty_to_fill_with)
; writeTcRef ref (Just ty_to_fill_with)
; return ty_co }
where
check_hole ty -- Debug check only
= do { let ty_lvl = tcTypeLevel ty
; MASSERT2( not (ty_lvl `strictlyDeeperThan` res_lvl),
ppr u $$ ppr res_lvl $$ ppr ty_lvl $$
ppr ty <+> ppr (typeKind ty) $$ ppr orig_ty )
; cts <- readTcRef ref
; case cts of
Just already_there -> pprPanic "writeExpType"
(vcat [ ppr u
, ppr ty
, ppr already_there ])
Nothing -> return () }
{- Note [Deep instantiation of InferResult]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In some cases we want to deeply instantiate before filling in
an InferResult, and in some cases not. That's why InferResult
has the ir_inst flag.
* ir_inst = True: deeply instantiate
Consider
f x = (*)
We want to instantiate the type of (*) before returning, else we
will infer the type
f :: forall {a}. a -> forall b. Num b => b -> b -> b
This is surely confusing for users.
And worse, the monomorphism restriction won't work properly. The MR is
dealt with in simplifyInfer, and simplifyInfer has no way of
instantiating. This could perhaps be worked around, but it may be
hard to know even when instantiation should happen.
Another reason. Consider
f :: (?x :: Int) => a -> a
g y = let ?x = 3::Int in f
Here we want to instantiate f's type so that the ?x::Int constraint
gets discharged by the enclosing implicit-parameter binding.
* ir_inst = False: do not instantiate
Consider this (which uses visible type application):
(let { f :: forall a. a -> a; f x = x } in f) @Int
We'll call TcExpr.tcInferFun to infer the type of the (let .. in f)
And we don't want to instantiate the type of 'f' when we reach it,
else the outer visible type application won't work
-}
{- *********************************************************************
* *
Promoting types
* *
********************************************************************* -}
promoteTcType :: TcLevel -> TcType -> TcM (TcCoercion, TcType)
-- See Note [Promoting a type]
-- promoteTcType level ty = (co, ty')
-- * Returns ty' whose max level is just 'level'
-- and whose kind is ~# to the kind of 'ty'
-- and whose kind has form TYPE rr
-- * and co :: ty ~ ty'
-- * and emits constraints to justify the coercion
promoteTcType dest_lvl ty
= do { cur_lvl <- getTcLevel
; if (cur_lvl `sameDepthAs` dest_lvl)
then dont_promote_it
else promote_it }
where
promote_it :: TcM (TcCoercion, TcType)
promote_it -- Emit a constraint (alpha :: TYPE rr) ~ ty
-- where alpha and rr are fresh and from level dest_lvl
= do { rr <- newMetaTyVarTyAtLevel dest_lvl runtimeRepTy
; prom_ty <- newMetaTyVarTyAtLevel dest_lvl (tYPE rr)
; let eq_orig = TypeEqOrigin { uo_actual = ty
, uo_expected = prom_ty
, uo_thing = Nothing
, uo_visible = False }
; co <- emitWantedEq eq_orig TypeLevel Nominal ty prom_ty
; return (co, prom_ty) }
dont_promote_it :: TcM (TcCoercion, TcType)
dont_promote_it -- Check that ty :: TYPE rr, for some (fresh) rr
= do { res_kind <- newOpenTypeKind
; let ty_kind = typeKind ty
kind_orig = TypeEqOrigin { uo_actual = ty_kind
, uo_expected = res_kind
, uo_thing = Nothing
, uo_visible = False }
; ki_co <- uType KindLevel kind_orig (typeKind ty) res_kind
; let co = mkTcNomReflCo ty `mkTcCoherenceRightCo` ki_co
; return (co, ty `mkCastTy` ki_co) }
{- Note [Promoting a type]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider (Trac #12427)
data T where
MkT :: (Int -> Int) -> a -> T
h y = case y of MkT v w -> v
We'll infer the RHS type with an expected type ExpType of
(IR { ir_lvl = l, ir_ref = ref, ... )
where 'l' is the TcLevel of the RHS of 'h'. Then the MkT pattern
match will increase the level, so we'll end up in tcSubType, trying to
unify the type of v,
v :: Int -> Int
with the expected type. But this attempt takes place at level (l+1),
rightly so, since v's type could have mentioned existential variables,
(like w's does) and we want to catch that.
So we
- create a new meta-var alpha[l+1]
- fill in the InferRes ref cell 'ref' with alpha
- emit an equality constraint, thus
[W] alpha[l+1] ~ (Int -> Int)
That constraint will float outwards, as it should, unless v's
type mentions a skolem-captured variable.
This approach fails if v has a higher rank type; see
Note [Promotion and higher rank types]
Note [Promotion and higher rank types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If v had a higher-rank type, say v :: (forall a. a->a) -> Int,
then we'd emit an equality
[W] alpha[l+1] ~ ((forall a. a->a) -> Int)
which will sadly fail because we can't unify a unification variable
with a polytype. But there is nothing really wrong with the program
here.
We could just about solve this by "promote the type" of v, to expose
its polymorphic "shape" while still leaving constraints that will
prevent existential escape. But we must be careful! Exposing
the "shape" of the type is precisely what we must NOT do under
a GADT pattern match! So in this case we might promote the type
to
(forall a. a->a) -> alpha[l+1]
and emit the constraint
[W] alpha[l+1] ~ Int
Now the promoted type can fill the ref cell, while the emitted
equality can float or not, according to the usual rules.
But that's not quite right! We are exposing the arrow! We could
deal with that too:
(forall a. mu[l+1] a a) -> alpha[l+1]
with constraints
[W] alpha[l+1] ~ Int
[W] mu[l+1] ~ (->)
Here we abstract over the '->' inside the forall, in case that
is subject to an equality constraint from a GADT match.
Note that we kept the outer (->) because that's part of
the polymorphic "shape". And because of impredicativity,
GADT matches can't give equalities that affect polymorphic
shape.
This reasoning just seems too complicated, so I decided not
to do it. These higher-rank notes are just here to record
the thinking.
-}
{- *********************************************************************
* *
Generalisation
* *
********************************************************************* -}
-- | Take an "expected type" and strip off quantifiers to expose the
-- type underneath, binding the new skolems for the @thing_inside@.
-- The returned 'HsWrapper' has type @specific_ty -> expected_ty@.
tcSkolemise :: UserTypeCtxt -> TcSigmaType
-> ([TcTyVar] -> TcType -> TcM result)
-- ^ These are only ever used for scoped type variables.
-> TcM (HsWrapper, result)
-- ^ The expression has type: spec_ty -> expected_ty
tcSkolemise ctxt expected_ty thing_inside
-- We expect expected_ty to be a forall-type
-- If not, the call is a no-op
= do { traceTc "tcSkolemise" Outputable.empty
; (wrap, tv_prs, given, rho') <- deeplySkolemise expected_ty
; lvl <- getTcLevel
; when debugIsOn $
traceTc "tcSkolemise" $ vcat [
ppr lvl,
text "expected_ty" <+> ppr expected_ty,
text "inst tyvars" <+> ppr tv_prs,
text "given" <+> ppr given,
text "inst type" <+> ppr rho' ]
-- Generally we must check that the "forall_tvs" haven't been constrained
-- The interesting bit here is that we must include the free variables
-- of the expected_ty. Here's an example:
-- runST (newVar True)
-- Here, if we don't make a check, we'll get a type (ST s (MutVar s Bool))
-- for (newVar True), with s fresh. Then we unify with the runST's arg type
-- forall s'. ST s' a. That unifies s' with s, and a with MutVar s Bool.
-- So now s' isn't unconstrained because it's linked to a.
--
-- However [Oct 10] now that the untouchables are a range of
-- TcTyVars, all this is handled automatically with no need for
-- extra faffing around
; let tvs' = map snd tv_prs
skol_info = SigSkol ctxt expected_ty tv_prs
; (ev_binds, result) <- checkConstraints skol_info tvs' given $
thing_inside tvs' rho'
; return (wrap <.> mkWpLet ev_binds, result) }
-- The ev_binds returned by checkConstraints is very
-- often empty, in which case mkWpLet is a no-op
-- | Variant of 'tcSkolemise' that takes an ExpType
tcSkolemiseET :: UserTypeCtxt -> ExpSigmaType
-> (ExpRhoType -> TcM result)
-> TcM (HsWrapper, result)
tcSkolemiseET _ et@(Infer {}) thing_inside
= (idHsWrapper, ) <$> thing_inside et
tcSkolemiseET ctxt (Check ty) thing_inside
= tcSkolemise ctxt ty $ \_ -> thing_inside . mkCheckExpType
checkConstraints :: SkolemInfo
-> [TcTyVar] -- Skolems
-> [EvVar] -- Given
-> TcM result
-> TcM (TcEvBinds, result)
checkConstraints skol_info skol_tvs given thing_inside
= do { (implics, ev_binds, result)
<- buildImplication skol_info skol_tvs given thing_inside
; emitImplications implics
; return (ev_binds, result) }
buildImplication :: SkolemInfo
-> [TcTyVar] -- Skolems
-> [EvVar] -- Given
-> TcM result
-> TcM (Bag Implication, TcEvBinds, result)
buildImplication skol_info skol_tvs given thing_inside
= do { implication_needed <- implicationNeeded skol_tvs given
; if implication_needed
then do { (tclvl, wanted, result) <- pushLevelAndCaptureConstraints thing_inside
; (implics, ev_binds) <- buildImplicationFor tclvl skol_info skol_tvs given wanted
; return (implics, ev_binds, result) }
else -- Fast path. We check every function argument with
-- tcPolyExpr, which uses tcSkolemise and hence checkConstraints.
-- So this fast path is well-exercised
do { res <- thing_inside
; return (emptyBag, emptyTcEvBinds, res) } }
implicationNeeded :: [TcTyVar] -> [EvVar] -> TcM Bool
-- With the solver producing unlifted equalities, we need
-- to have an EvBindsVar for them when they might be deferred to
-- runtime. Otherwise, they end up as top-level unlifted bindings,
-- which are verboten. See also Note [Deferred errors for coercion holes]
-- in TcErrors. cf Trac #14149 for an example of what goes wrong.
implicationNeeded skol_tvs given
| null skol_tvs
, null given
= -- Empty skolems and givens
do { tc_lvl <- getTcLevel
; if not (isTopTcLevel tc_lvl) -- No implication needed if we are
then return False -- already inside an implication
else
do { dflags <- getDynFlags -- If any deferral can happen,
-- we must build an implication
; return (gopt Opt_DeferTypeErrors dflags ||
gopt Opt_DeferTypedHoles dflags ||
gopt Opt_DeferOutOfScopeVariables dflags) } }
| otherwise -- Non-empty skolems or givens
= return True -- Definitely need an implication
buildImplicationFor :: TcLevel -> SkolemInfo -> [TcTyVar]
-> [EvVar] -> WantedConstraints
-> TcM (Bag Implication, TcEvBinds)
buildImplicationFor tclvl skol_info skol_tvs given wanted
| isEmptyWC wanted && null given
-- Optimisation : if there are no wanteds, and no givens
-- don't generate an implication at all.
-- Reason for the (null given): we don't want to lose
-- the "inaccessible alternative" error check
= return (emptyBag, emptyTcEvBinds)
| otherwise
= ASSERT2( all isSkolemTyVar skol_tvs, ppr skol_tvs )
do { ev_binds_var <- newTcEvBinds
; env <- getLclEnv
; let implic = Implic { ic_tclvl = tclvl
, ic_skols = skol_tvs
, ic_no_eqs = False
, ic_given = given
, ic_wanted = wanted
, ic_status = IC_Unsolved
, ic_binds = ev_binds_var
, ic_env = env
, ic_needed = emptyVarSet
, ic_info = skol_info }
; return (unitBag implic, TcEvBinds ev_binds_var) }
{-
************************************************************************
* *
Boxy unification
* *
************************************************************************
The exported functions are all defined as versions of some
non-exported generic functions.
-}
unifyType :: Maybe (HsExpr GhcRn) -- ^ If present, has type 'ty1'
-> TcTauType -> TcTauType -> TcM TcCoercionN
-- Actual and expected types
-- Returns a coercion : ty1 ~ ty2
unifyType thing ty1 ty2 = traceTc "utype" (ppr ty1 $$ ppr ty2 $$ ppr thing) >>
uType TypeLevel origin ty1 ty2
where
origin = TypeEqOrigin { uo_actual = ty1, uo_expected = ty2
, uo_thing = ppr <$> thing
, uo_visible = True } -- always called from a visible context
unifyKind :: Maybe (HsType GhcRn) -> TcKind -> TcKind -> TcM CoercionN
unifyKind thing ty1 ty2 = traceTc "ukind" (ppr ty1 $$ ppr ty2 $$ ppr thing) >>
uType KindLevel origin ty1 ty2
where origin = TypeEqOrigin { uo_actual = ty1, uo_expected = ty2
, uo_thing = ppr <$> thing
, uo_visible = True } -- also always from a visible context
---------------
unifyPred :: PredType -> PredType -> TcM TcCoercionN
-- Actual and expected types
unifyPred = unifyType Nothing
---------------
unifyTheta :: TcThetaType -> TcThetaType -> TcM [TcCoercionN]
-- Actual and expected types
unifyTheta theta1 theta2
= do { checkTc (equalLength theta1 theta2)
(vcat [text "Contexts differ in length",
nest 2 $ parens $ text "Use RelaxedPolyRec to allow this"])
; zipWithM unifyPred theta1 theta2 }
{-
%************************************************************************
%* *
uType and friends
%* *
%************************************************************************
uType is the heart of the unifier.
-}
uType, uType_defer
:: TypeOrKind
-> CtOrigin
-> TcType -- ty1 is the *actual* type
-> TcType -- ty2 is the *expected* type
-> TcM Coercion
--------------
-- It is always safe to defer unification to the main constraint solver
-- See Note [Deferred unification]
uType_defer t_or_k origin ty1 ty2
= do { co <- emitWantedEq origin t_or_k Nominal ty1 ty2
-- Error trace only
-- NB. do *not* call mkErrInfo unless tracing is on,
-- because it is hugely expensive (#5631)
; whenDOptM Opt_D_dump_tc_trace $ do
{ ctxt <- getErrCtxt
; doc <- mkErrInfo emptyTidyEnv ctxt
; traceTc "utype_defer" (vcat [ debugPprType ty1
, debugPprType ty2
, pprCtOrigin origin
, doc])
; traceTc "utype_defer2" (ppr co)
}
; return co }
--------------
uType t_or_k origin orig_ty1 orig_ty2
= do { tclvl <- getTcLevel
; traceTc "u_tys" $ vcat
[ text "tclvl" <+> ppr tclvl
, sep [ ppr orig_ty1, text "~", ppr orig_ty2]
, pprCtOrigin origin]
; co <- go orig_ty1 orig_ty2
; if isReflCo co
then traceTc "u_tys yields no coercion" Outputable.empty
else traceTc "u_tys yields coercion:" (ppr co)
; return co }
where
go :: TcType -> TcType -> TcM Coercion
-- The arguments to 'go' are always semantically identical
-- to orig_ty{1,2} except for looking through type synonyms
-- Variables; go for uVar
-- Note that we pass in *original* (before synonym expansion),
-- so that type variables tend to get filled in with
-- the most informative version of the type
go (TyVarTy tv1) ty2
= do { lookup_res <- lookupTcTyVar tv1
; case lookup_res of
Filled ty1 -> do { traceTc "found filled tyvar" (ppr tv1 <+> text ":->" <+> ppr ty1)
; go ty1 ty2 }
Unfilled _ -> uUnfilledVar origin t_or_k NotSwapped tv1 ty2 }
go ty1 (TyVarTy tv2)
= do { lookup_res <- lookupTcTyVar tv2
; case lookup_res of
Filled ty2 -> do { traceTc "found filled tyvar" (ppr tv2 <+> text ":->" <+> ppr ty2)
; go ty1 ty2 }
Unfilled _ -> uUnfilledVar origin t_or_k IsSwapped tv2 ty1 }
-- See Note [Expanding synonyms during unification]
go ty1@(TyConApp tc1 []) (TyConApp tc2 [])
| tc1 == tc2
= return $ mkReflCo Nominal ty1
-- See Note [Expanding synonyms during unification]
--
-- Also NB that we recurse to 'go' so that we don't push a
-- new item on the origin stack. As a result if we have
-- type Foo = Int
-- and we try to unify Foo ~ Bool
-- we'll end up saying "can't match Foo with Bool"
-- rather than "can't match Int with Bool". See Trac #4535.
go ty1 ty2
| Just ty1' <- tcView ty1 = go ty1' ty2
| Just ty2' <- tcView ty2 = go ty1 ty2'
go (CastTy t1 co1) t2
= do { co_tys <- go t1 t2
; return (mkCoherenceLeftCo co_tys co1) }
go t1 (CastTy t2 co2)
= do { co_tys <- go t1 t2
; return (mkCoherenceRightCo co_tys co2) }
-- Functions (or predicate functions) just check the two parts
go (FunTy fun1 arg1) (FunTy fun2 arg2)
= do { co_l <- uType t_or_k origin fun1 fun2
; co_r <- uType t_or_k origin arg1 arg2
; return $ mkFunCo Nominal co_l co_r }
-- Always defer if a type synonym family (type function)
-- is involved. (Data families behave rigidly.)
go ty1@(TyConApp tc1 _) ty2
| isTypeFamilyTyCon tc1 = defer ty1 ty2
go ty1 ty2@(TyConApp tc2 _)
| isTypeFamilyTyCon tc2 = defer ty1 ty2
go (TyConApp tc1 tys1) (TyConApp tc2 tys2)
-- See Note [Mismatched type lists and application decomposition]
| tc1 == tc2, equalLength tys1 tys2
= ASSERT2( isGenerativeTyCon tc1 Nominal, ppr tc1 )
do { cos <- zipWith3M (uType t_or_k) origins' tys1 tys2
; return $ mkTyConAppCo Nominal tc1 cos }
where
origins' = map (\is_vis -> if is_vis then origin else toInvisibleOrigin origin)
(tcTyConVisibilities tc1)
go (LitTy m) ty@(LitTy n)
| m == n
= return $ mkNomReflCo ty
-- See Note [Care with type applications]
-- Do not decompose FunTy against App;
-- it's often a type error, so leave it for the constraint solver
go (AppTy s1 t1) (AppTy s2 t2)
= go_app (isNextArgVisible s1) s1 t1 s2 t2
go (AppTy s1 t1) (TyConApp tc2 ts2)
| Just (ts2', t2') <- snocView ts2
= ASSERT( mightBeUnsaturatedTyCon tc2 )
go_app (isNextTyConArgVisible tc2 ts2') s1 t1 (TyConApp tc2 ts2') t2'
go (TyConApp tc1 ts1) (AppTy s2 t2)
| Just (ts1', t1') <- snocView ts1
= ASSERT( mightBeUnsaturatedTyCon tc1 )
go_app (isNextTyConArgVisible tc1 ts1') (TyConApp tc1 ts1') t1' s2 t2
go (CoercionTy co1) (CoercionTy co2)
= do { let ty1 = coercionType co1
ty2 = coercionType co2
; kco <- uType KindLevel
(KindEqOrigin orig_ty1 (Just orig_ty2) origin
(Just t_or_k))
ty1 ty2
; return $ mkProofIrrelCo Nominal kco co1 co2 }
-- Anything else fails
-- E.g. unifying for-all types, which is relatively unusual
go ty1 ty2 = defer ty1 ty2
------------------
defer ty1 ty2 -- See Note [Check for equality before deferring]
| ty1 `tcEqType` ty2 = return (mkNomReflCo ty1)
| otherwise = uType_defer t_or_k origin ty1 ty2
------------------
go_app vis s1 t1 s2 t2
= do { co_s <- uType t_or_k origin s1 s2
; let arg_origin
| vis = origin
| otherwise = toInvisibleOrigin origin
; co_t <- uType t_or_k arg_origin t1 t2
; return $ mkAppCo co_s co_t }
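-- Illustrative (hypothetical) trace: unifying (Maybe alpha) ~ (Maybe Int)
-- takes the TyConApp case above, recurses on (alpha ~ Int), and fills in
-- alpha := Int via the uUnfilledVar path.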
{- Note [Check for equality before deferring]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Particularly in ambiguity checks we can get equalities like (ty ~ ty).
If ty involves a type function we may defer, which isn't very sensible.
An egregious example of this was in test T9872a, which has a type signature
Proxy :: Proxy (Solutions Cubes)
Doing the ambiguity check on this signature generates the equality
Solutions Cubes ~ Solutions Cubes
and currently the constraint solver normalises both sides at vast cost.
This little short-cut in 'defer' helps quite a bit.
Note [Care with type applications]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Note: type applications need a bit of care!
They can match FunTy and TyConApp, so use splitAppTy_maybe
NB: we've already dealt with type variables and Notes,
so if one type is an App the other one jolly well better be too
Note [Mismatched type lists and application decomposition]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we find two TyConApps, you might think that the argument lists
are guaranteed equal length. But they aren't. Consider matching
w (T x) ~ Foo (T x y)
We do match (w ~ Foo) first, but in some circumstances we simply create
a deferred constraint; and then go ahead and match (T x ~ T x y).
This came up in Trac #3950.
So either
(a) either we must check for identical argument kinds
when decomposing applications,
(b) or we must be prepared for ill-kinded unification sub-problems
Currently we adopt (b) since it seems more robust -- no need to maintain
a global invariant.
Note [Expanding synonyms during unification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We expand synonyms during unification, but:
* We expand *after* the variable case so that we tend to unify
variables with un-expanded type synonyms. This just makes it
more likely that the inferred types will mention type synonyms
understandable to the user
* We expand *before* the TyConApp case. For example, if we have
type Phantom a = Int
and are unifying
Phantom Int ~ Phantom Char
it is *wrong* to unify Int and Char.
* The problem case immediately above can happen only with arguments
to the tycon. So we check for nullary tycons *before* expanding.
This is particularly helpful when checking (* ~ *), because * is
now a type synonym.
Note [Deferred Unification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
We may encounter a unification ty1 ~ ty2 that cannot be performed syntactically,
and yet its consistency is undetermined. Previously, there was no way to still
make it consistent. So a mismatch error was issued.
Now these unifications are deferred until constraint simplification, where type
family instances and given equations may (or may not) establish the consistency.
Deferred unifications are of the form
F ... ~ ...
or x ~ ...
where F is a type function and x is a type variable.
E.g.
id :: x ~ y => x -> y
id e = e
involves the unification x = y. It is deferred until we bring into account the
context x ~ y to establish that it holds.
If available, we defer original types (rather than those where closed type
synonyms have already been expanded via tcCoreView). This is, as usual, to
improve error messages.
************************************************************************
* *
uVar and friends
* *
************************************************************************
@uVar@ is called when at least one of the types being unified is a
variable. It does {\em not} assume that the variable is a fixed point
of the substitution; rather, notice that @uVar@ (defined below) nips
back into @uTys@ if it turns out that the variable is already bound.
-}
----------
uUnfilledVar :: CtOrigin
-> TypeOrKind
-> SwapFlag
-> TcTyVar -- Tyvar 1
-> TcTauType -- Type 2
-> TcM Coercion
-- "Unfilled" means that the variable is definitely not a filled-in meta tyvar
-- It might be a skolem, or untouchable, or meta
uUnfilledVar origin t_or_k swapped tv1 ty2
= do { ty2 <- zonkTcType ty2
-- Zonk to expose things to the
-- occurs check, and so that if ty2
-- looks like a type variable then it
-- /is/ a type variable
; uUnfilledVar1 origin t_or_k swapped tv1 ty2 }
----------
uUnfilledVar1 :: CtOrigin
-> TypeOrKind
-> SwapFlag
-> TcTyVar -- Tyvar 1
-> TcTauType -- Type 2, zonked
-> TcM Coercion
uUnfilledVar1 origin t_or_k swapped tv1 ty2
| Just tv2 <- tcGetTyVar_maybe ty2
= go tv2
| otherwise
= uUnfilledVar2 origin t_or_k swapped tv1 ty2
where
-- 'go' handles the case where both are
-- tyvars so we might want to swap
go tv2 | tv1 == tv2 -- Same type variable => no-op
= return (mkNomReflCo (mkTyVarTy tv1))
| swapOverTyVars tv1 tv2 -- Distinct type variables
= uUnfilledVar2 origin t_or_k (flipSwap swapped)
tv2 (mkTyVarTy tv1)
| otherwise
= uUnfilledVar2 origin t_or_k swapped tv1 ty2
----------
uUnfilledVar2 :: CtOrigin
-> TypeOrKind
-> SwapFlag
-> TcTyVar -- Tyvar 1
-> TcTauType -- Type 2, zonked
-> TcM Coercion
uUnfilledVar2 origin t_or_k swapped tv1 ty2
= do { dflags <- getDynFlags
; cur_lvl <- getTcLevel
; go dflags cur_lvl }
where
go dflags cur_lvl
| canSolveByUnification cur_lvl tv1 ty2
, Just ty2' <- metaTyVarUpdateOK dflags tv1 ty2
= do { co_k <- uType KindLevel kind_origin (typeKind ty2') (tyVarKind tv1)
; if isTcReflCo co_k -- only proceed if the kinds matched.
then do { writeMetaTyVar tv1 ty2'
; return (mkTcNomReflCo ty2') }
else defer } -- this cannot be solved now.
-- See Note [Equalities with incompatible kinds]
-- in TcCanonical
| otherwise
= defer
-- Occurs check or an untouchable: just defer
-- NB: occurs check isn't necessarily fatal:
-- eg tv1 occurred in a type family parameter
ty1 = mkTyVarTy tv1
kind_origin = KindEqOrigin ty1 (Just ty2) origin (Just t_or_k)
defer = unSwap swapped (uType_defer t_or_k origin) ty1 ty2
swapOverTyVars :: TcTyVar -> TcTyVar -> Bool
swapOverTyVars tv1 tv2
| isFmvTyVar tv1 = False -- See Note [Fmv Orientation Invariant]
| isFmvTyVar tv2 = True
| Just lvl1 <- metaTyVarTcLevel_maybe tv1
-- If tv1 is touchable, swap only if tv2 is also
-- touchable and it's strictly better to update the latter
-- But see Note [Avoid unnecessary swaps]
= case metaTyVarTcLevel_maybe tv2 of
Nothing -> False
Just lvl2 | lvl2 `strictlyDeeperThan` lvl1 -> True
| lvl1 `strictlyDeeperThan` lvl2 -> False
| otherwise -> nicer_to_update tv2
-- So tv1 is not a meta tyvar
-- If only one is a meta tyvar, put it on the left
-- This is not because it'll be solved; but because
-- the floating step looks for meta tyvars on the left
| isMetaTyVar tv2 = True
-- So neither is a meta tyvar (including FlatMetaTv)
-- If only one is a flatten skolem, put it on the left
-- See Note [Eliminate flat-skols]
| not (isFlattenTyVar tv1), isFlattenTyVar tv2 = True
| otherwise = False
where
nicer_to_update tv2
= (isSigTyVar tv1 && not (isSigTyVar tv2))
|| (isSystemName (Var.varName tv2) && not (isSystemName (Var.varName tv1)))
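-- For example (illustrative): unifying a user-written signature variable 'a'
-- against a system-named meta-tyvar t0 at the same level, nicer_to_update
-- prefers to update t0, so inferred types keep mentioning 'a'.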
-- @trySpontaneousSolve wi@ solves equalities where one side is a
-- touchable unification variable.
-- Returns True <=> spontaneous solve happened
canSolveByUnification :: TcLevel -> TcTyVar -> TcType -> Bool
canSolveByUnification tclvl tv xi
| isTouchableMetaTyVar tclvl tv
= case metaTyVarInfo tv of
SigTv -> is_tyvar xi
_ -> True
| otherwise -- Untouchable
= False
where
is_tyvar xi
= case tcGetTyVar_maybe xi of
Nothing -> False
Just tv -> case tcTyVarDetails tv of
MetaTv { mtv_info = info }
-> case info of
SigTv -> True
_ -> False
SkolemTv {} -> True
RuntimeUnk -> True
{- Note [Fmv Orientation Invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* We always orient a constraint
fmv ~ alpha
with fmv on the left, even if alpha is
a touchable unification variable
Reason: doing it the other way round would unify alpha:=fmv, but that
really doesn't add any info to alpha. But a later constraint alpha ~
Int might unlock everything. Comment:9 of #12526 gives a detailed
example.
WARNING: I've gone to and fro on this one several times.
I'm now pretty sure that unifying alpha:=fmv is a bad idea!
So orienting with fmvs on the left is a good thing.
This example comes from IndTypesPerfMerge. (Others include
T10226, T10009.)
From the ambiguity check for
f :: (F a ~ a) => a
we get:
[G] F a ~ a
[WD] F alpha ~ alpha, alpha ~ a
From Givens we get
[G] F a ~ fsk, fsk ~ a
Now if we flatten we get
[WD] alpha ~ fmv, F alpha ~ fmv, alpha ~ a
Now, if we unified alpha := fmv, we'd get
[WD] F fmv ~ fmv, [WD] fmv ~ a
And now we are stuck.
So instead the Fmv Orientation Invariant puts the fmv on the
left, giving
[WD] fmv ~ alpha, [WD] F alpha ~ fmv, [WD] alpha ~ a
Now we get alpha:=a, and everything works out
Note [Prevent unification with type families]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We prevent unification with type families because of an uneasy compromise.
It's perfectly sound to unify with type families, and it even improves the
error messages in the testsuite. It also modestly improves performance, at
least in some cases. But it's disastrous for test case perf/compiler/T3064.
Here is the problem: Suppose we have (F ty) where we also have [G] F ty ~ a.
What do we do? Do we reduce F? Or do we use the given? Hard to know what's
best. GHC reduces. This is a disaster for T3064, where the type's size
spirals out of control during reduction. (We're not helped by the fact that
the flattener re-flattens all the arguments every time around.) If we prevent
unification with type families, then the solver happens to use the equality
before expanding the type family.
It would be lovely in the future to revisit this problem and remove this
extra, unnecessary check. But we retain it for now as it seems to work
better in practice.
Note [Refactoring hazard: checkTauTvUpdate]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
I (Richard E.) have a sad story about refactoring this code, retained here
to prevent others (or a future me!) from falling into the same traps.
It all started with #11407, which was caused by the fact that the TyVarTy
case of defer_me didn't look in the kind. But it seemed reasonable to
simply remove the defer_me check instead.
It referred to two Notes (since removed) that were out of date, and the
fast_check code in occurCheckExpand seemed to do just about the same thing as
defer_me. The one piece that defer_me did that wasn't repeated by
occurCheckExpand was the type-family check. (See Note [Prevent unification
with type families].) So I checked the result of occurCheckExpand for any
type family occurrences and deferred if there were any. This was done
in commit e9bf7bb5cc9fb3f87dd05111aa23da76b86a8967 .
This approach turned out not to be performant, because the expanded
type was bigger than the original type, and tyConsOfType (needed to
see if there are any type family occurrences) looks through type
synonyms. So it then struck me that we could dispense with the
defer_me check entirely. This simplified the code nicely, and it cut
the allocations in T5030 by half. But, as documented in Note [Prevent
unification with type families], this destroyed performance in
T3064. Regardless, I missed this regression and the change was
committed as 3f5d1a13f112f34d992f6b74656d64d95a3f506d .
Bottom lines:
* defer_me is back, but now fixed w.r.t. #11407.
* Tread carefully before you start to refactor here. There can be
lots of hard-to-predict consequences.
Note [Type synonyms and the occur check]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Generally speaking we try to update a variable with type synonyms not
expanded, which improves later error messages, unless looking
inside a type synonym may help resolve a spurious occurs check
error. Consider:
type A a = ()
f :: (A a -> a -> ()) -> ()
f = \ _ -> ()
x :: ()
x = f (\ x p -> p x)
We will eventually get a constraint of the form t ~ A t. The ok function above will
properly expand the type (A t) to just (), which is ok to be unified with t. If we had
unified with the original type A t, we would lead the type checker into an infinite loop.
Hence, if the occurs check fails for a type synonym application, then (and *only* then),
the ok function expands the synonym to detect opportunities for occurs check success using
the underlying definition of the type synonym.
The same applies later on in the constraint interaction code; see TcInteract,
function @occ_check_ok@.
Note [Non-TcTyVars in TcUnify]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because the same code is now shared between unifying types and unifying
kinds, we sometimes will see proper TyVars floating around the unifier.
Example (from test case polykinds/PolyKinds12):
type family Apply (f :: k1 -> k2) (x :: k1) :: k2
type instance Apply g y = g y
When checking the instance declaration, we first *kind-check* the LHS
and RHS, discovering that the instance really should be
type instance Apply k3 k4 (g :: k3 -> k4) (y :: k3) = g y
During this kind-checking, all the tyvars will be TcTyVars. Then, however,
as a second pass, we desugar the RHS (which is done in functions prefixed
with "tc" in TcTyClsDecls). By this time, all the kind-vars are proper
TyVars, not TcTyVars, yet some kind unification must happen.
Thus, we always check if a TyVar is a TcTyVar before asking if it's a
meta-tyvar.
This used to not be necessary for type-checking (that is, before * :: *)
because expressions get desugared via an algorithm separate from
type-checking (with wrappers, etc.). Types get desugared very differently,
causing this wibble in behavior seen here.
-}
data LookupTyVarResult -- The result of a lookupTcTyVar call
= Unfilled TcTyVarDetails -- SkolemTv or virgin MetaTv
| Filled TcType
lookupTcTyVar :: TcTyVar -> TcM LookupTyVarResult
lookupTcTyVar tyvar
| MetaTv { mtv_ref = ref } <- details
= do { meta_details <- readMutVar ref
; case meta_details of
Indirect ty -> return (Filled ty)
Flexi -> do { is_touchable <- isTouchableTcM tyvar
-- Note [Unifying untouchables]
; if is_touchable then
return (Unfilled details)
else
return (Unfilled vanillaSkolemTv) } }
| otherwise
= return (Unfilled details)
where
details = tcTyVarDetails tyvar
{-
Note [Unifying untouchables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We treat an untouchable type variable as if it was a skolem. That
ensures it won't unify with anything. It's a slight hack, because
we return a made-up TcTyVarDetails, but I think it works smoothly.
-}
-- | Breaks apart a function kind into its pieces.
matchExpectedFunKind :: Outputable fun
=> fun -- ^ type, only for errors
-> TcKind -- ^ function kind
-> TcM (Coercion, TcKind, TcKind)
-- ^ co :: old_kind ~ arg -> res
matchExpectedFunKind hs_ty = go
where
go k | Just k' <- tcView k = go k'
go k@(TyVarTy kvar)
| isMetaTyVar kvar
= do { maybe_kind <- readMetaTyVar kvar
; case maybe_kind of
Indirect fun_kind -> go fun_kind
Flexi -> defer k }
go k@(FunTy arg res) = return (mkNomReflCo k, arg, res)
go other = defer other
defer k
= do { arg_kind <- newMetaKindVar
; res_kind <- newMetaKindVar
; let new_fun = mkFunTy arg_kind res_kind
origin = TypeEqOrigin { uo_actual = k
, uo_expected = new_fun
, uo_thing = Just (ppr hs_ty)
, uo_visible = True
}
; co <- uType KindLevel origin k new_fun
; return (co, arg_kind, res_kind) }
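-- Illustrative (hypothetical) behaviour:
-- > matchExpectedFunKind hs_ty (* -> *)
-- should return (refl, *, *) via the FunTy case, whereas a flexi meta kind
-- variable kappa goes through 'defer', which makes fresh arg/res kinds and
-- unifies kappa with (arg -> res).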
{- *********************************************************************
* *
Occurrence checking
* *
********************************************************************* -}
{- Note [Occurs check expansion]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(occurCheckExpand tv xi) expands synonyms in xi just enough to get rid
of occurrences of tv outside type function arguments, if that is
possible; otherwise, it returns Nothing.
For example, suppose we have
type F a b = [a]
Then
occCheckExpand b (F Int b) = Just [Int]
but
occCheckExpand a (F a Int) = Nothing
We don't promise to do the absolute minimum amount of expanding
necessary, but we try not to do expansions we don't need to. We
prefer doing inner expansions first. For example,
type F a b = (a, Int, a, [a])
type G b = Char
We have
occCheckExpand b (F (G b)) = Just (F Char)
even though we could also expand F to get rid of b.
The two variants of the function are to support TcUnify.checkTauTvUpdate,
which wants to prevent unification with type families. For more on this
point, see Note [Prevent unification with type families] in TcUnify.
Note [Occurrence checking: look inside kinds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we are considering unifying
(alpha :: *) ~ Int -> (beta :: alpha -> alpha)
This may be an error (what is that alpha doing inside beta's kind?),
but we must not make the mistake of actually unifying or we'll
build an infinite data structure. So when looking for occurrences
of alpha in the rhs, we must look in the kinds of type variables
that occur there.
NB: we may be able to remove the problem via expansion; see
Note [Occurs check expansion]. So we have to try that.
Note [Checking for foralls]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Unless we have -XImpredicativeTypes (which is a totally unsupported
feature), we do not want to unify
alpha ~ (forall a. a->a) -> Int
So we look for foralls hidden inside the type, and it's convenient
to do that at the same time as the occurs check (which looks for
occurrences of alpha).
However, it's not just a question of looking for foralls /anywhere/!
Consider
(alpha :: forall k. k->*) ~ (beta :: forall k. k->*)
This is legal; e.g. dependent/should_compile/T11635.
We don't want to reject it because of the forall in beta's kind,
but (see Note [Occurrence checking: look inside kinds]) we do
need to look in beta's kind. So we carry a flag saying if a 'forall'
is OK, and switch the flag on when stepping inside a kind.
Why is it OK? Why does it not count as impredicative polymorphism?
The reason foralls are bad is because we rely on "seeing" foralls
when doing implicit instantiation. But the forall inside the kind is
fine. We'll generate a kind equality constraint
(forall k. k->*) ~ (forall k. k->*)
to check that the kinds of lhs and rhs are compatible. If alpha's
kind had instead been
(alpha :: kappa)
then this kind equality would rightly complain about unifying kappa
with (forall k. k->*)
-}
data OccCheckResult a
= OC_OK a
| OC_Bad -- Forall or type family
| OC_Occurs
instance Functor OccCheckResult where
fmap = liftM
instance Applicative OccCheckResult where
pure = OC_OK
(<*>) = ap
instance Monad OccCheckResult where
OC_OK x >>= k = k x
OC_Bad >>= _ = OC_Bad
OC_Occurs >>= _ = OC_Occurs
occCheckForErrors :: DynFlags -> TcTyVar -> Type -> OccCheckResult ()
-- Just for error-message generation; so we return OccCheckResult
-- so the caller can report the right kind of error
-- Check whether
-- a) the given variable occurs in the given type.
-- b) there is a forall in the type (unless we have -XImpredicativeTypes)
occCheckForErrors dflags tv ty
= case preCheck dflags True tv ty of
OC_OK _ -> OC_OK ()
OC_Bad -> OC_Bad
OC_Occurs -> case occCheckExpand tv ty of
Nothing -> OC_Occurs
Just _ -> OC_OK ()
----------------
metaTyVarUpdateOK :: DynFlags
-> TcTyVar -- tv :: k1
-> TcType -- ty :: k2
-> Maybe TcType -- possibly-expanded ty
-- (metaTyVarUpdateOK tv ty)
-- We are about to update the meta-tyvar tv with ty
-- Check (a) that tv doesn't occur in ty (occurs check)
-- (b) that ty does not have any foralls
-- (in the impredicative case), or type functions
--
-- We have two possible outcomes:
-- (1) Return the type to update the type variable with,
-- [we know the update is ok]
-- (2) Return Nothing,
-- [the update might be dodgy]
--
-- Note that "Nothing" does not mean "definite error". For example
-- type family F a
-- type instance F Int = Int
-- consider
-- a ~ F a
-- This is perfectly reasonable, if we later get a ~ Int. For now, though,
-- we return Nothing, leaving it to the later constraint simplifier to
-- sort matters out.
--
-- See Note [Refactoring hazard: checkTauTvUpdate]
metaTyVarUpdateOK dflags tv ty
= case preCheck dflags False tv ty of
-- False <=> type families not ok
-- See Note [Prevent unification with type families]
OC_OK _ -> Just ty
OC_Bad -> Nothing -- forall or type function
OC_Occurs -> occCheckExpand tv ty
preCheck :: DynFlags -> Bool -> TcTyVar -> TcType -> OccCheckResult ()
-- A quick check for
-- (a) a forall type (unless -XImpredicativeTypes)
-- (b) a type family
-- (c) an occurrence of the type variable (occurs check)
--
-- For (a) and (b) we check only the top level of the type, NOT
-- inside the kinds of variables it mentions. But for (c) we do
-- look in the kinds of course.
preCheck dflags ty_fam_ok tv ty
= fast_check ty
where
details = tcTyVarDetails tv
impredicative_ok = canUnifyWithPolyType dflags details
ok :: OccCheckResult ()
ok = OC_OK ()
fast_check :: TcType -> OccCheckResult ()
fast_check (TyVarTy tv')
| tv == tv' = OC_Occurs
| otherwise = fast_check_occ (tyVarKind tv')
-- See Note [Occurrence checking: look inside kinds]
fast_check (TyConApp tc tys)
| bad_tc tc = OC_Bad
| otherwise = mapM fast_check tys >> ok
fast_check (LitTy {}) = ok
fast_check (FunTy a r) = fast_check a >> fast_check r
fast_check (AppTy fun arg) = fast_check fun >> fast_check arg
fast_check (CastTy ty co) = fast_check ty >> fast_check_co co
fast_check (CoercionTy co) = fast_check_co co
fast_check (ForAllTy (TvBndr tv' _) ty)
| not impredicative_ok = OC_Bad
| tv == tv' = ok
| otherwise = do { fast_check_occ (tyVarKind tv')
; fast_check_occ ty }
-- Under a forall we look only for occurrences of
-- the type variable
-- For kinds, we only do an occurs check; we do not worry
-- about type families or foralls
-- See Note [Checking for foralls]
fast_check_occ k | tv `elemVarSet` tyCoVarsOfType k = OC_Occurs
| otherwise = ok
-- For coercions, we are only doing an occurs check here;
-- no bother about impredicativity in coercions, as they're
-- inferred
fast_check_co co | tv `elemVarSet` tyCoVarsOfCo co = OC_Occurs
| otherwise = ok
bad_tc :: TyCon -> Bool
bad_tc tc
| not (impredicative_ok || isTauTyCon tc) = True
| not (ty_fam_ok || isFamFreeTyCon tc) = True
| otherwise = False
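-- A rough illustration of the three outcomes (an editorial sketch, not part of
-- the original GHC source; 'alpha' stands for the meta-tyvar being unified,
-- written in surface syntax, with -XImpredicativeTypes off):
--
--   preCheck dflags True  alpha (alpha -> Int)       ~>  OC_Occurs
--   preCheck dflags True  alpha (forall a. a -> a)   ~>  OC_Bad    -- forall not allowed
--   preCheck dflags False alpha (F Int)              ~>  OC_Bad    -- F a type family
--   preCheck dflags True  alpha (Int -> Bool)        ~>  OC_OK ()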
occCheckExpand :: TcTyVar -> TcType -> Maybe TcType
-- See Note [Occurs check expansion]
-- We may have needed to do some type synonym unfolding in order to
-- get rid of the variable (or forall), so we also return the unfolded
-- version of the type, which is guaranteed to be syntactically free
-- of the given type variable. If the type is already syntactically
-- free of the variable, then the same type is returned.
occCheckExpand tv ty
= go emptyVarEnv ty
where
go :: VarEnv TyVar -> Type -> Maybe Type
-- The VarEnv carries mappings necessary
-- because of kind expansion
go env (TyVarTy tv')
| tv == tv' = Nothing
| Just tv'' <- lookupVarEnv env tv' = return (mkTyVarTy tv'')
| otherwise = do { k' <- go env (tyVarKind tv')
; return (mkTyVarTy $
setTyVarKind tv' k') }
-- See Note [Occurrence checking: look inside kinds]
go _ ty@(LitTy {}) = return ty
go env (AppTy ty1 ty2) = do { ty1' <- go env ty1
; ty2' <- go env ty2
; return (mkAppTy ty1' ty2') }
go env (FunTy ty1 ty2) = do { ty1' <- go env ty1
; ty2' <- go env ty2
; return (mkFunTy ty1' ty2') }
go env ty@(ForAllTy (TvBndr tv' vis) body_ty)
| tv == tv' = return ty
| otherwise = do { ki' <- go env (tyVarKind tv')
; let tv'' = setTyVarKind tv' ki'
env' = extendVarEnv env tv' tv''
; body' <- go env' body_ty
; return (ForAllTy (TvBndr tv'' vis) body') }
-- For a type constructor application, first try expanding away the
-- offending variable from the arguments. If that doesn't work, next
-- see if the type constructor is a type synonym, and if so, expand
-- it and try again.
go env ty@(TyConApp tc tys)
= case mapM (go env) tys of
Just tys' -> return (mkTyConApp tc tys')
Nothing | Just ty' <- tcView ty -> go env ty'
| otherwise -> Nothing
-- Failing that, try to expand a synonym
go env (CastTy ty co) = do { ty' <- go env ty
; co' <- go_co env co
; return (mkCastTy ty' co') }
go env (CoercionTy co) = do { co' <- go_co env co
; return (mkCoercionTy co') }
------------------
go_co env (Refl r ty) = do { ty' <- go env ty
; return (mkReflCo r ty') }
-- Note: Coercions do not contain type synonyms
go_co env (TyConAppCo r tc args) = do { args' <- mapM (go_co env) args
; return (mkTyConAppCo r tc args') }
go_co env (AppCo co arg) = do { co' <- go_co env co
; arg' <- go_co env arg
; return (mkAppCo co' arg') }
go_co env co@(ForAllCo tv' kind_co body_co)
| tv == tv' = return co
| otherwise = do { kind_co' <- go_co env kind_co
; let tv'' = setTyVarKind tv' $
pFst (coercionKind kind_co')
env' = extendVarEnv env tv' tv''
; body' <- go_co env' body_co
; return (ForAllCo tv'' kind_co' body') }
go_co env (FunCo r co1 co2) = do { co1' <- go_co env co1
; co2' <- go_co env co2
; return (mkFunCo r co1' co2') }
go_co env (CoVarCo c) = do { k' <- go env (varType c)
; return (mkCoVarCo (setVarType c k')) }
go_co env (AxiomInstCo ax ind args) = do { args' <- mapM (go_co env) args
; return (mkAxiomInstCo ax ind args') }
go_co env (UnivCo p r ty1 ty2) = do { p' <- go_prov env p
; ty1' <- go env ty1
; ty2' <- go env ty2
; return (mkUnivCo p' r ty1' ty2') }
go_co env (SymCo co) = do { co' <- go_co env co
; return (mkSymCo co') }
go_co env (TransCo co1 co2) = do { co1' <- go_co env co1
; co2' <- go_co env co2
; return (mkTransCo co1' co2') }
go_co env (NthCo n co) = do { co' <- go_co env co
; return (mkNthCo n co') }
go_co env (LRCo lr co) = do { co' <- go_co env co
; return (mkLRCo lr co') }
go_co env (InstCo co arg) = do { co' <- go_co env co
; arg' <- go_co env arg
; return (mkInstCo co' arg') }
go_co env (CoherenceCo co1 co2) = do { co1' <- go_co env co1
; co2' <- go_co env co2
; return (mkCoherenceCo co1' co2') }
go_co env (KindCo co) = do { co' <- go_co env co
; return (mkKindCo co') }
go_co env (SubCo co) = do { co' <- go_co env co
; return (mkSubCo co') }
go_co env (AxiomRuleCo ax cs) = do { cs' <- mapM (go_co env) cs
; return (mkAxiomRuleCo ax cs') }
------------------
go_prov _ UnsafeCoerceProv = return UnsafeCoerceProv
go_prov env (PhantomProv co) = PhantomProv <$> go_co env co
go_prov env (ProofIrrelProv co) = ProofIrrelProv <$> go_co env co
go_prov _ p@(PluginProv _) = return p
go_prov _ p@(HoleProv _) = return p
canUnifyWithPolyType :: DynFlags -> TcTyVarDetails -> Bool
canUnifyWithPolyType dflags details
= case details of
MetaTv { mtv_info = SigTv } -> False
MetaTv { mtv_info = TauTv } -> xopt LangExt.ImpredicativeTypes dflags
_other -> True
-- We can have non-meta tyvars in given constraints
| ezyang/ghc | compiler/typecheck/TcUnify.hs | bsd-3-clause | 88,924 | 75 | 29 | 28,031 | 12,847 | 6,620 | 6,227 | -1 | -1 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE CPP #-}
-- |Pre-value and post-value byte alignments
module Flat.Filler (
Filler(..),
fillerLength,
PreAligned(..),
preAligned,
PostAligned(..),
postAligned,
preAlignedDecoder,
postAlignedDecoder
) where
import Flat.Class ( Generic, Flat(..) )
import Flat.Encoder ( eFiller, sFillerMax )
import Flat.Decoder ( Get )
import Control.DeepSeq ( NFData )
import Data.Typeable ( Typeable )
-- |A meaningless sequence of 0 bits terminated with a 1 bit (easier to implement than the reverse)
-- Useful to align an encoded value at byte/word boundaries.
data Filler = FillerBit Filler
| FillerEnd
deriving (Show, Eq, Ord, Typeable, Generic, NFData)
-- |Use a special encoding for the filler
instance Flat Filler where
encode _ = eFiller
size = sFillerMax
-- use generated decode
-- |A Post aligned value, a value followed by a filler
-- Useful to complete the encoding of a top-level value
data PostAligned a = PostAligned { postValue :: a, postFiller :: Filler }
#ifdef ETA_VERSION
deriving (Show, Eq, Ord, Typeable, Generic, NFData)
instance Flat a => Flat (PostAligned a) where
encode (PostAligned val fill) = trampolineEncoding (encode val) <> encode fill
#else
deriving (Show, Eq, Ord, Typeable, Generic, NFData,Flat)
#endif
-- deriving (Show, Eq, Ord, Typeable, Generic, NFData,Flat)
-- |A Pre aligned value, a value preceded by a filler
-- Useful to prealign ByteArrays, Texts and any structure that can be encoded more efficiently when byte aligned.
data PreAligned a = PreAligned { preFiller :: Filler, preValue :: a }
deriving (Show, Eq, Ord, Typeable, Generic, NFData, Flat)
-- |Length of a filler in bits
fillerLength :: Num a => Filler -> a
fillerLength FillerEnd = 1
fillerLength (FillerBit f) = 1 + fillerLength f
-- |Post align a value
postAligned :: a -> PostAligned a
postAligned a = PostAligned a FillerEnd
-- |Pre align a value
preAligned :: a -> PreAligned a
preAligned = PreAligned FillerEnd
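-- A small usage sketch (editorial addition, doctest style; not part of the
-- original module):
--
-- >>> fillerLength (FillerBit (FillerBit FillerEnd)) :: Int
-- 3
-- >>> postFiller (postAligned 'x')
-- FillerEnd
-- >>> preValue (preAligned 'x')
-- 'x'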
-- postAlignedDecoder :: Get a -> Get (PostAligned a)
-- |Decode a value assuming that is PostAligned
postAlignedDecoder :: Get b -> Get b
postAlignedDecoder dec = do
v <- dec
_::Filler <- decode
return v
preAlignedDecoder :: Get b -> Get b
preAlignedDecoder dec = do
_::Filler <- decode
dec
| tittoassini/flat | src/Flat/Filler.hs | bsd-3-clause | 2,426 | 0 | 9 | 482 | 515 | 286 | 229 | 44 | 1 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
module Test.ZM.ADT.SHAKE128_48.K9f214799149b (SHAKE128_48(..)) where
import qualified Prelude(Eq,Ord,Show)
import qualified GHC.Generics
import qualified Flat
import qualified Data.Model
import qualified Test.ZM.ADT.Word8.Kb1f46a49c8f8
data SHAKE128_48 a = SHAKE128_48 Test.ZM.ADT.Word8.Kb1f46a49c8f8.Word8
Test.ZM.ADT.Word8.Kb1f46a49c8f8.Word8
Test.ZM.ADT.Word8.Kb1f46a49c8f8.Word8
Test.ZM.ADT.Word8.Kb1f46a49c8f8.Word8
Test.ZM.ADT.Word8.Kb1f46a49c8f8.Word8
Test.ZM.ADT.Word8.Kb1f46a49c8f8.Word8
deriving (Prelude.Eq, Prelude.Ord, Prelude.Show, GHC.Generics.Generic, Flat.Flat)
instance ( Data.Model.Model a ) => Data.Model.Model ( SHAKE128_48 a )
| tittoassini/typed | test/Test/ZM/ADT/SHAKE128_48/K9f214799149b.hs | bsd-3-clause | 893 | 0 | 7 | 236 | 178 | 117 | 61 | 16 | 0 |
module Exercises99 where
-- 1.
filterMult3 = filter (\x -> x `mod` 3 == 0)
-- 2.
lenMult3 = length . filterMult3
-- 3.
wordsFilter = filter (\x -> x /= "a" && x /= "the" && x /= "an") . words
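-- Example evaluations (an added sketch, not part of the original exercises):
-- >>> filterMult3 [1..10]
-- [3,6,9]
-- >>> lenMult3 [1..10]
-- 3
-- >>> wordsFilter "the quick brown fox"
-- ["quick","brown","fox"]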
wordsFilter2 = filter (\x -> not (elem x ["a", "the", "an"])) . words | pdmurray/haskell-book-ex | src/ch9/Exercises9.10.hs | bsd-3-clause | 267 | 0 | 13 | 60 | 122 | 69 | 53 | 5 | 1 |
module Crypto.Xor
( fixedXor
, cycleKey
, cycleKeyChar
, search
, attempt
, Match
, char
, string
, bestMatch
) where
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import Data.Word (Word8)
import qualified Data.Bits as BB
import qualified Data.List as List
import qualified Data.List.Split as Split
import Data.Function (on)
import qualified Stats.HammingDistance as Hamming
import qualified Stats.Chi as Chi
import qualified Stats.Simple as Simple
import qualified Utils.Bytes as Bytes
import Utils.Elmify ((|>))
import Text.Printf (printf)
data Match = Match
{ c :: Word8
, str :: [Char]
, score :: Int
, chi :: Double
}
char :: Match -> Word8
char ch =
c ch
string :: Match -> [Char]
string ch =
str ch
instance Show Match where
show t = (show (c t)) ++ " : " ++ (show (score t)) ++ " : " ++ (printf "%.2f" (chi t)) ++ " : " ++ (show (str t))
instance Eq Match where
c1 == c2 =
and
[ (c c1) == (c c2)
, (str c1) == (str c2)
, (score c1) == (score c2)
]
instance Ord Match where
c1 `compare` c2 = (chi c1) `compare` (chi c2)
attemptFromChars :: [Char] -> Match
attemptFromChars hexString =
Bytes.hexStringToByteString hexString
|> attempt
attempt :: B.ByteString -> Match
attempt bs =
let
chars =
Bytes.all_chars
results =
[ decrypt bs c | c <- chars ]
in
results
|> bestMatch
bestMatch :: [Match] -> Match
bestMatch decrypts =
List.minimumBy (compare `on` chi) decrypts
decrypt :: B.ByteString -> Word8 -> Match
decrypt bs c =
let
decipherd =
cycleKeyChar bs c
|> Bytes.byteStringToString
score =
Simple.simpleFrequencyWeighted decipherd
chi =
Chi.chi decipherd
in
Match c decipherd score chi
{-
1. Let KEYSIZE be the guessed length of the key; try values from 2 to (say) 40.
2. Write a function to compute the edit distance/Hamming distance between two
strings. The Hamming distance is just the number of differing bits. The
distance between:
this is a test
and
wokka wokka!!!
is 37. Make sure your code agrees before you proceed.
3. For each KEYSIZE, take the first KEYSIZE worth of bytes, and the second
KEYSIZE worth of bytes, and find the edit distance between them. Normalize
this result by dividing by KEYSIZE.
The KEYSIZE with the smallest normalized edit distance is probably the key.
You could proceed perhaps with the smallest 2-3 KEYSIZE values. Or take 4
KEYSIZE blocks instead of 2 and average the distances.
4. Now that you probably know the KEYSIZE: break the ciphertext into blocks of
KEYSIZE length.
5. Now transpose the blocks: make a block that is the first byte of every block,
and a block that is the second byte of every block, and so on.
-
6. Solve each block as if it was single-character XOR. You already have code to
do this.
7. For each block, the single-byte XOR key that produces the best looking
histogram is the repeating-key XOR key byte for that block.
8. Put them together and you have the key.
-}
search :: B.ByteString -> [(B.ByteString, B.ByteString)]
search bs =
let
    -- steps 1, 2 and 3 happen in findKeyLength
keyLengths =
findKeyLength bs
|> take 2 -- We may have a keysize. To be sure we take the best
-- matching keysizes.
-- for all candidate keylengths, determine the key according to step 4 to 8.
-- one of these should be the one.
keys =
map ((findKey bs) . fst) keyLengths
-- here is where the actual decryption takes place.
-- cycleKey takes a key and a ByteString, and then cycles the key as long
    -- as required (as long as the bytestring is), and 'xors' the resulting
    -- bytestring against the given one.
decrypt k =
(k, cycleKey bs k)
in
-- returns a number of results. Of which one is presumably the one
map decrypt keys
-- step 1-2-3
findKeyLength :: B.ByteString -> [(Int, Double)]
findKeyLength bs =
let
-- step 1
-- "2 to (say) 40"
keyLenhgths =
[2..40]
-- "You could proceed perhaps with the smallest 2-3 KEYSIZE values. Or take 4
-- KEYSIZE blocks instead of 2 and average the distances."
numBlocks =
12
-- helper function: from bs, take numBlocks blocks with length l
-- and compute the average hamming distance for these blocks
distance l =
blocks numBlocks l bs
|> computeDistance l
in
-- compute the distance for all keylengths
-- and sort the resuls
map distance keyLenhgths
|> List.sortBy (compare `on` snd)
computeDistance :: Int -> [B.ByteString] -> (Int, Double)
computeDistance sz bss =
let
-- helper function to get the average of a list of Doubles
-- uses List.genericLength to get the length of List as Double,
-- instead of Int.
average l =
sum l / (List.genericLength l)
-- make the list of bytestrings into tuple pairs
-- so [a, b, c, d, ..] becomes [(a, b), (c, d), ..]
-- pairs els =
-- Split.splitEvery 2 els
-- |> pairs'
pairs els =
case els of
[] ->
[]
a:[] ->
[]
a:b:rest ->
(a,b):(pairs rest)
normalise d =
(fromIntegral d) / (fromIntegral sz)
-- create the pairs, feed them into the Hamming Function
-- normalise per byte (or, more actually word8)
-- and then compute the average
averageDistance =
pairs bss
|> map Hamming.bytestrings -- step 2: see Crypto.HammingDistance
|> map normalise
|> average
in
-- return the tuple of the size and the average distance
(sz, averageDistance)
-- step 4-5-6-7-8
findKey :: B.ByteString -> Int -> B.ByteString
findKey bs sz =
let
-- step 5
transposed =
Bytes.blocks sz bs
|> B.transpose
-- step 6 & 7
key =
map (char . attempt) transposed
in
-- step 8
B.pack key
-- just a utility to take the first n blocks of the blocks returned by the
-- blocks function in the Utils.Bytes package.
blocks :: Int -> Int -> B.ByteString -> [B.ByteString]
blocks n sz bs =
Bytes.blocks sz bs
|> take n
-- XOR two bytestrings of the same length
-- uses zipWith to take one byte of each ByteString and apply the xor function on those two
-- and do that for all bytes
fixedXor :: B.ByteString -> B.ByteString -> B.ByteString
fixedXor bs1 bs2 =
B.zipWith BB.xor bs1 bs2
|> B.pack
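-- A quick sketch of what fixedXor computes (editorial doctest-style example):
--
-- >>> B.unpack (fixedXor (B.pack [0x01, 0xff]) (B.pack [0x01, 0x0f]))
-- [0,240]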
-- a wrapper for the cycleKey function. cycleKey takes a ByteString, whereas
-- cycleKeyChar takes single Word8. So we convert that into a ByteString first
cycleKeyChar :: B.ByteString -> Word8 -> B.ByteString
cycleKeyChar bs w =
B.pack [w]
|> cycleKey bs
-- cycleKey makes use of the cycle function of Data.ByteString.Lazy
-- it takes a ByteString and creates a new infinite ByteString out of it
-- cycling the key indefinitely. Since it is a lazy ByteString, we only use
-- it as far as needed, which is the length of the incoming ByteString (bs)
-- So these two are XORed against each other, just like the fixedXor function
-- does for two ByteStrings of the same length.
cycleKey :: B.ByteString -> B.ByteString -> B.ByteString
cycleKey bs key =
let
cycledKey =
BL.fromStrict key
|>BL.cycle
in
BL.zipWith BB.xor cycledKey (BL.fromStrict bs)
|> BL.pack
|> BL.toStrict
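-- A quick sketch of cycleKey (editorial doctest-style example): the key [1,1]
-- is cycled over the four input bytes before XORing.
--
-- >>> B.unpack (cycleKey (B.pack [1, 2, 3, 4]) (B.pack [1, 1]))
-- [0,3,2,5]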
| eelcoh/cryptochallenge | src/Crypto/Xor.hs | bsd-3-clause | 7,421 | 0 | 15 | 1,919 | 1,360 | 747 | 613 | 145 | 3 |
module Utils where
import Control.Applicative
import qualified Control.Monad.State as State
import Debug.Trace.Helpers
traceMsgM
:: (Monad m, Show r)
=> [Char] -> m r -> m r
traceMsgM a = State.liftM $ traceMsg a
(+++)
:: Applicative f
=> f [a] -> f [a] -> f [a]
(+++) = liftA2 (++)
clamp :: (Ord a) => a -> a -> a -> a
clamp minV maxV inV = (min (max inV minV) maxV)
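-- Small usage sketches (editorial additions, doctest style):
--
-- >>> clamp 0 10 42
-- 10
-- >>> clamp 0 10 (-3)
-- 0
-- >>> Just [1,2] +++ Just [3]
-- Just [1,2,3]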
| fros1y/umbral | src/Utils.hs | bsd-3-clause | 388 | 0 | 9 | 92 | 184 | 101 | 83 | 14 | 1 |
module Game.LambdaPad.Pads.XBox ( xbox ) where
import Game.LambdaPad.Core.PadConfig
xbox :: PadConfig
xbox = PadConfig
{ padShortName = "xbox"
, padName = "Generic X-Box pad"
, buttonConfig = simpleButtonConfig
[ (0, a)
, (1, b)
, (2, x)
, (3, y)
, (4, lb)
, (5, rb)
, (6, back)
, (7, start)
, (8, home)
, (9, ls)
, (10, rs)
]
, dpadConfig = simpleDPadConfig 0
[ (0, C)
, (1, N)
, (3, NE)
, (2, E)
, (6, SE)
, (4, S)
, (12, SW)
, (8, W)
, (9, NW)
]
, axisConfig = simpleAxisConfig
[ (0, horizStickConfig leftStick)
, (1, vertStickConfig leftStick)
, (2, triggerConfig leftTrigger)
, (3, horizStickConfig rightStick)
, (4, vertStickConfig rightStick)
, (5, triggerConfig rightTrigger)
]
}
| zearen-wover/lambda-pad-core | src/Game/LambdaPad/Pads/XBox.hs | bsd-3-clause | 1,005 | 0 | 10 | 442 | 325 | 207 | 118 | 35 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE LambdaCase #-}
module Graphics.UI.Threepenny.Extra
( module Graphics.UI.Threepenny.Extra
, module Exports
) where
import Graphics.UI.Threepenny.Core as Exports hiding
(attr,text,children,(<**>),Const(..),style,newEvent,(#.))
import Graphics.UI.Threepenny.Attributes.Extra
import qualified Foreign.JavaScript.Marshal as JS
import qualified Graphics.UI.Threepenny.Core as Core
import qualified Graphics.UI.Threepenny as UI
import qualified Data.List as L
import qualified Data.ByteString.Lazy.Char8 as BS
import Data.Aeson (ToJSON(..),FromJSON(..),encode,decode,Value)
import Data.Aeson.Types (parseMaybe)
import Control.Monad as Exports
(void,(>=>))
import Graphics.UI.Threepenny.Action
-- UI Functionality {{{
type E = Event
type B = Behavior
type H m a = a -> m ()
type T = Tidings
newEvent :: (MonadIO f, MonadIO g) => f (E a,H g a)
newEvent = do
(e,h) <- liftIO Core.newEvent
return (e,liftIO . h)
runLater :: UI void -> UI ()
runLater m = do
w <- window
liftIOLater $ void $ runUI w m
window :: UI Window
window = askWindow
windows :: (Window -> UI a) -> UI a
windows = (window >>=)
addCSSFile :: FilePath -> UI ()
addCSSFile = windows . flip UI.addStyleSheet
newRefE :: a -> UI (Ref a,E a,H UI a)
newRefE a = do
r <- newRef a
(e,h) <- newEvent
onEvent e $ writeRef r
return (r,e,h)
onceRef :: UI (Ref (Maybe a),E a, H UI a)
onceRef = do
r <- newRef Nothing
(e,h) <- newEvent
avail <- stepper True $ False <$ e
onEvent (whenE avail e) $ \a -> do
writeRef r $ Just a
h a
return (r,e,h)
-- }}}
-- Class Modifiers {{{
(#.) :: UI Element -> String -> UI Element
e #. c = e #.. [c]
infixl 8 #.
(#..) :: UI Element -> [String] -> UI Element
e #.. cs = e # set class_ cs
infixl 8 #..
(#.+) :: UI Element -> String -> UI Element
e #.+ c = e # modify class_ (L.insert c)
infixl 8 #.+
(#.++) :: UI Element -> [String] -> UI Element
e #.++ cs = e # modify class_ (L.union cs)
infixl 8 #.++
(#.-) :: UI Element -> String -> UI Element
e #.- c = e # modify class_ (filter (/= c))
infixl 8 #.-
(#.--) :: UI Element -> [String] -> UI Element
e #.-- cs = e # modify class_ (filter (not . (`elem` cs)))
infixl 8 #.--
(#./) :: UI Element -> (String,String) -> UI Element
e #./ (old,new) = e # modify class_ (map $ \s -> if s == old then new else s)
infixl 8 #./
(.#.) :: [a] -> [a] -> [a]
(.#.) = (++)
infixr 9 .#.
-- }}}
-- Style Modifiers {{{
(#@) :: UI Element -> [(String,String)] -> UI Element
e #@ ss = e # set style ss
infixl 8 #@
(#@+) :: UI Element -> (String,String) -> UI Element
e #@+ (k,a) = e # modify style (aListInsert k a)
infixl 8 #@+
(#@++) :: UI Element -> [(String,String)] -> UI Element
e #@++ ss = e # modify style (aListUnion ss)
infixl 8 #@++
(#@-) :: UI Element -> String -> UI Element
e #@- k = e # modify style (aListDelete k)
infixl 8 #@-
(#@--) :: UI Element -> [String] -> UI Element
e #@-- ss = e # modify style (aListDeleteAll ss)
infixl 8 #@--
(#@/) :: UI Element -> (String,Maybe String -> String) -> UI Element
e #@/ (k,f) = e & modify style go
where
go :: [(String,String)] -> [(String,String)]
go ps = (k,f $ lookup k ps) : aListDelete k ps
infixl 8 #@/
-- | Insert or replace a key in an association list: an existing entry for the
-- key is overwritten, and a missing key is appended at the end.
aListInsert :: Eq k => k -> a -> [(k,a)] -> [(k,a)]
aListInsert k a ps
  | any ((k ==) . fst) ps =
      [ (k',b)
      | (k',a') <- ps
      , let b = cond a a' $ k == k'
      ]
  | otherwise = ps ++ [(k,a)]
aListUnion :: Eq k => [(k,a)] -> [(k,a)] -> [(k,a)]
aListUnion = \case
[] -> id
(k,a) : as -> aListInsert k a . aListUnion as
aListDelete :: Eq k => k -> [(k,a)] -> [(k,a)]
aListDelete k ps =
[ (k',a)
| (k',a) <- ps
, k /= k'
]
aListDeleteAll :: Eq k => [k] -> [(k,a)] -> [(k,a)]
aListDeleteAll = \case
[] -> id
k : ks -> aListDelete k . aListDeleteAll ks
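-- A quick sketch of the association-list helpers (editorial doctest-style
-- examples):
--
-- >>> aListInsert "color" "red" [("color","blue"),("width","2px")]
-- [("color","red"),("width","2px")]
-- >>> aListDelete "width" [("color","blue"),("width","2px")]
-- [("color","blue")]
-- >>> aListDeleteAll ["color","width"] [("color","blue"),("width","2px")]
-- []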
-- }}}
-- Application Combinators {{{
(##) :: a -> [a -> a] -> a
a ## fs = foldr (.) id fs a
infixl 8 ##
(&) :: a -> (a -> b) -> b
(&) = (#)
infixr 0 &
{-# INLINE (&) #-}
(&*) :: forall a. a -> [a -> a] -> a
(&*) = flip $ foldr (flip (.)) id
infixr 0 &*
{-# INLINE (&*) #-}
(.%) :: (c -> d) -> (a -> b -> c) -> a -> b -> d
(.%) = (.) . (.)
infixr 7 .%
(%.) :: (b -> b -> c) -> (a -> b) -> a -> a -> c
f %. g = \x y -> f (g x) (g y)
infixl 8 %.
(<$$>) :: (Functor f, Functor g) => (a -> b) -> f (g a) -> f (g b)
(<$$>) = fmap . fmap
infixl 4 <$$>
(<**>) :: (Applicative f, Applicative g) => f (g (a -> b)) -> f (g a) -> f (g b)
(<**>) = liftA2 (<*>)
infixl 4 <**>
for :: Functor f => f a -> (a -> b) -> f b
for = flip fmap
(<&>) :: Functor f => f (a -> b) -> a -> f b
f <&> a = ($ a) <$> f
infixl 4 <&>
-- }}}
-- Conditional Combinators {{{
upon :: Bool -> (a -> a) -> a -> a
upon b f = if b
then f
else id
cond :: a -> a -> Bool -> a
cond t f = \case
True -> t
_ -> f
(?) :: a -> a -> Bool -> a
(?) = cond
infix 8 ?
-- }}}
-- Event / Behavior Combinators {{{
(#>>) :: UI Element -> (Element -> UI (E a),Element -> a -> UI void) -> UI Element
me #>> (mv,h) = do
el <- me
e <- mv el
onEvent e $ h el
return el
infixl 8 #>>
(#>) :: UI Element -> (Element -> E a,Element -> a -> UI void) -> UI Element
me #> (e,h) = do
el <- me
on e el $ h el
return el
infixl 8 #>
(#<) :: UI Element -> (Element -> B a,Element -> a -> UI void) -> UI Element
me #< (b,h) = do
el <- me
sink (mkWriteAttr $ \a e -> void $ h e a) (b el) $ return el
infixl 8 #<
(#<<) :: UI Element -> (Element -> UI (B a),Element -> a -> UI void) -> UI Element
me #<< (mb,h) = do
el <- me
b <- mb el
sink (mkWriteAttr $ \a e -> void $ h e a) b $ return el
infixl 8 #<<
(>>=@) :: B a -> (a -> UI b) -> UI (B b)
ba >>=@ f = do
(e,h) <- newEvent
onChanges ba $ f >=> h
a <- f =<< currentValue ba
stepper a e
infixl 1 >>=@
zipB :: B a -> B b -> B (a,b)
zipB = liftA2 (,)
unzipB :: B (a,b) -> (B a,B b)
unzipB ab = (fst <$> ab,snd <$> ab)
varyAttr :: (a -> ReadWriteAttr x i o) -> B a -> ReadWriteAttr x i o
varyAttr f b = mkReadWriteAttr g s
where
g x = do
a <- currentValue b
get' (f a) x
s i x = do
a <- currentValue b
set' (f a) i x
-- }}}
-- Addtl Operators {{{
(#+>) :: UI Element -> [Element] -> UI Element
e #+> es = e #+ map return es
infixl 8 #+>
(#!) :: UI Element -> String -> UI Element
e #! s = e # set text s
infixl 8 #!
(.=) :: ReadWriteAttr x i o -> i -> UI x -> UI x
(.=) = set
infix 9 .=
(.?) :: x -> ReadWriteAttr x i o -> UI o
(.?) = flip get
infix 2 .?
-- }}}
-- Addtl Elements {{{
thead :: UI Element
thead = mkElement "thead"
tbody :: UI Element
tbody = mkElement "tbody"
head_ :: UI Element
head_ = windows getHead
body_ :: UI Element
body_ = windows getBody
script :: UI Element
script = mkElement "script"
cssPath :: String -> UI Element
cssPath p = UI.link ##
[ rel .= "stylesheet"
, type_ .= "text/css"
, href .= p
]
jsPath :: String -> UI Element
jsPath p = script ##
[ async .= True
, type_ .= "text/javascript"
, charset .= "utf8"
, src .= p
]
jsScript :: JSFunction () -> UI Element
jsScript f = do
cd <- renderFn f
script # type_ .= "text/javascript" #! cd
where
renderFn :: JSFunction a -> UI String
renderFn = liftIO . JS.toCode
-- }}}
-- JSON {{{
jsonWriteAttr :: ToJSON i => ReadWriteAttr x String o -> ReadWriteAttr x i o
jsonWriteAttr = bimapAttr toJSONStr id
toJSONStr :: ToJSON a => a -> String
toJSONStr = BS.unpack . encode
parseJSONStr :: FromJSON a => String -> Maybe a
parseJSONStr = decode . BS.pack
parseJSONValue :: FromJSON a => Value -> Maybe a
parseJSONValue = parseMaybe parseJSON
-- }}}
iota :: (Enum a, Num a) => a -> [a]
iota n = [0 .. pred n]
px :: Int -> String
px = (++ "px") . show
-- Safe list indexing
(?!) :: [a] -> Int -> Maybe a
(?!) = \case
[] -> pure Nothing
a : as -> \case
0 -> Just a
n | n > 0 -> as ?! pred n
_ -> Nothing
infix 8 ?!
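-- Usage sketches for the small helpers above (editorial additions):
--
-- >>> iota 4
-- [0,1,2,3]
-- >>> px 12
-- "12px"
-- >>> "abc" ?! 1
-- Just 'b'
-- >>> "abc" ?! 5
-- Nothing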
ffiReady :: FFI a => String -> a
ffiReady = ffi . wrapReady
wrapReady :: String -> String
wrapReady fn = "$(document).ready(function() { " ++ fn ++ ";})"
| kylcarte/threepenny-extra | src/Graphics/UI/Threepenny/Extra.hs | bsd-3-clause | 8,065 | 0 | 14 | 2,064 | 4,062 | 2,167 | 1,895 | -1 | -1 |
{- DATX02-17-26, automated assessment of imperative programs.
- Copyright, 2017, see AUTHORS.md.
-
- This program is free software; you can redistribute it and/or
- modify it under the terms of the GNU General Public License
- as published by the Free Software Foundation; either version 2
- of the License, or (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-}
{- Javalette Compiler, a simple C like language.
- Copyright, 2016, Mazdak Farrokhzad
-
- This program is free software; you can redistribute it and/or
- modify it under the terms of the GNU General Public License
- as published by the Free Software Foundation; either version 2
- of the License, or (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-}
{-# LANGUAGE ConstraintKinds #-}
-- | Functor, Applicative, Monad (+ transformer) utilities.
module Util.Monad (
-- ** Classes and types
MonadCo
-- ** General utilities
, (<$$>)
, unless'
, fkeep
, untilEqM
, untilMatchM
-- ** Monadic folds and traversals
, traverseJ
, traverseS
-- ** Monadic sorting
, MComparator
, sortByM
-- ** Monad stack transformations
, rebase
, io
, exceptT
) where
import Data.Function.Pointless ((.:))
import Data.Foldable (asum)
import Control.Applicative (Alternative, (<|>), empty)
import Control.Comonad (Comonad)
import Control.Monad (join, (>=>))
import Control.Monad.Identity (Identity)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Morph (MFunctor, hoist, generalize)
import Control.Monad.Except (ExceptT)
import Control.Monad.Trans.Except (except)
--------------------------------------------------------------------------------
-- Classes and types:
--------------------------------------------------------------------------------
-- | Combined constraint of being both a Monad and a Comonad.
-- With great power comes great responsibility!
type MonadCo m = (Monad m, Comonad m)
--------------------------------------------------------------------------------
-- General utilities:
--------------------------------------------------------------------------------
-- | '<$$>': alias for composition of fmap with itself.
(<$$>) :: (Functor f, Functor g) => (a -> b) -> f (g a) -> f (g b)
(<$$>) = (<$>) . (<$>)
-- | 'unless'': runs the first argument and checks its result against the
-- predicate (second argument). If the predicate holds, the value is returned
-- unchanged; otherwise the function in the third argument is applied to the
-- value and its result becomes the result of the computation.
unless' :: Monad m => m a -> (a -> Bool) -> (a -> m a) -> m a
unless' m p e = m >>= \x -> if p x then return x else e x
-- | 'fkeep': given a function from a to f b and a value of type a (the second
-- argument), produce the input value paired with the result, inside the functor.
fkeep :: Functor f => (a -> f b) -> a -> f (a, b)
fkeep f a = (\b -> (a, b)) <$> f a
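-- Usage sketches (editorial additions, doctest style):
--
-- >>> unless' (Just 3) even (\x -> Just (x + 1))
-- Just 4
-- >>> fkeep (\n -> [n, n * 10]) 5
-- [(5,5),(5,50)]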
-- | 'untilEqM': same as 'untilEq' but in a monadic context.
untilEqM :: (Eq a, Monad m) => (a -> m a) -> m a -> m a
untilEqM = untilMatchM (==)
-- | 'untilMatchM': same as 'untilMatch' but in a monadic context.
untilMatchM :: Monad m => (a -> a -> Bool) -> (a -> m a) -> m a -> m a
untilMatchM p f = (>>= \x -> unless' (f x) (p x) (untilMatchM p f . return))
--------------------------------------------------------------------------------
-- Monadic folds:
--------------------------------------------------------------------------------
-- | 'traverseJ': traverse and then join on the result.
traverseJ :: (Applicative f, Traversable t, Monad t)
=> (a -> f (t b)) -> t a -> f (t b)
traverseJ = fmap join .: traverse
-- | 'traverseS': traverse and then asum on the result.
-- This is potentially more general in comparison to traverseJ.
traverseS :: (Applicative f, Traversable t, Alternative g)
=> (a -> f (g b)) -> t a -> f (g b)
traverseS = fmap asum .: traverse
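-- Usage sketches (editorial additions, doctest style):
--
-- >>> traverseJ (\x -> Just [x, x * 10]) [1, 2]
-- Just [1,10,2,20]
-- >>> traverseS (\x -> Just [x]) [1, 2]
-- Just [1,2]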
--------------------------------------------------------------------------------
-- Monadic sorting:
--------------------------------------------------------------------------------
-- | A monadic comparator:
type MComparator m a = a -> a -> m Ordering
-- | Monadic sort, stable,
-- From: https://hackage.haskell.org/package/monadlist-0.0.2
sortByM :: Monad m => MComparator m a -> [a] -> m [a]
sortByM cmp = sequences >=> mergeAll
where
sequences (a:b:xs) =
cmp a b >>= onGT (descending b [a] xs) (ascending b (a:) xs)
sequences xs = pure [xs]
descending a as cs@(b:bs) =
cmp a b >>= onGT (descending b (a:as) bs) (((a:as) :) <$> sequences cs)
descending a as bs = ((a:as) :) <$> sequences bs
ascending a as cs@(b:bs) =
cmp a b >>=
onGT ((as [a] :) <$> sequences cs) (ascending b (as . (a:)) bs)
ascending a as bs = (as [a] :) <$> sequences bs
mergeAll [x] = pure x
mergeAll xs = mergePairs xs >>= mergeAll
mergePairs (a:b:xs) = (:) <$> merge a b <*> mergePairs xs
mergePairs xs = pure xs
merge as@(a:as') bs@(b:bs') =
cmp a b >>= onGT ((b :) <$> merge as bs') ((a :) <$> merge as' bs)
merge [] bs = pure bs
merge as [] = pure as
onGT gt ngt ord = if ord == GT then gt else ngt
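-- A usage sketch (editorial addition); with a pure comparator it behaves like
-- an ordinary stable sort:
--
-- >>> sortByM (\a b -> Just (compare a b)) [3, 1, 2]
-- Just [1,2,3]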
--------------------------------------------------------------------------------
-- Transformers:
--------------------------------------------------------------------------------
-- | 'rebase': change base monad of from Identity to something else.
rebase :: (MFunctor t, Monad n) => t Identity b -> t n b
rebase = hoist generalize
-- | 'io': alias of 'liftIO'
io :: MonadIO m => IO a -> m a
io = liftIO
-- | Produces an ExceptT e m a, in any monad m, given an Either e a.
exceptT :: Monad m => Either e a -> ExceptT e m a
exceptT e = rebase $ except e | Centril/DATX02-17-26 | libsrc/Util/Monad.hs | gpl-2.0 | 6,640 | 0 | 13 | 1,287 | 1,442 | 790 | 652 | 70 | 9 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Test.Ganeti.Rpc (testRpc) where
import Test.QuickCheck
import Test.QuickCheck.Monadic (monadicIO, run, stop)
import Control.Applicative
import qualified Data.Map as Map
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Test.Ganeti.Objects ()
import qualified Ganeti.Rpc as Rpc
import qualified Ganeti.Objects as Objects
instance Arbitrary Rpc.RpcCallAllInstancesInfo where
arbitrary = Rpc.RpcCallAllInstancesInfo <$> arbitrary
instance Arbitrary Rpc.RpcCallInstanceList where
arbitrary = Rpc.RpcCallInstanceList <$> arbitrary
instance Arbitrary Rpc.RpcCallNodeInfo where
arbitrary = Rpc.RpcCallNodeInfo <$> arbitrary <*> arbitrary <*>
pure Map.empty
-- | Monadic check that, for an offline node and a call that does not
-- offline nodes, we get a OfflineNodeError response.
-- FIXME: We need a way of generalizing this, running it for
-- every call manually will soon get problematic
prop_noffl_request_allinstinfo :: Rpc.RpcCallAllInstancesInfo -> Property
prop_noffl_request_allinstinfo call =
forAll (arbitrary `suchThat` Objects.nodeOffline) $ \node -> monadicIO $ do
res <- run $ Rpc.executeRpcCall [node] call
stop $ res ==? [(node, Left (Rpc.OfflineNodeError node))]
prop_noffl_request_instlist :: Rpc.RpcCallInstanceList -> Property
prop_noffl_request_instlist call =
forAll (arbitrary `suchThat` Objects.nodeOffline) $ \node -> monadicIO $ do
res <- run $ Rpc.executeRpcCall [node] call
stop $ res ==? [(node, Left (Rpc.OfflineNodeError node))]
prop_noffl_request_nodeinfo :: Rpc.RpcCallNodeInfo -> Property
prop_noffl_request_nodeinfo call =
forAll (arbitrary `suchThat` Objects.nodeOffline) $ \node -> monadicIO $ do
res <- run $ Rpc.executeRpcCall [node] call
stop $ res ==? [(node, Left (Rpc.OfflineNodeError node))]
testSuite "Rpc"
[ 'prop_noffl_request_allinstinfo
, 'prop_noffl_request_instlist
, 'prop_noffl_request_nodeinfo
]
| dblia/nosql-ganeti | test/hs/Test/Ganeti/Rpc.hs | gpl-2.0 | 2,801 | 0 | 16 | 447 | 482 | 269 | 213 | 38 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.DataPipeline.ReportTaskProgress
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Task runners call 'ReportTaskProgress' when assigned a task to acknowledge that
-- it has the task. If the web service does not receive this acknowledgement
-- within 2 minutes, it assigns the task in a subsequent 'PollForTask' call. After
-- this initial acknowledgement, the task runner only needs to report progress
-- every 15 minutes to maintain its ownership of the task. You can change this
-- reporting time from 15 minutes by specifying a 'reportProgressTimeout' field in
-- your pipeline.
--
-- If a task runner does not report its status after 5 minutes, AWS Data
-- Pipeline assumes that the task runner is unable to process the task and
-- reassigns the task in a subsequent response to 'PollForTask'. Task runners
-- should call 'ReportTaskProgress' every 60 seconds.
--
-- <http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_ReportTaskProgress.html>
module Network.AWS.DataPipeline.ReportTaskProgress
(
-- * Request
ReportTaskProgress
-- ** Request constructor
, reportTaskProgress
-- ** Request lenses
, rtpFields
, rtpTaskId
-- * Response
, ReportTaskProgressResponse
-- ** Response constructor
, reportTaskProgressResponse
-- ** Response lenses
, rtprCanceled
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.DataPipeline.Types
import qualified GHC.Exts
data ReportTaskProgress = ReportTaskProgress
{ _rtpFields :: List "fields" Field
, _rtpTaskId :: Text
} deriving (Eq, Read, Show)
-- | 'ReportTaskProgress' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rtpFields' @::@ ['Field']
--
-- * 'rtpTaskId' @::@ 'Text'
--
reportTaskProgress :: Text -- ^ 'rtpTaskId'
-> ReportTaskProgress
reportTaskProgress p1 = ReportTaskProgress
{ _rtpTaskId = p1
, _rtpFields = mempty
}
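-- A construction sketch (editorial addition; the task ID below is a made-up
-- placeholder, not taken from the AWS documentation):
--
-- >>> _rtpTaskId (reportTaskProgress "task-00112233")
-- "task-00112233"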
-- | Key-value pairs that define the properties of the ReportTaskProgressInput
-- object.
rtpFields :: Lens' ReportTaskProgress [Field]
rtpFields = lens _rtpFields (\s a -> s { _rtpFields = a }) . _List
-- | The ID of the task assigned to the task runner. This value is provided in the
-- response for 'PollForTask'.
rtpTaskId :: Lens' ReportTaskProgress Text
rtpTaskId = lens _rtpTaskId (\s a -> s { _rtpTaskId = a })
newtype ReportTaskProgressResponse = ReportTaskProgressResponse
{ _rtprCanceled :: Bool
} deriving (Eq, Ord, Read, Show, Enum)
-- | 'ReportTaskProgressResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rtprCanceled' @::@ 'Bool'
--
reportTaskProgressResponse :: Bool -- ^ 'rtprCanceled'
-> ReportTaskProgressResponse
reportTaskProgressResponse p1 = ReportTaskProgressResponse
{ _rtprCanceled = p1
}
-- | If true, the calling task runner should cancel processing of the task. The
-- task runner does not need to call 'SetTaskStatus' for canceled tasks.
rtprCanceled :: Lens' ReportTaskProgressResponse Bool
rtprCanceled = lens _rtprCanceled (\s a -> s { _rtprCanceled = a })
instance ToPath ReportTaskProgress where
toPath = const "/"
instance ToQuery ReportTaskProgress where
toQuery = const mempty
instance ToHeaders ReportTaskProgress
instance ToJSON ReportTaskProgress where
toJSON ReportTaskProgress{..} = object
[ "taskId" .= _rtpTaskId
, "fields" .= _rtpFields
]
instance AWSRequest ReportTaskProgress where
type Sv ReportTaskProgress = DataPipeline
type Rs ReportTaskProgress = ReportTaskProgressResponse
request = post "ReportTaskProgress"
response = jsonResponse
instance FromJSON ReportTaskProgressResponse where
parseJSON = withObject "ReportTaskProgressResponse" $ \o -> ReportTaskProgressResponse
<$> o .: "canceled"
| romanb/amazonka | amazonka-datapipeline/gen/Network/AWS/DataPipeline/ReportTaskProgress.hs | mpl-2.0 | 4,877 | 0 | 10 | 1,012 | 535 | 329 | 206 | 63 | 1 |
{-
Copyright 2019 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
program = drawingOf(rectangle(2,2,2,2))
| alphalambda/codeworld | codeworld-compiler/test/testcases/tooManyArguments/source.hs | apache-2.0 | 650 | 0 | 8 | 118 | 28 | 16 | 12 | 1 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, DeriveFunctor,
PatternGuards, CPP #-}
module Idris.REPL(getClient, getPkg, getPkgCheck, getPkgClean, getPkgMkDoc,
getPkgREPL, getPkgTest, getPort, idris, idrisMain, loadInputs,
opt, runClient, runMain, ver) where
import Idris.AbsSyntax
import Idris.ASTUtils
import Idris.Apropos (apropos, aproposModules)
import Idris.REPLParser
import Idris.ElabDecls
import Idris.Erasure
import Idris.Error
import Idris.ErrReverse
import Idris.Delaborate
import Idris.Docstrings (Docstring, overview, renderDocstring, renderDocTerm)
import Idris.Help
import Idris.IdrisDoc
import Idris.Prover
import Idris.Parser hiding (indent)
import Idris.Primitives
import Idris.Coverage
import Idris.Docs hiding (Doc)
import Idris.Completion
import qualified Idris.IdeMode as IdeMode
import Idris.Chaser
import Idris.Imports
import Idris.Colours hiding (colourise)
import Idris.Inliner
import Idris.CaseSplit
import Idris.DeepSeq
import Idris.Output
import Idris.Interactive
import Idris.WhoCalls
import Idris.TypeSearch (searchByType)
import Idris.IBC (loadPkgIndex, writePkgIndex)
import Idris.REPL.Browse (namesInNS, namespacesInNS)
import Idris.Elab.Type
import Idris.Elab.Clause
import Idris.Elab.Data
import Idris.Elab.Value
import Idris.Elab.Term
import Version_idris (gitHash)
import Util.System
import Util.DynamicLinker
import Util.Net (listenOnLocalhost, listenOnLocalhostAnyPort)
import Util.Pretty hiding ((</>))
import Idris.Core.Evaluate
import Idris.Core.Execute (execute)
import Idris.Core.TT
import Idris.Core.Unify
import Idris.Core.WHNF
import Idris.Core.Constraints
import IRTS.Compiler
import IRTS.CodegenCommon
import IRTS.Exports
import IRTS.System
import Control.Category
import qualified Control.Exception as X
import Prelude hiding ((<$>), (.), id)
import Data.List.Split (splitOn)
import Data.List (groupBy)
import qualified Data.Text as T
import Text.Trifecta.Result(Result(..))
import System.Console.Haskeline as H
import System.FilePath
import System.Exit
import System.Environment
import System.Process
import System.Directory
import System.IO
import Control.Monad
import Control.Monad.Trans.Except (ExceptT, runExceptT)
import Control.Monad.Trans.State.Strict ( StateT, execStateT, evalStateT, get, put )
import Control.Monad.Trans ( lift )
import Control.Concurrent.MVar
import Network
import Control.Concurrent
import Data.Maybe
import Data.List hiding (group)
import Data.Char
import qualified Data.Set as S
import Data.Version
import Data.Word (Word)
import Data.Either (partitionEithers)
import Control.DeepSeq
import Numeric ( readHex )
import Debug.Trace
-- | Run the REPL
repl :: IState -- ^ The initial state
-> [FilePath] -- ^ The loaded modules
-> FilePath -- ^ The file to edit (with :e)
-> InputT Idris ()
repl orig mods efile
= -- H.catch
do let quiet = opt_quiet (idris_options orig)
i <- lift getIState
let colour = idris_colourRepl i
let theme = idris_colourTheme i
let mvs = idris_metavars i
let prompt = if quiet
then ""
else showMVs colour theme mvs ++
let str = mkPrompt mods ++ ">" in
(if colour && not isWindows
then colourisePrompt theme str
else str) ++ " "
x <- H.catch (getInputLine prompt)
(ctrlC (return Nothing))
case x of
Nothing -> do lift $ when (not quiet) (iputStrLn "Bye bye")
return ()
Just input -> -- H.catch
do ms <- H.catch (lift $ processInput input orig mods efile)
(ctrlC (return (Just mods)))
case ms of
Just mods -> let efile' = case mods of
[] -> efile
(e:_) -> e in
repl orig mods efile'
Nothing -> return ()
-- ctrlC)
-- ctrlC
where ctrlC :: InputT Idris a -> SomeException -> InputT Idris a
ctrlC act e = do lift $ iputStrLn (show e)
act -- repl orig mods
showMVs c thm [] = ""
showMVs c thm ms = "Holes: " ++
show' 4 c thm (map fst ms) ++ "\n"
show' 0 c thm ms = let l = length ms in
"... ( + " ++ show l
++ " other"
++ if l == 1 then ")" else "s)"
show' n c thm [m] = showM c thm m
show' n c thm (m : ms) = showM c thm m ++ ", " ++
show' (n - 1) c thm ms
showM c thm n = if c then colouriseFun thm (show n)
else show n
-- | Run the REPL server
startServer :: PortID -> IState -> [FilePath] -> Idris ()
startServer port orig fn_in = do tid <- runIO $ forkOS (serverLoop port)
return ()
where serverLoop port = withSocketsDo $
do sock <- listenOnLocalhost port
loop fn orig { idris_colourRepl = False } sock
fn = case fn_in of
(f:_) -> f
_ -> ""
loop fn ist sock
= do (h,_,_) <- accept sock
hSetEncoding h utf8
cmd <- hGetLine h
let isth = case idris_outputmode ist of
RawOutput _ -> ist {idris_outputmode = RawOutput h}
IdeMode n _ -> ist {idris_outputmode = IdeMode n h}
(ist', fn) <- processNetCmd orig isth h fn cmd
hClose h
loop fn ist' sock
processNetCmd :: IState -> IState -> Handle -> FilePath -> String ->
IO (IState, FilePath)
processNetCmd orig i h fn cmd
= do res <- case parseCmd i "(net)" cmd of
Failure err -> return (Left (Msg " invalid command"))
Success (Right c) -> runExceptT $ evalStateT (processNet fn c) i
Success (Left err) -> return (Left (Msg err))
case res of
Right x -> return x
Left err -> do hPutStrLn h (show err)
return (i, fn)
where
processNet fn Reload = processNet fn (Load fn Nothing)
processNet fn (Load f toline) =
do let ist = orig { idris_options = idris_options i
, idris_colourTheme = idris_colourTheme i
, idris_colourRepl = False
}
putIState ist
setErrContext True
setOutH h
setQuiet True
setVerbose False
mods <- loadInputs [f] toline
ist <- getIState
return (ist, f)
processNet fn c = do process fn c
ist <- getIState
return (ist, fn)
setOutH :: Handle -> Idris ()
setOutH h =
do ist <- getIState
putIState $ case idris_outputmode ist of
RawOutput _ -> ist {idris_outputmode = RawOutput h}
IdeMode n _ -> ist {idris_outputmode = IdeMode n h}
-- | Run a command on the server on localhost
runClient :: PortID -> String -> IO ()
runClient port str = withSocketsDo $ do
res <- X.try (connectTo "localhost" port)
case res of
Right h -> do
hSetEncoding h utf8
hPutStrLn h str
resp <- hGetResp "" h
putStr resp
hClose h
Left err -> do
connectionError err
exitWith (ExitFailure 1)
where hGetResp acc h = do eof <- hIsEOF h
if eof then return acc
else do l <- hGetLine h
hGetResp (acc ++ l ++ "\n") h
connectionError :: X.SomeException -> IO ()
connectionError _ =
putStrLn "Unable to connect to a running Idris repl"
initIdemodeSocket :: IO Handle
initIdemodeSocket = do
(sock, port) <- listenOnLocalhostAnyPort
putStrLn $ show port
(h, _, _) <- accept sock
hSetEncoding h utf8
return h
-- | Run the IdeMode
idemodeStart :: Bool -> IState -> [FilePath] -> Idris ()
idemodeStart s orig mods
= do h <- runIO $ if s then initIdemodeSocket else return stdout
setIdeMode True h
i <- getIState
case idris_outputmode i of
IdeMode n h ->
do runIO $ hPutStrLn h $ IdeMode.convSExp "protocol-version" IdeMode.ideModeEpoch n
case mods of
a:_ -> runIdeModeCommand h n i "" [] (IdeMode.LoadFile a Nothing)
_ -> return ()
idemode h orig mods
idemode :: Handle -> IState -> [FilePath] -> Idris ()
idemode h orig mods
= do idrisCatch
(do let inh = if h == stdout then stdin else h
len' <- runIO $ IdeMode.getLen inh
len <- case len' of
Left err -> ierror err
Right n -> return n
l <- runIO $ IdeMode.getNChar inh len ""
(sexp, id) <- case IdeMode.parseMessage l of
Left err -> ierror err
Right (sexp, id) -> return (sexp, id)
i <- getIState
putIState $ i { idris_outputmode = (IdeMode id h) }
idrisCatch -- to report correct id back!
(do let fn = case mods of
(f:_) -> f
_ -> ""
case IdeMode.sexpToCommand sexp of
Just cmd -> runIdeModeCommand h id orig fn mods cmd
Nothing -> iPrintError "did not understand" )
(\e -> do iPrintError $ show e))
(\e -> do iPrintError $ show e)
idemode h orig mods
-- | Run IDEMode commands
runIdeModeCommand :: Handle -- ^ The handle for communication
                  -> Integer -- ^ The continuation ID for the client
                  -> IState -- ^ The original IState
                  -> FilePath -- ^ The current open file
                  -> [FilePath] -- ^ The currently loaded modules
                  -> IdeMode.IdeModeCommand -- ^ The command to process
-> Idris ()
runIdeModeCommand h id orig fn mods (IdeMode.Interpret cmd) =
do c <- colourise
i <- getIState
case parseCmd i "(input)" cmd of
Failure err -> iPrintError $ show (fixColour False err)
Success (Right (Prove mode n')) ->
idrisCatch
(do process fn (Prove mode n')
isetPrompt (mkPrompt mods)
case idris_outputmode i of
IdeMode n h -> -- signal completion of proof to ide
runIO . hPutStrLn h $
IdeMode.convSExp "return"
(IdeMode.SymbolAtom "ok", "")
n
_ -> return ())
(\e -> do ist <- getIState
isetPrompt (mkPrompt mods)
case idris_outputmode i of
IdeMode n h ->
runIO . hPutStrLn h $
IdeMode.convSExp "abandon-proof" "Abandoned" n
_ -> return ()
iRenderError $ pprintErr ist e)
Success (Right cmd) -> idrisCatch
(idemodeProcess fn cmd)
(\e -> getIState >>= iRenderError . flip pprintErr e)
Success (Left err) -> iPrintError err
runIdeModeCommand h id orig fn mods (IdeMode.REPLCompletions str) =
do (unused, compls) <- replCompletion (reverse str, "")
let good = IdeMode.SexpList [IdeMode.SymbolAtom "ok",
IdeMode.toSExp (map replacement compls,
reverse unused)]
runIO . hPutStrLn h $ IdeMode.convSExp "return" good id
runIdeModeCommand h id orig fn mods (IdeMode.LoadFile filename toline) =
do i <- getIState
clearErr
putIState (orig { idris_options = idris_options i,
idris_outputmode = (IdeMode id h) })
mods <- loadInputs [filename] toline
isetPrompt (mkPrompt mods)
-- Report either success or failure
i <- getIState
case (errSpan i) of
Nothing -> let msg = maybe (IdeMode.SexpList [IdeMode.SymbolAtom "ok",
IdeMode.SexpList []])
(\fc -> IdeMode.SexpList [IdeMode.SymbolAtom "ok",
IdeMode.toSExp fc])
(idris_parsedSpan i)
in runIO . hPutStrLn h $ IdeMode.convSExp "return" msg id
Just x -> iPrintError $ "didn't load " ++ filename
idemode h orig mods
runIdeModeCommand h id orig fn mods (IdeMode.TypeOf name) =
case splitName name of
Left err -> iPrintError err
Right n -> process "(idemode)"
(Check (PRef (FC "(idemode)" (0,0) (0,0)) [] n))
runIdeModeCommand h id orig fn mods (IdeMode.DocsFor name w) =
case parseConst orig name of
Success c -> process "(idemode)" (DocStr (Right c) (howMuch w))
Failure _ ->
case splitName name of
Left err -> iPrintError err
Right n -> process "(idemode)" (DocStr (Left n) (howMuch w))
where howMuch IdeMode.Overview = OverviewDocs
howMuch IdeMode.Full = FullDocs
runIdeModeCommand h id orig fn mods (IdeMode.CaseSplit line name) =
process fn (CaseSplitAt False line (sUN name))
runIdeModeCommand h id orig fn mods (IdeMode.AddClause line name) =
process fn (AddClauseFrom False line (sUN name))
runIdeModeCommand h id orig fn mods (IdeMode.AddProofClause line name) =
process fn (AddProofClauseFrom False line (sUN name))
runIdeModeCommand h id orig fn mods (IdeMode.AddMissing line name) =
process fn (AddMissing False line (sUN name))
runIdeModeCommand h id orig fn mods (IdeMode.MakeWithBlock line name) =
process fn (MakeWith False line (sUN name))
runIdeModeCommand h id orig fn mods (IdeMode.MakeCaseBlock line name) =
process fn (MakeCase False line (sUN name))
runIdeModeCommand h id orig fn mods (IdeMode.ProofSearch r line name hints depth) =
doProofSearch fn False r line (sUN name) (map sUN hints) depth
runIdeModeCommand h id orig fn mods (IdeMode.MakeLemma line name) =
case splitName name of
Left err -> iPrintError err
Right n -> process fn (MakeLemma False line n)
runIdeModeCommand h id orig fn mods (IdeMode.Apropos a) =
process fn (Apropos [] a)
runIdeModeCommand h id orig fn mods (IdeMode.GetOpts) =
do ist <- getIState
let opts = idris_options ist
let impshow = opt_showimp opts
let errCtxt = opt_errContext opts
let options = (IdeMode.SymbolAtom "ok",
[(IdeMode.SymbolAtom "show-implicits", impshow),
(IdeMode.SymbolAtom "error-context", errCtxt)])
runIO . hPutStrLn h $ IdeMode.convSExp "return" options id
runIdeModeCommand h id orig fn mods (IdeMode.SetOpt IdeMode.ShowImpl b) =
do setImpShow b
let msg = (IdeMode.SymbolAtom "ok", b)
runIO . hPutStrLn h $ IdeMode.convSExp "return" msg id
runIdeModeCommand h id orig fn mods (IdeMode.SetOpt IdeMode.ErrContext b) =
do setErrContext b
let msg = (IdeMode.SymbolAtom "ok", b)
runIO . hPutStrLn h $ IdeMode.convSExp "return" msg id
runIdeModeCommand h id orig fn mods (IdeMode.Metavariables cols) =
do ist <- getIState
let mvs = reverse $ map fst (idris_metavars ist) \\ primDefs
let ppo = ppOptionIst ist
-- splitMvs is a list of pairs of names and their split types
let splitMvs = mapSnd (splitPi ist) (mvTys ist mvs)
-- mvOutput is the pretty-printed version ready for conversion to SExpr
let mvOutput = map (\(n, (hs, c)) -> (n, hs, c)) $
mapPair show
(\(hs, c, pc) ->
let bnd = [ n | (n,_,_) <- hs ] in
let bnds = inits bnd in
(map (\(bnd, h) -> processPremise ist bnd h)
(zip bnds hs),
render ist bnd c pc))
splitMvs
runIO . hPutStrLn h $
IdeMode.convSExp "return" (IdeMode.SymbolAtom "ok", mvOutput) id
where mapPair f g xs = zip (map (f . fst) xs) (map (g . snd) xs)
mapSnd f xs = zip (map fst xs) (map (f . snd) xs)
-- | Split a function type into a pair of premises, conclusion.
-- Each maintains both the original and delaborated versions.
splitPi :: IState -> Type -> ([(Name, Type, PTerm)], Type, PTerm)
splitPi ist (Bind n (Pi _ t _) rest) =
let (hs, c, pc) = splitPi ist rest in
((n, t, delabTy' ist [] t False False):hs,
c, delabTy' ist [] c False False)
splitPi ist tm = ([], tm, delabTy' ist [] tm False False)
-- | Get the types of a list of metavariable names
mvTys :: IState -> [Name] -> [(Name, Type)]
mvTys ist = mapSnd vToP . mapMaybe (flip lookupTyNameExact (tt_ctxt ist))
-- | Show a type and its corresponding PTerm in a format suitable
-- for the IDE - that is, pretty-printed and annotated.
render :: IState -> [Name] -> Type -> PTerm -> (String, SpanList OutputAnnotation)
render ist bnd t pt =
let prettyT = pprintPTerm (ppOptionIst ist)
(zip bnd (repeat False))
[]
(idris_infixes ist)
pt
in
displaySpans .
renderPretty 0.9 cols .
fmap (fancifyAnnots ist True) .
annotate (AnnTerm (zip bnd (take (length bnd) (repeat False))) t) $
prettyT
-- | Juggle the bits of a premise to prepare for output.
processPremise :: IState
-> [Name] -- ^ the names to highlight as bound
-> (Name, Type, PTerm)
-> (String,
String,
SpanList OutputAnnotation)
processPremise ist bnd (n, t, pt) =
let (out, spans) = render ist bnd t pt in
(show n , out, spans)
runIdeModeCommand h id orig fn mods (IdeMode.WhoCalls n) =
case splitName n of
Left err -> iPrintError err
Right n -> do calls <- whoCalls n
ist <- getIState
let msg = (IdeMode.SymbolAtom "ok",
map (\ (n,ns) -> (pn ist n, map (pn ist) ns)) calls)
runIO . hPutStrLn h $ IdeMode.convSExp "return" msg id
where pn ist = displaySpans .
renderPretty 0.9 1000 .
fmap (fancifyAnnots ist True) .
prettyName True True []
runIdeModeCommand h id orig fn mods (IdeMode.CallsWho n) =
case splitName n of
Left err -> iPrintError err
Right n -> do calls <- callsWho n
ist <- getIState
let msg = (IdeMode.SymbolAtom "ok",
map (\ (n,ns) -> (pn ist n, map (pn ist) ns)) calls)
runIO . hPutStrLn h $ IdeMode.convSExp "return" msg id
where pn ist = displaySpans .
renderPretty 0.9 1000 .
fmap (fancifyAnnots ist True) .
prettyName True True []
runIdeModeCommand h id orig fn modes (IdeMode.BrowseNS ns) =
case splitOn "." ns of
[] -> iPrintError "No namespace provided"
ns -> do underNSs <- fmap (map $ concat . intersperse ".") $ namespacesInNS ns
names <- namesInNS ns
if null underNSs && null names
then iPrintError "Invalid or empty namespace"
else do ist <- getIState
let msg = (IdeMode.SymbolAtom "ok", (underNSs, map (pn ist) names))
runIO . hPutStrLn h $ IdeMode.convSExp "return" msg id
where pn ist = displaySpans .
renderPretty 0.9 1000 .
fmap (fancifyAnnots ist True) .
prettyName True True []
runIdeModeCommand h id orig fn modes (IdeMode.TermNormalise bnd tm) =
do ctxt <- getContext
ist <- getIState
let tm' = normaliseAll ctxt [] tm
ptm = annotate (AnnTerm bnd tm')
(pprintPTerm (ppOptionIst ist)
bnd
[]
(idris_infixes ist)
(delab ist tm'))
msg = (IdeMode.SymbolAtom "ok",
displaySpans .
renderPretty 0.9 80 .
fmap (fancifyAnnots ist True) $ ptm)
runIO . hPutStrLn h $ IdeMode.convSExp "return" msg id
runIdeModeCommand h id orig fn modes (IdeMode.TermShowImplicits bnd tm) =
ideModeForceTermImplicits h id bnd True tm
runIdeModeCommand h id orig fn modes (IdeMode.TermNoImplicits bnd tm) =
ideModeForceTermImplicits h id bnd False tm
runIdeModeCommand h id orig fn modes (IdeMode.TermElab bnd tm) =
do ist <- getIState
let ptm = annotate (AnnTerm bnd tm)
(pprintTT (map fst bnd) tm)
msg = (IdeMode.SymbolAtom "ok",
displaySpans .
renderPretty 0.9 70 .
fmap (fancifyAnnots ist True) $ ptm)
runIO . hPutStrLn h $ IdeMode.convSExp "return" msg id
runIdeModeCommand h id orig fn mods (IdeMode.PrintDef name) =
case splitName name of
Left err -> iPrintError err
Right n -> process "(idemode)" (PrintDef n)
runIdeModeCommand h id orig fn modes (IdeMode.ErrString e) =
do ist <- getIState
let out = displayS . renderPretty 1.0 60 $ pprintErr ist e
msg = (IdeMode.SymbolAtom "ok", IdeMode.StringAtom $ out "")
runIO . hPutStrLn h $ IdeMode.convSExp "return" msg id
runIdeModeCommand h id orig fn modes (IdeMode.ErrPPrint e) =
do ist <- getIState
let (out, spans) =
displaySpans .
renderPretty 0.9 80 .
fmap (fancifyAnnots ist True) $ pprintErr ist e
msg = (IdeMode.SymbolAtom "ok", out, spans)
runIO . hPutStrLn h $ IdeMode.convSExp "return" msg id
runIdeModeCommand h id orig fn modes IdeMode.GetIdrisVersion =
let idrisVersion = (versionBranch version,
if not (null gitHash)
then [gitHash]
else [])
msg = (IdeMode.SymbolAtom "ok", idrisVersion)
in runIO . hPutStrLn h $ IdeMode.convSExp "return" msg id
-- | Show a term for IDEMode with the specified implicitness
ideModeForceTermImplicits :: Handle -> Integer -> [(Name, Bool)] -> Bool -> Term -> Idris ()
ideModeForceTermImplicits h id bnd impl tm =
do ist <- getIState
let expl = annotate (AnnTerm bnd tm)
(pprintPTerm ((ppOptionIst ist) { ppopt_impl = impl })
bnd [] (idris_infixes ist)
(delab ist tm))
msg = (IdeMode.SymbolAtom "ok",
displaySpans .
renderPretty 0.9 80 .
fmap (fancifyAnnots ist True) $ expl)
runIO . hPutStrLn h $ IdeMode.convSExp "return" msg id
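-- | Parse a name from its dotted textual form (e.g. "Prelude.List.reverse"),
-- stripping parentheses around operator names such as "(++)".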
splitName :: String -> Either String Name
splitName s = case reverse $ splitOn "." s of
[] -> Left ("Didn't understand name '" ++ s ++ "'")
[n] -> Right . sUN $ unparen n
(n:ns) -> Right $ sNS (sUN (unparen n)) ns
where unparen "" = ""
unparen ('(':x:xs) | last xs == ')' = init (x:xs)
unparen str = str
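-- | Run a REPL command on behalf of the IDE protocol, printing an empty
-- result where the interactive REPL would print nothing.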
idemodeProcess :: FilePath -> Command -> Idris ()
idemodeProcess fn Warranty = process fn Warranty
idemodeProcess fn Help = process fn Help
idemodeProcess fn (ChangeDirectory f) =
do process fn (ChangeDirectory f)
dir <- runIO $ getCurrentDirectory
iPrintResult $ "changed directory to " ++ dir
idemodeProcess fn (ModImport f) = process fn (ModImport f)
idemodeProcess fn (Eval t) = process fn (Eval t)
idemodeProcess fn (NewDefn decls) = do process fn (NewDefn decls)
iPrintResult "defined"
idemodeProcess fn (Undefine n) = process fn (Undefine n)
idemodeProcess fn (ExecVal t) = process fn (ExecVal t)
idemodeProcess fn (Check (PRef x hls n)) = process fn (Check (PRef x hls n))
idemodeProcess fn (Check t) = process fn (Check t)
idemodeProcess fn (Core t) = process fn (Core t)
idemodeProcess fn (DocStr n w) = process fn (DocStr n w)
idemodeProcess fn Universes = process fn Universes
idemodeProcess fn (Defn n) = do process fn (Defn n)
iPrintResult ""
idemodeProcess fn (TotCheck n) = process fn (TotCheck n)
idemodeProcess fn (DebugInfo n) = do process fn (DebugInfo n)
iPrintResult ""
idemodeProcess fn (Search ps t) = process fn (Search ps t)
idemodeProcess fn (Spec t) = process fn (Spec t)
-- RmProof and AddProof not supported!
idemodeProcess fn (ShowProof n') = process fn (ShowProof n')
idemodeProcess fn (WHNF t) = process fn (WHNF t)
--idemodeProcess fn TTShell = process fn TTShell -- need some prove mode!
idemodeProcess fn (TestInline t) = process fn (TestInline t)
idemodeProcess fn (Execute t) = do process fn (Execute t)
iPrintResult ""
idemodeProcess fn (Compile codegen f) = do process fn (Compile codegen f)
iPrintResult ""
idemodeProcess fn (LogLvl i) = do process fn (LogLvl i)
iPrintResult ""
idemodeProcess fn (Pattelab t) = process fn (Pattelab t)
idemodeProcess fn (Missing n) = process fn (Missing n)
idemodeProcess fn (DynamicLink l) = do process fn (DynamicLink l)
iPrintResult ""
idemodeProcess fn ListDynamic = do process fn ListDynamic
iPrintResult ""
idemodeProcess fn Metavars = process fn Metavars
idemodeProcess fn (SetOpt ErrContext) = do process fn (SetOpt ErrContext)
iPrintResult ""
idemodeProcess fn (UnsetOpt ErrContext) = do process fn (UnsetOpt ErrContext)
iPrintResult ""
idemodeProcess fn (SetOpt ShowImpl) = do process fn (SetOpt ShowImpl)
iPrintResult ""
idemodeProcess fn (UnsetOpt ShowImpl) = do process fn (UnsetOpt ShowImpl)
iPrintResult ""
idemodeProcess fn (SetOpt ShowOrigErr) = do process fn (SetOpt ShowOrigErr)
iPrintResult ""
idemodeProcess fn (UnsetOpt ShowOrigErr) = do process fn (UnsetOpt ShowOrigErr)
iPrintResult ""
idemodeProcess fn (SetOpt x) = process fn (SetOpt x)
idemodeProcess fn (UnsetOpt x) = process fn (UnsetOpt x)
idemodeProcess fn (CaseSplitAt False pos str) = process fn (CaseSplitAt False pos str)
idemodeProcess fn (AddProofClauseFrom False pos str) = process fn (AddProofClauseFrom False pos str)
idemodeProcess fn (AddClauseFrom False pos str) = process fn (AddClauseFrom False pos str)
idemodeProcess fn (AddMissing False pos str) = process fn (AddMissing False pos str)
idemodeProcess fn (MakeWith False pos str) = process fn (MakeWith False pos str)
idemodeProcess fn (MakeCase False pos str) = process fn (MakeCase False pos str)
idemodeProcess fn (DoProofSearch False r pos str xs) = process fn (DoProofSearch False r pos str xs)
idemodeProcess fn (SetConsoleWidth w) = do process fn (SetConsoleWidth w)
iPrintResult ""
idemodeProcess fn (SetPrinterDepth d) = do process fn (SetPrinterDepth d)
iPrintResult ""
idemodeProcess fn (Apropos pkg a) = do process fn (Apropos pkg a)
iPrintResult ""
idemodeProcess fn (WhoCalls n) = process fn (WhoCalls n)
idemodeProcess fn (CallsWho n) = process fn (CallsWho n)
idemodeProcess fn (PrintDef n) = process fn (PrintDef n)
idemodeProcess fn (PPrint fmt n tm) = process fn (PPrint fmt n tm)
idemodeProcess fn _ = iPrintError "command not recognized or not supported"
-- | The prompt consists of the currently loaded modules, or "Idris" if there are none
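-- For example, mkPrompt ["Foo.idr", "Bar.idr"] gives "*Foo *Bar".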
mkPrompt [] = "Idris"
mkPrompt [x] = "*" ++ dropExtension x
mkPrompt (x:xs) = "*" ++ dropExtension x ++ " " ++ mkPrompt xs
-- | Determine whether a file uses literate syntax
lit f = case splitExtension f of
(_, ".lidr") -> True
_ -> False
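-- | Parse and run one line of REPL input, returning the updated list of
-- loaded files, or Nothing if the user asked to quit.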
processInput :: String ->
IState -> [FilePath] -> FilePath -> Idris (Maybe [FilePath])
processInput cmd orig inputs efile
= do i <- getIState
let opts = idris_options i
let quiet = opt_quiet opts
let fn = case inputs of
(f:_) -> f
_ -> ""
c <- colourise
case parseCmd i "(input)" cmd of
Failure err -> do iputStrLn $ show (fixColour c err)
return (Just inputs)
Success (Right Reload) ->
do putIState $ orig { idris_options = idris_options i
, idris_colourTheme = idris_colourTheme i
, imported = imported i
}
clearErr
mods <- loadInputs inputs Nothing
return (Just mods)
Success (Right (Load f toline)) ->
do putIState orig { idris_options = idris_options i
, idris_colourTheme = idris_colourTheme i
}
clearErr
mod <- loadInputs [f] toline
return (Just mod)
Success (Right (ModImport f)) ->
do clearErr
fmod <- loadModule f
return (Just (inputs ++ maybe [] (:[]) fmod))
Success (Right Edit) -> do -- takeMVar stvar
edit efile orig
return (Just inputs)
Success (Right Proofs) -> do proofs orig
return (Just inputs)
Success (Right Quit) -> do when (not quiet) (iputStrLn "Bye bye")
return Nothing
Success (Right cmd ) -> do idrisCatch (process fn cmd)
(\e -> do msg <- showErr e ; iputStrLn msg)
return (Just inputs)
Success (Left err) -> do runIO $ putStrLn err
return (Just inputs)
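-- | Resolve a possibly ambiguous proof name against the context.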
resolveProof :: Name -> Idris Name
resolveProof n'
= do i <- getIState
ctxt <- getContext
n <- case lookupNames n' ctxt of
[x] -> return x
[] -> return n'
ns -> ierror (CantResolveAlts ns)
return n
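-- | Remove a proof from the proof list in the interpreter state.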
removeProof :: Name -> Idris ()
removeProof n =
do i <- getIState
let proofs = proof_list i
let ps = filter ((/= n) . fst) proofs
putIState $ i { proof_list = ps }
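-- | Open a file in the user's editor ($EDITOR, then $VISUAL, falling back to
-- vi), jumping to the current error line if there is one, then reload the file.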
edit :: FilePath -> IState -> Idris ()
edit "" orig = iputStrLn "Nothing to edit"
edit f orig
= do i <- getIState
env <- runIO $ getEnvironment
let editor = getEditor env
let line = case errSpan i of
Just l -> ['+' : show (fst (fc_start l))]
Nothing -> []
let args = line ++ [fixName f]
runIO $ rawSystem editor args
clearErr
putIState $ orig { idris_options = idris_options i
, idris_colourTheme = idris_colourTheme i
}
loadInputs [f] Nothing
-- clearOrigPats
iucheck
return ()
where getEditor env | Just ed <- lookup "EDITOR" env = ed
| Just ed <- lookup "VISUAL" env = ed
| otherwise = "vi"
fixName file | map toLower (takeExtension file) `elem` [".lidr", ".idr"] = file
| otherwise = addExtension file "idr"
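-- | List the names of the proofs recorded in the interpreter state.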
proofs :: IState -> Idris ()
proofs orig
= do i <- getIState
let ps = proof_list i
case ps of
[] -> iputStrLn "No proofs available"
_ -> iputStrLn $ "Proofs:\n\t" ++ (show $ map fst ps)
insertScript :: String -> [String] -> [String]
insertScript prf [] = "\n---------- Proofs ----------" : "" : [prf]
insertScript prf (p@"---------- Proofs ----------" : "" : xs)
= p : "" : prf : xs
insertScript prf (x : xs) = x : insertScript prf xs
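-- | Execute a single REPL command, given the name of the file currently loaded.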
process :: FilePath -> Command -> Idris ()
process fn Help = iPrintResult displayHelp
process fn Warranty = iPrintResult warranty
process fn (ChangeDirectory f)
= do runIO $ setCurrentDirectory f
return ()
process fn (ModImport f) = do fmod <- loadModule f
case fmod of
Just pr -> isetPrompt pr
Nothing -> iPrintError $ "Can't find import " ++ f
process fn (Eval t)
= withErrorReflection $
do logLvl 5 $ show t
getIState >>= flip warnDisamb t
(tm, ty) <- elabREPL recinfo ERHS t
ctxt <- getContext
let tm' = perhapsForce $ normaliseAll ctxt [] tm
let ty' = perhapsForce $ normaliseAll ctxt [] ty
-- Add value to context, call it "it"
updateContext (addCtxtDef (sUN "it") (Function ty' tm'))
ist <- getIState
logLvl 3 $ "Raw: " ++ show (tm', ty')
logLvl 10 $ "Debug: " ++ showEnvDbg [] tm'
let tmDoc = pprintDelab ist tm'
tyDoc = pprintDelab ist ty'
iPrintTermWithType tmDoc tyDoc
where perhapsForce tm | termSmallerThan 100 tm = force tm
| otherwise = tm
process fn (NewDefn decls) = do
logLvl 3 ("Defining names using these decls: " ++ show (showDecls verbosePPOption decls))
mapM_ defineName namedGroups where
namedGroups = groupBy (\d1 d2 -> getName d1 == getName d2) decls
getName :: PDecl -> Maybe Name
getName (PTy docs argdocs syn fc opts name _ ty) = Just name
getName (PClauses fc opts name (clause:clauses)) = Just (getClauseName clause)
getName (PData doc argdocs syn fc opts dataDecl) = Just (d_name dataDecl)
getName (PClass doc syn fc constraints name nfc parms parmdocs fds decls _ _) = Just name
getName _ = Nothing
-- getClauseName is partial and I am not sure it's used safely! -- trillioneyes
getClauseName (PClause fc name whole with rhs whereBlock) = name
getClauseName (PWith fc name whole with rhs pn whereBlock) = name
defineName :: [PDecl] -> Idris ()
defineName (tyDecl@(PTy docs argdocs syn fc opts name _ ty) : decls) = do
elabDecl EAll recinfo tyDecl
elabClauses recinfo fc opts name (concatMap getClauses decls)
setReplDefined (Just name)
defineName [PClauses fc opts _ [clause]] = do
let pterm = getRHS clause
(tm,ty) <- elabVal recinfo ERHS pterm
ctxt <- getContext
let tm' = force (normaliseAll ctxt [] tm)
let ty' = force (normaliseAll ctxt [] ty)
updateContext (addCtxtDef (getClauseName clause) (Function ty' tm'))
setReplDefined (Just $ getClauseName clause)
defineName (PClauses{} : _) = tclift $ tfail (Msg "Only one function body is allowed without a type declaration.")
-- fixity and syntax declarations are ignored by elabDecls, so they'll have to be handled some other way
defineName (PFix fc fixity strs : defns) = do
fmodifyState idris_fixities (map (Fix fixity) strs ++)
unless (null defns) $ defineName defns
defineName (PSyntax _ syntax:_) = do
i <- get
put (addReplSyntax i syntax)
defineName decls = do
elabDecls toplevel (map fixClauses decls)
setReplDefined (getName (head decls))
getClauses (PClauses fc opts name clauses) = clauses
getClauses _ = []
getRHS :: PClause -> PTerm
getRHS (PClause fc name whole with rhs whereBlock) = rhs
getRHS (PWith fc name whole with rhs pn whereBlock) = rhs
getRHS (PClauseR fc with rhs whereBlock) = rhs
getRHS (PWithR fc with rhs pn whereBlock) = rhs
setReplDefined :: Maybe Name -> Idris ()
setReplDefined Nothing = return ()
setReplDefined (Just n) = do
oldState <- get
fmodifyState repl_definitions (n:)
-- the "name" field of PClauses seems to always be MN 2 "__", so we need to
-- retrieve the actual name from deeper inside.
-- This should really be a full recursive walk through the structure of PDecl, but
-- I think it should work this way and I want to test sooner. Also lazy.
fixClauses :: PDecl' t -> PDecl' t
fixClauses (PClauses fc opts _ css@(clause:cs)) =
PClauses fc opts (getClauseName clause) css
fixClauses (PInstance doc argDocs syn fc constraints cls nfc parms ty instName decls) =
PInstance doc argDocs syn fc constraints cls nfc parms ty instName (map fixClauses decls)
fixClauses decl = decl
process fn (Undefine names) = undefine names
where
undefine :: [Name] -> Idris ()
undefine [] = do
allDefined <- idris_repl_defs `fmap` get
undefine' allDefined []
-- Keep track of which names you've removed so you can
-- print them out to the user afterward
undefine names = undefine' names []
undefine' [] list = do iRenderResult $ printUndefinedNames list
return ()
undefine' (n:names) already = do
allDefined <- idris_repl_defs `fmap` get
if n `elem` allDefined
then do undefinedJustNow <- undefClosure n
undefine' names (undefinedJustNow ++ already)
else do tclift $ tfail $ Msg ("Can't undefine " ++ show n ++ " because it wasn't defined at the repl")
undefine' names already
undefOne n = do fputState (ctxt_lookup n . known_terms) Nothing
-- for now just assume it's a class. Eventually we'll want some kind of
-- smart detection of exactly what kind of name we're undefining.
fputState (ctxt_lookup n . known_classes) Nothing
fmodifyState repl_definitions (delete n)
undefClosure n =
do replDefs <- idris_repl_defs `fmap` get
callGraph <- whoCalls n
let users = case lookup n callGraph of
Just ns -> nub ns
Nothing -> fail ("Tried to undefine nonexistent name" ++ show n)
undefinedJustNow <- concat `fmap` mapM undefClosure users
undefOne n
return (nub (n : undefinedJustNow))
process fn (ExecVal t)
= do ctxt <- getContext
ist <- getIState
(tm, ty) <- elabVal recinfo ERHS t
-- let tm' = normaliseAll ctxt [] tm
let ty' = normaliseAll ctxt [] ty
res <- execute tm
let (resOut, tyOut) = (prettyIst ist (delab ist res),
prettyIst ist (delab ist ty'))
iPrintTermWithType resOut tyOut
process fn (Check (PRef _ _ n))
= do ctxt <- getContext
ist <- getIState
let ppo = ppOptionIst ist
case lookupNames n ctxt of
ts@(t:_) ->
case lookup t (idris_metavars ist) of
Just (_, i, _, _) -> iRenderResult . fmap (fancifyAnnots ist True) $
showMetavarInfo ppo ist n i
Nothing -> iPrintFunTypes [] n (map (\n -> (n, pprintDelabTy ist n)) ts)
[] -> iPrintError $ "No such variable " ++ show n
where
showMetavarInfo ppo ist n i
= case lookupTy n (tt_ctxt ist) of
(ty:_) -> putTy ppo ist i [] (delab ist (errReverse ist ty))
putTy :: PPOption -> IState -> Int -> [(Name, Bool)] -> PTerm -> Doc OutputAnnotation
putTy ppo ist 0 bnd sc = putGoal ppo ist bnd sc
putTy ppo ist i bnd (PPi _ n _ t sc)
= let current = text " " <>
(case n of
MN _ _ -> text "_"
UN nm | ('_':'_':_) <- str nm -> text "_"
_ -> bindingOf n False) <+>
colon <+> align (tPretty bnd ist t) <> line
in
current <> putTy ppo ist (i-1) ((n,False):bnd) sc
putTy ppo ist _ bnd sc = putGoal ppo ist ((n,False):bnd) sc
putGoal ppo ist bnd g
= text "--------------------------------------" <$>
annotate (AnnName n Nothing Nothing Nothing) (text $ show n) <+> colon <+>
align (tPretty bnd ist g)
tPretty bnd ist t = pprintPTerm (ppOptionIst ist) bnd [] (idris_infixes ist) t
process fn (Check t)
= do (tm, ty) <- elabREPL recinfo ERHS t
ctxt <- getContext
ist <- getIState
let ppo = ppOptionIst ist
ty' = if opt_evaltypes (idris_options ist)
then normaliseC ctxt [] ty
else ty
case tm of
TType _ ->
iPrintTermWithType (prettyIst ist (PType emptyFC)) type1Doc
_ -> iPrintTermWithType (pprintDelab ist tm)
(pprintDelab ist ty')
process fn (Core t)
= do (tm, ty) <- elabREPL recinfo ERHS t
iPrintTermWithType (pprintTT [] tm) (pprintTT [] ty)
process fn (DocStr (Left n) w)
| UN ty <- n, ty == T.pack "Type" = getIState >>= iRenderResult . pprintTypeDoc
| otherwise = do
ist <- getIState
let docs = lookupCtxtName n (idris_docstrings ist) ++
map (\(n,d)-> (n, (d, [])))
(lookupCtxtName (modDocN n) (idris_moduledocs ist))
case docs of
[] -> iPrintError $ "No documentation for " ++ show n
ns -> do toShow <- mapM (showDoc ist) ns
iRenderResult (vsep toShow)
where showDoc ist (n, d) = do doc <- getDocs n w
return $ pprintDocs ist doc
modDocN (NS (UN n) ns) = NS modDocName (n:ns)
modDocN (UN n) = NS modDocName [n]
modDocN _ = sMN 1 "NotFoundForSure"
process fn (DocStr (Right c) _) -- constants only have overviews
= do ist <- getIState
iRenderResult $ pprintConstDocs ist c (constDocs c)
process fn Universes
= do i <- getIState
let cs = idris_constraints i
let cslist = S.toAscList cs
-- iputStrLn $ showSep "\n" (map show cs)
iputStrLn $ show (map uconstraint cslist)
let n = length cslist
iputStrLn $ "(" ++ show n ++ " constraints)"
case ucheck cs of
Error e -> iPrintError $ pshow i e
OK _ -> iPrintResult "Universes OK"
process fn (Defn n)
= do i <- getIState
let head = text "Compiled patterns:" <$>
text (show (lookupDef n (tt_ctxt i)))
let defs =
case lookupCtxt n (idris_patdefs i) of
                    [] -> empty
                    [(d, _)] -> text "Original definition:" <$>
                                  vsep (map (printCase i) d)
let tot =
case lookupTotal n (tt_ctxt i) of
[t] -> showTotal t i
_ -> empty
iRenderResult $ vsep [head, defs, tot]
where printCase i (_, lhs, rhs)
= let i' = i { idris_options = (idris_options i) { opt_showimp = True } }
in text (showTm i' (delab i lhs)) <+> text "=" <+>
text (showTm i' (delab i rhs))
process fn (TotCheck n)
= do i <- getIState
case lookupNameTotal n (tt_ctxt i) of
[] -> iPrintError $ "Unknown operator " ++ show n
ts -> do ist <- getIState
c <- colourise
let ppo = ppOptionIst ist
let showN n = annotate (AnnName n Nothing Nothing Nothing) . text $
showName (Just ist) [] ppo False n
iRenderResult . vsep .
map (\(n, t) -> hang 4 $ showN n <+> text "is" <+> showTotal t i) $
ts
process fn (DebugUnify l r)
= do (ltm, _) <- elabVal recinfo ERHS l
(rtm, _) <- elabVal recinfo ERHS r
ctxt <- getContext
case unify ctxt [] (ltm, Nothing) (rtm, Nothing) [] [] [] [] of
OK ans -> iputStrLn (show ans)
Error e -> iputStrLn (show e)
process fn (DebugInfo n)
= do i <- getIState
let oi = lookupCtxtName n (idris_optimisation i)
when (not (null oi)) $ iputStrLn (show oi)
let si = lookupCtxt n (idris_statics i)
when (not (null si)) $ iputStrLn (show si)
let di = lookupCtxt n (idris_datatypes i)
when (not (null di)) $ iputStrLn (show di)
let d = lookupDef n (tt_ctxt i)
when (not (null d)) $ iputStrLn $ "Definition: " ++ (show (head d))
let cg = lookupCtxtName n (idris_callgraph i)
i <- getIState
let cg' = lookupCtxtName n (idris_callgraph i)
sc <- checkSizeChange n
iputStrLn $ "Size change: " ++ show sc
let fn = lookupCtxtName n (idris_fninfo i)
when (not (null cg')) $ do iputStrLn "Call graph:\n"
iputStrLn (show cg')
when (not (null fn)) $ iputStrLn (show fn)
process fn (Search pkgs t) = searchByType pkgs t
process fn (CaseSplitAt updatefile l n)
= caseSplitAt fn updatefile l n
process fn (AddClauseFrom updatefile l n)
= addClauseFrom fn updatefile l n
process fn (AddProofClauseFrom updatefile l n)
= addProofClauseFrom fn updatefile l n
process fn (AddMissing updatefile l n)
= addMissing fn updatefile l n
process fn (MakeWith updatefile l n)
= makeWith fn updatefile l n
process fn (MakeCase updatefile l n)
= makeCase fn updatefile l n
process fn (MakeLemma updatefile l n)
= makeLemma fn updatefile l n
process fn (DoProofSearch updatefile rec l n hints)
= doProofSearch fn updatefile rec l n hints Nothing
process fn (Spec t)
= do (tm, ty) <- elabVal recinfo ERHS t
ctxt <- getContext
ist <- getIState
let tm' = simplify ctxt [] {- (idris_statics ist) -} tm
iPrintResult (show (delab ist tm'))
process fn (RmProof n')
= do i <- getIState
n <- resolveProof n'
let proofs = proof_list i
case lookup n proofs of
Nothing -> iputStrLn "No proof to remove"
Just _ -> do removeProof n
insertMetavar n
iputStrLn $ "Removed proof " ++ show n
where
insertMetavar :: Name -> Idris ()
insertMetavar n =
do i <- getIState
let ms = idris_metavars i
putIState $ i { idris_metavars = (n, (Nothing, 0, [], False)) : ms }
process fn' (AddProof prf)
= do fn <- do
let fn'' = takeWhile (/= ' ') fn'
ex <- runIO $ doesFileExist fn''
let fnExt = fn'' <.> "idr"
exExt <- runIO $ doesFileExist fnExt
if ex
then return fn''
else if exExt
then return fnExt
else ifail $ "Neither \""++fn''++"\" nor \""++fnExt++"\" exist"
let fb = fn ++ "~"
runIO $ copyFile fn fb -- make a backup in case something goes wrong!
prog <- runIO $ readSource fb
i <- getIState
let proofs = proof_list i
n' <- case prf of
Nothing -> case proofs of
[] -> ifail "No proof to add"
((x, _) : _) -> return x
Just nm -> return nm
n <- resolveProof n'
case lookup n proofs of
Nothing -> iputStrLn "No proof to add"
Just (mode, prf) ->
do let script = if mode
then showRunElab (lit fn) n prf
else showProof (lit fn) n prf
let prog' = insertScript script ls
runIO $ writeSource fn (unlines prog')
removeProof n
iputStrLn $ "Added proof " ++ show n
where ls = (lines prog)
process fn (ShowProof n')
= do i <- getIState
n <- resolveProof n'
let proofs = proof_list i
case lookup n proofs of
Nothing -> iPrintError "No proof to show"
Just (m, p) -> iPrintResult $ if m
then showRunElab False n p
else showProof False n p
process fn (Prove mode n')
= do ctxt <- getContext
ist <- getIState
let ns = lookupNames n' ctxt
let metavars = mapMaybe (\n -> do c <- lookup n (idris_metavars ist); return (n, c)) ns
n <- case metavars of
[] -> ierror (Msg $ "Cannot find metavariable " ++ show n')
[(n, (_,_,_,False))] -> return n
[(_, (_,_,_,True))] -> ierror (Msg $ "Declarations not solvable using prover")
ns -> ierror (CantResolveAlts (map fst ns))
prover mode (lit fn) n
-- recheck totality
i <- getIState
totcheck (fileFC "(input)", n)
mapM_ (\ (f,n) -> setTotality n Unchecked) (idris_totcheck i)
mapM_ checkDeclTotality (idris_totcheck i)
warnTotality
process fn (WHNF t)
= do (tm, ty) <- elabVal recinfo ERHS t
ctxt <- getContext
ist <- getIState
let tm' = whnf ctxt tm
iPrintResult (show (delab ist tm'))
process fn (TestInline t)
= do (tm, ty) <- elabVal recinfo ERHS t
ctxt <- getContext
ist <- getIState
let tm' = inlineTerm ist tm
c <- colourise
iPrintResult (showTm ist (delab ist tm'))
process fn (Execute tm)
= idrisCatch
(do ist <- getIState
(m, _) <- elabVal recinfo ERHS (elabExec fc tm)
(tmpn, tmph) <- runIO tempfile
runIO $ hClose tmph
t <- codegen
-- gcc adds .exe when it builds windows programs
progName <- return $ if isWindows then tmpn ++ ".exe" else tmpn
ir <- compile t tmpn (Just m)
runIO $ generate t (fst (head (idris_imported ist))) ir
case idris_outputmode ist of
RawOutput h -> do runIO $ rawSystem progName []
return ()
IdeMode n h -> runIO . hPutStrLn h $
IdeMode.convSExp "run-program" tmpn n)
(\e -> getIState >>= iRenderError . flip pprintErr e)
where fc = fileFC "main"
process fn (Compile codegen f)
| map toLower (takeExtension f) `elem` [".idr", ".lidr", ".idc"] =
iPrintError $ "Invalid filename for compiler output \"" ++ f ++"\""
| otherwise = do opts <- getCmdLine
let iface = Interface `elem` opts
m <- if iface then return Nothing else
do (m', _) <- elabVal recinfo ERHS
(PApp fc (PRef fc [] (sUN "run__IO"))
[pexp $ PRef fc [] (sNS (sUN "main") ["Main"])])
return (Just m')
ir <- compile codegen f m
i <- getIState
runIO $ generate codegen (fst (head (idris_imported i))) ir
where fc = fileFC "main"
process fn (LogLvl i) = setLogLevel i
-- Elaborate as if LHS of a pattern (debug command)
process fn (Pattelab t)
= do (tm, ty) <- elabVal recinfo ELHS t
iPrintResult $ show tm ++ "\n\n : " ++ show ty
process fn (Missing n)
= do i <- getIState
let i' = i { idris_options = (idris_options i) { opt_showimp = True } }
case lookupCtxt n (idris_patdefs i) of
[] -> iPrintError $ "Unknown operator " ++ show n
[(_, tms)] ->
iPrintResult (showSep "\n" (map (showTm i') tms))
_ -> iPrintError $ "Ambiguous name"
process fn (DynamicLink l)
= do i <- getIState
let importdirs = opt_importdirs (idris_options i)
lib = trim l
handle <- lift . lift $ tryLoadLib importdirs lib
case handle of
Nothing -> iPrintError $ "Could not load dynamic lib \"" ++ l ++ "\""
Just x -> do let libs = idris_dynamic_libs i
if x `elem` libs
then do logLvl 1 ("Tried to load duplicate library " ++ lib_name x)
return ()
else putIState $ i { idris_dynamic_libs = x:libs }
where trim = reverse . dropWhile isSpace . reverse . dropWhile isSpace
process fn ListDynamic
= do i <- getIState
iputStrLn "Dynamic libraries:"
showLibs $ idris_dynamic_libs i
where showLibs [] = return ()
showLibs ((Lib name _):ls) = do iputStrLn $ "\t" ++ name; showLibs ls
process fn Metavars
= do ist <- getIState
let mvs = map fst (idris_metavars ist) \\ primDefs
case mvs of
[] -> iPrintError "No global holes to solve"
_ -> iPrintResult $ "Global holes:\n\t" ++ show mvs
process fn NOP = return ()
process fn (SetOpt ErrContext) = setErrContext True
process fn (UnsetOpt ErrContext) = setErrContext False
process fn (SetOpt ShowImpl) = setImpShow True
process fn (UnsetOpt ShowImpl) = setImpShow False
process fn (SetOpt ShowOrigErr) = setShowOrigErr True
process fn (UnsetOpt ShowOrigErr) = setShowOrigErr False
process fn (SetOpt AutoSolve) = setAutoSolve True
process fn (UnsetOpt AutoSolve) = setAutoSolve False
process fn (SetOpt NoBanner) = setNoBanner True
process fn (UnsetOpt NoBanner) = setNoBanner False
process fn (SetOpt WarnReach) = fmodifyState opts_idrisCmdline $ nub . (WarnReach:)
process fn (UnsetOpt WarnReach) = fmodifyState opts_idrisCmdline $ delete WarnReach
process fn (SetOpt EvalTypes) = setEvalTypes True
process fn (UnsetOpt EvalTypes) = setEvalTypes False
process fn (SetOpt _) = iPrintError "Not a valid option"
process fn (UnsetOpt _) = iPrintError "Not a valid option"
process fn (SetColour ty c) = setColour ty c
process fn ColourOn
= do ist <- getIState
putIState $ ist { idris_colourRepl = True }
process fn ColourOff
= do ist <- getIState
putIState $ ist { idris_colourRepl = False }
process fn ListErrorHandlers =
do ist <- getIState
iPrintResult $ case idris_errorhandlers ist of
[] -> "No registered error handlers"
handlers -> "Registered error handlers: " ++ (concat . intersperse ", " . map show) handlers
process fn (SetConsoleWidth w) = setWidth w
process fn (SetPrinterDepth d) = setDepth d
process fn (Apropos pkgs a) =
do orig <- getIState
when (not (null pkgs)) $
iputStrLn $ "Searching packages: " ++ showSep ", " pkgs
mapM_ loadPkgIndex pkgs
ist <- getIState
let mods = aproposModules ist (T.pack a)
let names = apropos ist (T.pack a)
let aproposInfo = [ (n,
delabTy ist n,
fmap (overview . fst) (lookupCtxtExact n (idris_docstrings ist)))
| n <- sort names, isUN n ]
if (not (null mods)) || (not (null aproposInfo))
then iRenderResult $ vsep (map (\(m, d) -> text "Module" <+> text m <$>
ppD ist d <> line) mods) <$>
vsep (map (prettyDocumentedIst ist) aproposInfo)
else iRenderError $ text "No results found"
where isUN (UN _) = True
isUN (NS n _) = isUN n
isUN _ = False
ppD ist = renderDocstring (renderDocTerm (pprintDelab ist) (normaliseAll (tt_ctxt ist) []))
process fn (WhoCalls n) =
do calls <- whoCalls n
ist <- getIState
iRenderResult . vsep $
map (\(n, ns) ->
text "Callers of" <+> prettyName True True [] n <$>
indent 1 (vsep (map ((text "*" <+>) . align . prettyName True True []) ns)))
calls
process fn (CallsWho n) =
do calls <- callsWho n
ist <- getIState
iRenderResult . vsep $
map (\(n, ns) ->
prettyName True True [] n <+> text "calls:" <$>
indent 1 (vsep (map ((text "*" <+>) . align . prettyName True True []) ns)))
calls
process fn (Browse ns) =
do underNSs <- namespacesInNS ns
names <- namesInNS ns
if null underNSs && null names
then iPrintError "Invalid or empty namespace"
else do ist <- getIState
iRenderResult $
text "Namespaces:" <$>
indent 2 (vsep (map (text . showSep ".") underNSs)) <$>
text "Names:" <$>
indent 2 (vsep (map (\n -> prettyName True False [] n <+> colon <+>
(group . align $ pprintDelabTy ist n))
names))
-- IdrisDoc
process fn (MakeDoc s) =
do istate <- getIState
let names = words s
parse n | Success x <- runparser (fmap fst name) istate fn n = Right x
parse n = Left n
(bad, nss) = partitionEithers $ map parse names
cd <- runIO $ getCurrentDirectory
let outputDir = cd </> "doc"
result <- if null bad then runIO $ generateDocs istate nss outputDir
else return . Left $ "Illegal name: " ++ head bad
case result of Right _ -> iputStrLn "IdrisDoc generated"
Left err -> iPrintError err
process fn (PrintDef n) =
do result <- pprintDef n
case result of
[] -> iPrintError "Not found"
outs -> iRenderResult . vsep $ outs
-- Show relevant transformation rules for the name 'n'
process fn (TransformInfo n)
= do i <- getIState
let ts = lookupCtxt n (idris_transforms i)
let res = map (showTrans i) ts
iRenderResult . vsep $ concat res
where showTrans :: IState -> [(Term, Term)] -> [Doc OutputAnnotation]
showTrans i [] = []
showTrans i ((lhs, rhs) : ts)
= let ppTm tm = annotate (AnnTerm [] tm) .
pprintPTerm (ppOptionIst i) [] [] [] .
delab i $ tm
ts' = showTrans i ts in
ppTm lhs <+> text " ==> " <+> ppTm rhs : ts'
-- iRenderOutput (pretty lhs)
-- iputStrLn " ==> "
-- iPrintTermWithType (pprintDelab i rhs)
-- iputStrLn "---------------"
-- showTrans i ts
process fn (PPrint fmt width (PRef _ _ n))
= do outs <- pprintDef n
iPrintResult =<< renderExternal fmt width (vsep outs)
process fn (PPrint fmt width t)
= do (tm, ty) <- elabVal recinfo ERHS t
ctxt <- getContext
ist <- getIState
let ppo = ppOptionIst ist
ty' = normaliseC ctxt [] ty
iPrintResult =<< renderExternal fmt width (pprintDelab ist tm)
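-- | Render a totality result, listing the names responsible when a definition
-- is possibly not total.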
showTotal :: Totality -> IState -> Doc OutputAnnotation
showTotal t@(Partial (Other ns)) i
= text "possibly not total due to:" <$>
vsep (map (showTotalN i) ns)
showTotal t@(Partial (Mutual ns)) i
= text "possibly not total due to recursive path:" <$>
align (group (vsep (punctuate comma
(map (\n -> annotate (AnnName n Nothing Nothing Nothing) $
text (show n))
ns))))
showTotal t i = text (show t)
showTotalN :: IState -> Name -> Doc OutputAnnotation
showTotalN i n = case lookupTotal n (tt_ctxt i) of
[t] -> showTotal t i
_ -> empty
displayHelp = let vstr = showVersion version in
"\nIdris version " ++ vstr ++ "\n" ++
"--------------" ++ map (\x -> '-') vstr ++ "\n\n" ++
concatMap cmdInfo helphead ++
concatMap cmdInfo help
where cmdInfo (cmds, args, text) = " " ++ col 16 12 (showSep " " cmds) (show args) text
col c1 c2 l m r =
l ++ take (c1 - length l) (repeat ' ') ++
m ++ take (c2 - length m) (repeat ' ') ++ r ++ "\n"
pprintDef :: Name -> Idris [Doc OutputAnnotation]
pprintDef n =
do ist <- getIState
ctxt <- getContext
let ambiguous = length (lookupNames n ctxt) > 1
patdefs = idris_patdefs ist
tyinfo = idris_datatypes ist
return $ map (ppDef ambiguous ist) (lookupCtxtName n patdefs) ++
map (ppTy ambiguous ist) (lookupCtxtName n tyinfo) ++
map (ppCon ambiguous ist) (filter (flip isDConName ctxt) (lookupNames n ctxt))
where ppDef :: Bool -> IState -> (Name, ([([Name], Term, Term)], [PTerm])) -> Doc OutputAnnotation
ppDef amb ist (n, (clauses, missing)) =
prettyName True amb [] n <+> colon <+>
align (pprintDelabTy ist n) <$>
ppClauses ist clauses <> ppMissing missing
ppClauses ist [] = text "No clauses."
ppClauses ist cs = vsep (map pp cs)
where pp (vars, lhs, rhs) =
let ppTm t = annotate (AnnTerm (zip vars (repeat False)) t) .
pprintPTerm (ppOptionIst ist)
(zip vars (repeat False))
[] [] .
delab ist $
t
in group $ ppTm lhs <+> text "=" <$> (group . align . hang 2 $ ppTm rhs)
ppMissing _ = empty
ppTy :: Bool -> IState -> (Name, TypeInfo) -> Doc OutputAnnotation
ppTy amb ist (n, TI constructors isCodata _ _ _)
= kwd key <+> prettyName True amb [] n <+> colon <+>
align (pprintDelabTy ist n) <+> kwd "where" <$>
indent 2 (vsep (map (ppCon False ist) constructors))
where
key | isCodata = "codata"
| otherwise = "data"
kwd = annotate AnnKeyword . text
ppCon amb ist n = prettyName True amb [] n <+> colon <+> align (pprintDelabTy ist n)
helphead =
[ (["Command"], SpecialHeaderArg, "Purpose"),
([""], NoArg, "")
]
replSettings :: Maybe FilePath -> Settings Idris
replSettings hFile = setComplete replCompletion $ defaultSettings {
historyFile = hFile
}
-- | Invoke as if from command line. It is an error if there are unresolved totality problems.
idris :: [Opt] -> IO (Maybe IState)
idris opts = do res <- runExceptT $ execStateT totalMain idrisInit
case res of
Left err -> do putStrLn $ pshow idrisInit err
return Nothing
Right ist -> return (Just ist)
where totalMain = do idrisMain opts
ist <- getIState
case idris_totcheckfail ist of
((fc, msg):_) -> ierror . At fc . Msg $ "Could not build: "++ msg
[] -> return ()
loadInputs :: [FilePath] -> Maybe Int -> Idris [FilePath]
loadInputs inputs toline -- furthest line to read in input source files
= idrisCatch
(do ist <- getIState
-- if we're in --check and not outputting anything, don't bother
        -- loading, as it gets really slow if there are lots of modules in
-- a package (instead, reload all at the end to check for
-- consistency only)
opts <- getCmdLine
let loadCode = case opt getOutput opts of
[] -> not (NoREPL `elem` opts)
_ -> True
-- For each ifile list, check it and build ibcs in the same clean IState
-- so that they don't interfere with each other when checking
let ninputs = zip [1..] inputs
ifiles <- mapWhileOK (\(num, input) ->
do putIState ist
modTree <- buildTree
(map snd (take (num-1) ninputs))
input
let ifiles = getModuleFiles modTree
logLvl 1 ("MODULE TREE : " ++ show modTree)
logLvl 1 ("RELOAD: " ++ show ifiles)
when (not (all ibc ifiles) || loadCode) $
tryLoad False (filter (not . ibc) ifiles)
-- return the files that need rechecking
return (input, ifiles))
ninputs
inew <- getIState
let tidata = idris_tyinfodata inew
let patdefs = idris_patdefs inew
-- If it worked, load the whole thing from all the ibcs together
case errSpan inew of
Nothing ->
do putIState (ist { idris_tyinfodata = tidata })
ibcfiles <- mapM findNewIBC (nub (concat (map snd ifiles)))
tryLoad True (mapMaybe id ibcfiles)
_ -> return ()
ist <- getIState
putIState (ist { idris_tyinfodata = tidata,
idris_patdefs = patdefs })
exports <- findExports
case opt getOutput opts of
[] -> performUsageAnalysis (getExpNames exports) -- interactive
_ -> return [] -- batch, will be checked by the compiler
return (map fst ifiles))
(\e -> do i <- getIState
case e of
At f e' -> do setErrSpan f
iWarn f $ pprintErr i e'
ProgramLineComment -> return () -- fail elsewhere
_ -> do setErrSpan emptyFC -- FIXME! Propagate it
-- Issue #1576 on the issue tracker.
-- https://github.com/idris-lang/Idris-dev/issues/1576
iWarn emptyFC $ pprintErr i e
return [])
where -- load all files, stop if any fail
tryLoad :: Bool -> [IFileType] -> Idris ()
tryLoad keepstate [] = warnTotality >> return ()
tryLoad keepstate (f : fs)
= do ist <- getIState
let maxline
= case toline of
Nothing -> Nothing
Just l -> case f of
IDR fn -> if any (fmatch fn) inputs
then Just l
else Nothing
LIDR fn -> if any (fmatch fn) inputs
then Just l
else Nothing
_ -> Nothing
loadFromIFile True f maxline
inew <- getIState
-- FIXME: Save these in IBC to avoid this hack! Need to
-- preserve it all from source inputs
--
-- Issue #1577 on the issue tracker.
-- https://github.com/idris-lang/Idris-dev/issues/1577
let tidata = idris_tyinfodata inew
let patdefs = idris_patdefs inew
ok <- noErrors
when ok $ do when (not keepstate) $ putIState ist
ist <- getIState
putIState (ist { idris_tyinfodata = tidata,
idris_patdefs = patdefs })
tryLoad keepstate fs
ibc (IBC _ _) = True
ibc _ = False
fmatch ('.':'/':xs) ys = fmatch xs ys
fmatch xs ('.':'/':ys) = fmatch xs ys
fmatch xs ys = xs == ys
findNewIBC :: IFileType -> Idris (Maybe IFileType)
findNewIBC i@(IBC _ _) = return (Just i)
findNewIBC s@(IDR f) = do ist <- get
ibcsd <- valIBCSubDir ist
let ibc = ibcPathNoFallback ibcsd f
ok <- runIO $ doesFileExist ibc
if ok then return (Just (IBC ibc s))
else return Nothing
findNewIBC s@(LIDR f) = do ist <- get
ibcsd <- valIBCSubDir ist
let ibc = ibcPathNoFallback ibcsd f
ok <- runIO $ doesFileExist ibc
if ok then return (Just (IBC ibc s))
else return Nothing
-- Like mapM, but give up when there's an error
mapWhileOK f [] = return []
mapWhileOK f (x : xs) = do x' <- f x
ok <- noErrors
if ok then do xs' <- mapWhileOK f xs
return (x' : xs')
else return [x']
idrisMain :: [Opt] -> Idris ()
idrisMain opts =
do mapM_ setWidth (opt getConsoleWidth opts)
let inputs = opt getFile opts
let quiet = Quiet `elem` opts
let nobanner = NoBanner `elem` opts
let idesl = Idemode `elem` opts || IdemodeSocket `elem` opts
let runrepl = not (NoREPL `elem` opts)
let verbose = runrepl || Verbose `elem` opts
let output = opt getOutput opts
let ibcsubdir = opt getIBCSubDir opts
let importdirs = opt getImportDir opts
let bcs = opt getBC opts
let pkgdirs = opt getPkgDir opts
-- Set default optimisations
let optimise = case opt getOptLevel opts of
[] -> 2
xs -> last xs
setOptLevel optimise
let outty = case opt getOutputTy opts of
[] -> if Interface `elem` opts then
Object else Executable
xs -> last xs
let cgn = case opt getCodegen opts of
[] -> Via "c"
xs -> last xs
let cgFlags = opt getCodegenArgs opts
-- Now set/unset specifically chosen optimisations
sequence_ (opt getOptimisation opts)
script <- case opt getExecScript opts of
[] -> return Nothing
x:y:xs -> do iputStrLn "More than one interpreter expression found."
runIO $ exitWith (ExitFailure 1)
[expr] -> return (Just expr)
let immediate = opt getEvalExpr opts
let port = getPort opts
when (DefaultTotal `elem` opts) $ do i <- getIState
putIState (i { default_total = True })
tty <- runIO $ isATTY
setColourise $ not quiet && last (tty : opt getColour opts)
mapM_ addLangExt (opt getLanguageExt opts)
setREPL runrepl
setQuiet (quiet || isJust script || not (null immediate))
setVerbose verbose
setCmdLine opts
setOutputTy outty
setNoBanner nobanner
setCodegen cgn
mapM_ (addFlag cgn) cgFlags
mapM_ makeOption opts
-- if we have the --bytecode flag, drop into the bytecode assembler
case bcs of
[] -> return ()
xs -> return () -- runIO $ mapM_ bcAsm xs
case ibcsubdir of
[] -> setIBCSubDir ""
(d:_) -> setIBCSubDir d
setImportDirs importdirs
setNoBanner nobanner
when (not (NoBasePkgs `elem` opts)) $ do
addPkgDir "prelude"
addPkgDir "base"
mapM_ addPkgDir pkgdirs
elabPrims
when (not (NoBuiltins `elem` opts)) $ do x <- loadModule "Builtins"
addAutoImport "Builtins"
return ()
when (not (NoPrelude `elem` opts)) $ do x <- loadModule "Prelude"
addAutoImport "Prelude"
return ()
when (runrepl && not idesl) initScript
nobanner <- getNoBanner
when (runrepl &&
not quiet &&
not idesl &&
not (isJust script) &&
not nobanner &&
null immediate) $
iputStrLn banner
orig <- getIState
mods <- if idesl then return [] else loadInputs inputs Nothing
let efile = case inputs of
[] -> ""
(f:_) -> f
runIO $ hSetBuffering stdout LineBuffering
ok <- noErrors
when ok $ case output of
[] -> return ()
(o:_) -> idrisCatch (process "" (Compile cgn o))
(\e -> do ist <- getIState ; iputStrLn $ pshow ist e)
case immediate of
[] -> return ()
exprs -> do setWidth InfinitelyWide
mapM_ (\str -> do ist <- getIState
c <- colourise
case parseExpr ist str of
Failure err -> do iputStrLn $ show (fixColour c err)
runIO $ exitWith (ExitFailure 1)
Success e -> process "" (Eval e))
exprs
runIO $ exitWith ExitSuccess
case script of
Nothing -> return ()
Just expr -> execScript expr
-- Create Idris data dir + repl history and config dir
idrisCatch (do dir <- getIdrisUserDataDir
exists <- runIO $ doesDirectoryExist dir
unless exists $ logLvl 1 ("Creating " ++ dir)
runIO $ createDirectoryIfMissing True (dir </> "repl"))
(\e -> return ())
historyFile <- fmap (</> "repl" </> "history") getIdrisUserDataDir
when ok $ case opt getPkgIndex opts of
(f : _) -> writePkgIndex f
_ -> return ()
when (runrepl && not idesl) $ do
-- clearOrigPats
startServer port orig mods
runInputT (replSettings (Just historyFile)) $ repl orig mods efile
let idesock = IdemodeSocket `elem` opts
when (idesl) $ idemodeStart idesock orig inputs
ok <- noErrors
when (not ok) $ runIO (exitWith (ExitFailure 1))
where
makeOption (OLogging i) = setLogLevel i
makeOption TypeCase = setTypeCase True
makeOption TypeInType = setTypeInType True
makeOption NoCoverage = setCoverage False
makeOption ErrContext = setErrContext True
makeOption _ = return ()
addPkgDir :: String -> Idris ()
addPkgDir p = do ddir <- runIO $ getIdrisLibDir
addImportDir (ddir </> p)
addIBC (IBCImportDir (ddir </> p))
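-- | Run an Idris action from the initial state, printing any uncaught error.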
runMain :: Idris () -> IO ()
runMain prog = do res <- runExceptT $ execStateT prog idrisInit
case res of
Left err -> putStrLn $ "Uncaught error: " ++ show err
Right _ -> return ()
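-- | Parse, elaborate and execute a script expression, then exit.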
execScript :: String -> Idris ()
execScript expr = do i <- getIState
c <- colourise
case parseExpr i expr of
Failure err -> do iputStrLn $ show (fixColour c err)
runIO $ exitWith (ExitFailure 1)
Success term -> do ctxt <- getContext
(tm, _) <- elabVal recinfo ERHS term
res <- execute tm
runIO $ exitWith ExitSuccess
-- | Get the platform-specific, user-specific Idris dir
getIdrisUserDataDir :: Idris FilePath
getIdrisUserDataDir = runIO $ getAppUserDataDirectory "idris"
-- | Locate the platform-specific location for the init script
getInitScript :: Idris FilePath
getInitScript = do idrisDir <- getIdrisUserDataDir
return $ idrisDir </> "repl" </> "init"
-- | Run the initialisation script
initScript :: Idris ()
initScript = do script <- getInitScript
idrisCatch (do go <- runIO $ doesFileExist script
when go $ do
h <- runIO $ openFile script ReadMode
runInit h
runIO $ hClose h)
(\e -> iPrintError $ "Error reading init file: " ++ show e)
where runInit :: Handle -> Idris ()
runInit h = do eof <- lift . lift $ hIsEOF h
ist <- getIState
unless eof $ do
line <- runIO $ hGetLine h
script <- getInitScript
c <- colourise
processLine ist line script c
runInit h
processLine i cmd input clr =
case parseCmd i input cmd of
Failure err -> runIO $ print (fixColour clr err)
Success (Right Reload) -> iPrintError "Init scripts cannot reload the file"
Success (Right (Load f _)) -> iPrintError "Init scripts cannot load files"
Success (Right (ModImport f)) -> iPrintError "Init scripts cannot import modules"
Success (Right Edit) -> iPrintError "Init scripts cannot invoke the editor"
Success (Right Proofs) -> proofs i
Success (Right Quit) -> iPrintError "Init scripts cannot quit Idris"
Success (Right cmd ) -> process [] cmd
Success (Left err) -> runIO $ print err
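-- Projections from the command-line option list; combined with 'opt' below to
-- collect every value of a given kind.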
getFile :: Opt -> Maybe String
getFile (Filename str) = Just str
getFile _ = Nothing
getBC :: Opt -> Maybe String
getBC (BCAsm str) = Just str
getBC _ = Nothing
getOutput :: Opt -> Maybe String
getOutput (Output str) = Just str
getOutput _ = Nothing
getIBCSubDir :: Opt -> Maybe String
getIBCSubDir (IBCSubDir str) = Just str
getIBCSubDir _ = Nothing
getImportDir :: Opt -> Maybe String
getImportDir (ImportDir str) = Just str
getImportDir _ = Nothing
getPkgDir :: Opt -> Maybe String
getPkgDir (Pkg str) = Just str
getPkgDir _ = Nothing
getPkg :: Opt -> Maybe (Bool, String)
getPkg (PkgBuild str) = Just (False, str)
getPkg (PkgInstall str) = Just (True, str)
getPkg _ = Nothing
getPkgClean :: Opt -> Maybe String
getPkgClean (PkgClean str) = Just str
getPkgClean _ = Nothing
getPkgREPL :: Opt -> Maybe String
getPkgREPL (PkgREPL str) = Just str
getPkgREPL _ = Nothing
getPkgCheck :: Opt -> Maybe String
getPkgCheck (PkgCheck str) = Just str
getPkgCheck _ = Nothing
-- | Returns Nothing if given an Opt which is not PkgMkDoc.
-- Otherwise returns Just x, where x is the contents of PkgMkDoc.
getPkgMkDoc :: Opt -- ^ Opt to extract
-> Maybe String -- ^ Result
getPkgMkDoc (PkgMkDoc str) = Just str
getPkgMkDoc _ = Nothing
getPkgTest :: Opt -- ^ the option to extract
-> Maybe String -- ^ the package file to test
getPkgTest (PkgTest f) = Just f
getPkgTest _ = Nothing
getCodegen :: Opt -> Maybe Codegen
getCodegen (UseCodegen x) = Just x
getCodegen _ = Nothing
getCodegenArgs :: Opt -> Maybe String
getCodegenArgs (CodegenArgs args) = Just args
getCodegenArgs _ = Nothing
getConsoleWidth :: Opt -> Maybe ConsoleWidth
getConsoleWidth (UseConsoleWidth x) = Just x
getConsoleWidth _ = Nothing
getExecScript :: Opt -> Maybe String
getExecScript (InterpretScript expr) = Just expr
getExecScript _ = Nothing
getPkgIndex :: Opt -> Maybe FilePath
getPkgIndex (PkgIndex file) = Just file
getPkgIndex _ = Nothing
getEvalExpr :: Opt -> Maybe String
getEvalExpr (EvalExpr expr) = Just expr
getEvalExpr _ = Nothing
getOutputTy :: Opt -> Maybe OutputType
getOutputTy (OutputTy t) = Just t
getOutputTy _ = Nothing
getLanguageExt :: Opt -> Maybe LanguageExt
getLanguageExt (Extension e) = Just e
getLanguageExt _ = Nothing
getTriple :: Opt -> Maybe String
getTriple (TargetTriple x) = Just x
getTriple _ = Nothing
getCPU :: Opt -> Maybe String
getCPU (TargetCPU x) = Just x
getCPU _ = Nothing
getOptLevel :: Opt -> Maybe Int
getOptLevel (OptLevel x) = Just x
getOptLevel _ = Nothing
getOptimisation :: Opt -> Maybe (Idris ())
getOptimisation (AddOpt p) = Just $ addOptimise p
getOptimisation (RemoveOpt p) = Just $ removeOptimise p
getOptimisation _ = Nothing
getColour :: Opt -> Maybe Bool
getColour (ColourREPL b) = Just b
getColour _ = Nothing
getClient :: Opt -> Maybe String
getClient (Client x) = Just x
getClient _ = Nothing
-- Get the first valid port
getPort :: [Opt] -> PortID
getPort [] = defaultPort
getPort (Port p:xs)
| all (`elem` ['0'..'9']) p = PortNumber $ fromIntegral (read p)
| otherwise = getPort xs
getPort (_:xs) = getPort xs
opt :: (Opt -> Maybe a) -> [Opt] -> [a]
opt = mapMaybe
ver = showVersion version ++ suffix
where
    suffix = if gitHash == "" then "" else "-" ++ gitHash
defaultPort :: PortID
defaultPort = PortNumber (fromIntegral 4294)
banner = " ____ __ _ \n" ++
" / _/___/ /____(_)____ \n" ++
" / // __ / ___/ / ___/ Version " ++ ver ++ "\n" ++
" _/ // /_/ / / / (__ ) http://www.idris-lang.org/ \n" ++
" /___/\\__,_/_/ /_/____/ Type :? for help \n" ++
"\n" ++
"Idris is free software with ABSOLUTELY NO WARRANTY. \n" ++
"For details type :warranty."
warranty = "\n" ++
"\t THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY \n" ++
"\t EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE \n" ++
"\t IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR \n" ++
"\t PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS BE \n" ++
"\t LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR \n" ++
"\t CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF \n" ++
"\t SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR \n" ++
"\t BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, \n" ++
"\t WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE \n" ++
"\t OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN\n" ++
"\t IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
| osa1/Idris-dev | src/Idris/REPL.hs | bsd-3-clause | 85,246 | 980 | 18 | 32,659 | 12,437 | 8,472 | 3,965 | 1,690 | 70 |
{-# LANGUAGE QuasiQuotes, TemplateHaskell, MultiParamTypeClasses, DeriveDataTypeable #-}
module Language.Pads.DeriveExample where
import Data.DeriveTH -- Library for deriving instances for existing types
import Data.IP
import Data.Time.Clock
import Data.Time.Calendar
import Data.Data
import Language.Pads.Padsc
{- Example for simple types, defined locally -}
data Color = RGB Int Int Int
| CMYK Int Int Int Int
data Bigger = B1 Color | B2 Int
(derive makeEq ''Color)
(derive makeEq ''Bigger)
{- Make IPv4 an instance of Typeable and Data -}
(derive makeTypeable ''IPv4)
(derive makeDataAbstract ''IPv4)
{- Example declarations to make Pip a Pads base type -}
newtype Pip = Pip IPv4
deriving (Typeable, Data, Show, Eq)
instance Pads Pip Base_md where
parsePP = pip_parseM
pip_parseM :: PadsParser (Pip, Base_md)
pip_parseM = undefined -- **** Needs to be defined ******
{- Make UTCTime an instance of Typeable and Data -}
--(derive makeDataAbstract ''DiffTime)
-- (derive makeData ''UTCTime)
-- (derive makeData ''Day)
{- Example declarations to make Ptime a Pads base type -}
instance Pretty UTCTime where
ppr (UTCTime day sec) = text $ show (UTCTime day sec)
newtype Ptime = Ptime UTCTime
deriving (Typeable, Data, Eq, Show)
instance Pads Ptime Base_md where
parsePP = ptime_parseM
ptime_parseM :: PadsParser (Ptime, Base_md)
ptime_parseM = undefined -- **** Needs to be defined ******
| GaloisInc/pads-haskell | Language/Pads/DeriveExample.hs | bsd-3-clause | 1,508 | 0 | 9 | 325 | 303 | 170 | 133 | 29 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE CPP #-}
module Main where
import Control.Monad
import Imports
import Foundation hiding (second)
import Foundation.Array
import Foundation.Collection
import Foundation.VFS (Path (..), filename, parent)
import Foundation.VFS.FilePath
import qualified Prelude
import GHC.ST
import Test.Data.Unicode
import Test.Data.List
import Test.Foundation.Collection
import Test.Foundation.Conduit
import Test.Foundation.Number
import Test.Foundation.Array
import Test.Foundation.String.Base64
import Test.Foundation.ChunkedUArray
import Test.Foundation.String
import Test.Foundation.Parser
import qualified Test.Foundation.Bits as Bits
data CharMap = CharMap LUString Prelude.Int
deriving (Show)
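-- | Shift a character by n code points, wrapping around the Unicode range.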
addChar :: Prelude.Int -> Char -> Char
addChar n c = toEnum ((fromEnum c + n) `Prelude.mod` 0x10ffff)
instance Arbitrary CharMap where
arbitrary =
CharMap <$> arbitrary <*> choose (1,12)
instance Arbitrary FileName where
arbitrary = do
s <- choose (1, 30)
unsafeFileName . fromList <$> vectorOf s genChar
where
genChar :: Gen Word8
genChar = frequency
[ (10, pure 0x2e) -- '.'
, (10, choose (0x41, 0x5A)) -- [A-Z]
, (10, choose (0x61, 0x7A)) -- [a-z]
        , (5, choose (0x30, 0x39)) -- [0-9]
, (5, elements [0x2d, 0x5f]) -- [-_]
]
instance Arbitrary Relativity where
arbitrary = elements [ Absolute, Relative ]
instance Arbitrary FilePath where
arbitrary = do
s <- choose (0, 10)
unsafeFilePath <$> arbitrary
<*> vectorOf s arbitrary
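-- | Check that two functions agree on a value after unwrapping it.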
transEq :: Eq a => (t -> t1) -> (t1 -> a) -> (t1 -> a) -> t -> Bool
transEq unWrap f g s =
let s' = unWrap s in f s' == g s'
--stringEq :: Eq a => (b -> a) -> (String -> b) -> (LString -> a) -> LUString -> Bool
--stringEq back f g s =
#if MIN_VERSION_tasty_quickcheck(0,8,4)
-- | Set in front of tests to make them verbose
qcv :: TestTree -> TestTree
qcv = adjustOption (\(QuickCheckVerbose _) -> QuickCheckVerbose True)
-- | Set the number of tests
qcnSet :: Int -> TestTree -> TestTree
qcnSet n = adjustOption (\(QuickCheckTests _) -> QuickCheckTests n)
-- | Scale the number of tests
qcnScale :: Int -> TestTree -> TestTree
qcnScale n = adjustOption (\(QuickCheckTests actual) -> QuickCheckTests (actual * n))
#endif
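-- | Unit tests over a few concrete file paths: splitting and rebuilding a path,
-- and recombining its parent with its filename, should both give the path back.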
testCaseFilePath :: [TestTree]
testCaseFilePath = Prelude.map (makeTestCases . (\x -> (show x, x)))
[ "/"
, "."
, ".."
, "C:" </> "Users" </> "haskell-lang"
, "/home"
, "/home" </> "haskell-lang" </> "new hope" </> "foundation"
, "~" </> "new hope" </> "foundation"
, "new hope" </> "foundation"
, "new hope" </> "foundation" </> ".."
, "." </> "new hope" </> ".." </> ".." </> "haskell-lang" </> "new hope"
]
where
makeTestCases :: (String, FilePath) -> TestTree
makeTestCases (title, p) = testGroup title
[ testCase "buildPath . splitPath == id)" $ assertBuildSplitIdemPotent p
, testCase "p == (parent p </> filename p)" $ assertParentFilenameIdemPotent p
]
assertParentFilenameIdemPotent :: FilePath -> Assertion
assertParentFilenameIdemPotent p =
unless (assertEq (parent p </> filename p) p) $
error "assertion failed"
assertBuildSplitIdemPotent :: FilePath -> Assertion
assertBuildSplitIdemPotent p =
unless (assertEq (buildPath $ splitPath p) p) $
error "assertion failed"
testPath :: (Path path, Show path, Eq path)
=> Gen path
-> [TestTree]
testPath genElement =
[ testProperty "buildPath . splitPath == id" $ withElements $ \l -> (buildPath $ splitPath l) === l
]
where
withElements f = forAll genElement f
testBuildable :: (Eq a, IsList a, Show (Element a), Element a ~ Item a, Buildable a)
=> Proxy a -> Gen (Element a) -> Gen (Small Int) -> [TestTree]
testBuildable proxy genElement genChunkSize =
[ testProperty "build s . mapM_ append == id" $ withElementsAndChunkSize $ \(l, Small s) ->
runST (build_ s (Prelude.mapM_ append l)) `asProxyTypeOf` proxy == fromListP proxy l
]
where
withElementsAndChunkSize = forAll ((,) <$> generateListOfElement genElement <*> genChunkSize)
testBoxedZippable :: ( Eq (Element col) , Show (Item a), Show (Item b)
, BoxedZippable col, Zippable a, Zippable b
, Element col ~ (Item a, Item b) )
=> Proxy a -> Proxy b -> Proxy col -> Gen (Element a) -> Gen (Element b) -> [TestTree]
testBoxedZippable proxyA proxyB proxyCol genElementA genElementB =
[ testProperty "zip" $ withList2 $ \(as, bs) ->
toListP proxyCol (zip (fromListP proxyA as) (fromListP proxyB bs)) === zip as bs
, testProperty "zip . unzip == id" $ withListOfTuples $ \xs ->
let (as, bs) = unzip (fromListP proxyCol xs)
in toListP proxyCol (zip (as `asProxyTypeOf` proxyA) (bs `asProxyTypeOf` proxyB)) === xs
]
where
withList2 = forAll ((,) <$> generateListOfElement genElementA <*> generateListOfElement genElementB)
withListOfTuples = forAll (generateListOfElement ((,) <$> genElementA <*> genElementB))
testZippable :: ( Eq (Element col), Show (Item col), Show (Item a), Show (Item b)
, Zippable col, Zippable a, Zippable b )
=> Proxy a -> Proxy b -> Proxy col -> Gen (Element a) -> Gen (Element b) -> Gen (Element col) -> [TestTree]
testZippable proxyA proxyB proxyCol genElementA genElementB genElementCol =
[ testProperty "zipWith" $ withList2AndE $ \(as, bs, c) ->
toListP proxyCol (zipWith (\_ _ -> c) (fromListP proxyA as) (fromListP proxyB bs)
) === replicate (CountOf (Prelude.min (unCountOf $ length as) (unCountOf $ length bs))) c
]
where
unCountOf (CountOf c) = c
withList2AndE = forAll ( (,,) <$> generateListOfElement genElementA <*> generateListOfElement genElementB
<*> genElementCol )
testZippableProps :: (Eq (Item a), Eq (Item b), Show (Item a), Show (Item b), Zippable a, Zippable b)
=> Proxy a -> Proxy b -> Gen (Element a) -> Gen (Element b) -> [TestTree]
testZippableProps proxyA proxyB genElementA genElementB =
[ testProperty "zipWith _|_ [] xs == []" $ withList $ \as ->
toListP proxyA (zipWith undefined [] (fromListP proxyA as)) === []
, testProperty "zipWith f a b == zipWith (flip f) b a" $ withList2 $ \(as, bs) ->
let f = ignore1
as' = fromListP proxyA as
bs' = fromListP proxyB bs
in toListP proxyB (zipWith f as' bs')
=== toListP proxyB (zipWith (flip f) bs' as')
, testProperty "zipWith3 f [...] xs == zipWith id (zipWith f [...]) xs)" $ withList2 $ \(as, bs) ->
let f = ignore2
as' = fromListP proxyA as
bs' = fromListP proxyB bs
in toListP proxyB (zipWith3 f as' as' bs')
=== Prelude.zipWith id (zipWith f as as) bs
, testProperty "zipWith4 f [...] xs == zipWith id (zipWith3 f [...]) xs)" $ withList2 $ \(as, bs) ->
let f = ignore3
as' = fromListP proxyA as
bs' = fromListP proxyB bs
in toListP proxyB (zipWith4 f as' as' as' bs')
=== Prelude.zipWith id (zipWith3 f as as as) bs
, testProperty "zipWith5 f [...] xs == zipWith id (zipWith4 f [...]) xs)" $ withList2 $ \(as, bs) ->
let f = ignore4
as' = fromListP proxyA as
bs' = fromListP proxyB bs
in toListP proxyB (zipWith5 f as' as' as' as' bs')
=== Prelude.zipWith id (zipWith4 f as as as as) bs
, testProperty "zipWith6 f [...] xs == zipWith id (zipWith5 f [...]) xs)" $ withList2 $ \(as, bs) ->
let f = ignore5
as' = fromListP proxyA as
bs' = fromListP proxyB bs
in toListP proxyB (zipWith6 f as' as' as' as' as' bs')
=== Prelude.zipWith id (zipWith5 f as as as as as) bs
, testProperty "zipWith7 f [...] xs == zipWith id (zipWith6 f [...]) xs)" $ withList2 $ \(as, bs) ->
let f = ignore6
as' = fromListP proxyA as
bs' = fromListP proxyB bs
in toListP proxyB (zipWith7 f as' as' as' as' as' as' bs')
=== Prelude.zipWith id (zipWith6 f as as as as as as) bs
]
where
-- ignore the first n arguments
ignore1 = flip const
ignore2 = const . ignore1
ignore3 = const . ignore2
ignore4 = const . ignore3
ignore5 = const . ignore4
ignore6 = const . ignore5
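    -- e.g. @ignore1 a b == b@ and @ignore2 a b c == c@, so each zipWithN
    -- property above keeps only the elements drawn from its last list argument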
withList = forAll (generateListOfElement genElementA)
withList2 = forAll ((,) <$> generateListOfElement genElementA <*> generateListOfElement genElementB)
tests :: [TestTree]
tests =
[ testArrayRefs
, testChunkedUArrayRefs
, Bits.tests
, testStringRefs
, testGroup "VFS"
[ testGroup "FilePath" $ testCaseFilePath <> (testPath (arbitrary :: Gen FilePath))
]
, testGroup "Number" testNumberRefs
, testGroup "BoxedZippable"
[ testGroup "Array"
[ testGroup "from Array Int"
( testBoxedZippable
(Proxy :: Proxy (Array Int)) (Proxy :: Proxy (Array Int))
(Proxy :: Proxy (Array (Int, Int))) arbitrary arbitrary )
, testGroup "from String"
( testBoxedZippable
(Proxy :: Proxy String) (Proxy :: Proxy String)
(Proxy :: Proxy (Array (Char, Char))) arbitrary arbitrary )
, testGroup "from String and Array Char"
( testBoxedZippable
(Proxy :: Proxy String) (Proxy :: Proxy (Array Char))
(Proxy :: Proxy (Array (Char, Char))) arbitrary arbitrary )
, testGroup "from Array Int and Array Char"
( testBoxedZippable
(Proxy :: Proxy (Array Int)) (Proxy :: Proxy (Array Char))
(Proxy :: Proxy (Array (Int, Char))) arbitrary arbitrary )
]
]
, testGroup "Buildable"
[ testGroup "String"
(testBuildable (Proxy :: Proxy String) arbitrary arbitrary)
, testGroup "Array Int"
(testBuildable (Proxy :: Proxy (Array Int)) arbitrary arbitrary)
, testGroup "Array Char"
(testBuildable (Proxy :: Proxy (Array Char)) arbitrary arbitrary)
, testGroup "UArray Word8"
(testBuildable (Proxy :: Proxy (UArray Word8)) arbitrary arbitrary)
, testGroup "UArray Char"
(testBuildable (Proxy :: Proxy (UArray Char)) arbitrary arbitrary)
]
, testGroup "Zippable"
[ testGroup "String"
[ testGroup "from String"
( testZippable
(Proxy :: Proxy String) (Proxy :: Proxy String)
(Proxy :: Proxy String) arbitrary arbitrary arbitrary )
, testGroup "from Array Char"
( testZippable
(Proxy :: Proxy (Array Char)) (Proxy :: Proxy (Array Char))
(Proxy :: Proxy String) arbitrary arbitrary arbitrary )
, testGroup "from UArray Word8 and Array Int"
( testZippable
(Proxy :: Proxy (UArray Word8)) (Proxy :: Proxy (Array Int))
(Proxy :: Proxy String) arbitrary arbitrary arbitrary )
]
, testGroup "Array"
[ testGroup "from String"
( testZippable
(Proxy :: Proxy String) (Proxy :: Proxy String)
(Proxy :: Proxy (Array Int)) arbitrary arbitrary arbitrary )
, testGroup "from Array Char"
( testZippable
(Proxy :: Proxy (Array Char)) (Proxy :: Proxy (Array Char))
(Proxy :: Proxy (Array Char)) arbitrary arbitrary arbitrary )
, testGroup "from UArray Word8 and Array Int"
( testZippable
(Proxy :: Proxy (UArray Word8)) (Proxy :: Proxy (Array Int))
(Proxy :: Proxy (Array Int)) arbitrary arbitrary arbitrary )
]
, testGroup "UArray"
[ testGroup "from String"
( testZippable
(Proxy :: Proxy String) (Proxy :: Proxy String)
(Proxy :: Proxy (UArray Word8)) arbitrary arbitrary arbitrary )
, testGroup "from Array Char"
( testZippable
(Proxy :: Proxy (Array Char)) (Proxy :: Proxy (Array Char))
(Proxy :: Proxy (UArray Word16)) arbitrary arbitrary arbitrary )
, testGroup "from UArray Word8 and Array Int"
( testZippable
(Proxy :: Proxy (UArray Word8)) (Proxy :: Proxy (Array Int))
(Proxy :: Proxy (UArray Word32)) arbitrary arbitrary arbitrary )
]
, testGroup "Properties"
( testZippableProps (Proxy :: Proxy (Array Int)) (Proxy :: Proxy (Array Char))
arbitrary arbitrary )
]
, testParsers
, testGroup "Issues"
[ testGroup "218"
[ testCase "Foundation Strings" $
let str1 = "aa9a9\154" :: String
str2 = "a9\154" :: String
Just x = uncons $ snd $ breakElem '9' str1
x1 = breakElem '9' $ snd x
x2 = breakElem '9' str2
in if assertEq x1 x2 then return () else error "failed..."
, testCase "Lazy Strings" $
let str1 = "aa9a9\154" :: [Char]
str2 = "a9\154" :: [Char]
Just x = uncons $ snd $ breakElem '9' str1
x1 = breakElem '9' $ snd x
x2 = breakElem '9' str2
in if assertEq x1 x2 then return () else error "failed..."
]
]
]
testCaseModifiedUTF8 :: [Char] -> String -> Assertion
testCaseModifiedUTF8 ghcStr str
| ghcStr == fStr = return ()
| otherwise = assertFailure $ diffList ghcStr fStr
where
fStr :: [Char]
fStr = toList str
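-- For illustration only (hypothetical call, assuming @OverloadedStrings@ is
-- enabled so the second literal is a foundation 'String'):
--
-- > testCaseModifiedUTF8 "\955" "\955"
--
-- passes when the foundation 'String' converts back to the same GHC '[Char]'.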
main :: IO ()
main = defaultMain $ testGroup "foundation" tests
| vincenthz/hs-foundation | foundation/tests/Tests.hs | bsd-3-clause | 14,456 | 0 | 17 | 4,561 | 4,315 | 2,249 | 2,066 | 268 | 3 |
import qualified Numeric.Units.Dimensional.TF.Test
import qualified Numeric.Units.Dimensional.TF.QuantitiesTest
main :: IO ()
main = do
Numeric.Units.Dimensional.TF.Test.main
Numeric.Units.Dimensional.TF.QuantitiesTest.main
| bjornbm/dimensional-tf | Test.hs | bsd-3-clause | 215 | 1 | 7 | 16 | 45 | 29 | 16 | 5 | 1 |
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module Packages (
-- * GHC packages
array, base, binary, bytestring, cabal, checkApiAnnotations, checkPpr,
compareSizes, compiler, containers, deepseq, deriveConstants, directory,
exceptions, filepath, genapply, genprimopcode, ghc, ghcBoot, ghcBootTh,
ghcCompact, ghcHeap, ghci, ghcPkg, ghcPrim, haddock, haskeline,
hsc2hs, hp2ps, hpc, hpcBin, integerGmp, integerSimple, iserv, iservProxy,
libffi, libiserv, mtl, parsec, pretty, primitive, process, remoteIserv, rts,
runGhc, stm, templateHaskell, terminfo, text, time, timeout, touchy,
transformers, unlit, unix, win32, xhtml, ghcPackages, isGhcPackage,
-- * Package information
programName, nonHsMainPackage, autogenPath, programPath, timeoutPath,
rtsContext, rtsBuildPath, libffiBuildPath, libffiLibraryName,
ensureConfigured
) where
import Hadrian.Package
import Hadrian.Utilities
import Base
import Context
import Oracles.Flag
import Oracles.Setting
-- | These are all GHC packages we know about. Build rules will be generated for
-- all of them. However, not all of these packages will be built. For example,
-- package 'win32' is built only on Windows. @GHC.defaultPackages@ defines
-- default conditions for building each package. Users can add their own
-- packages and modify the default build conditions in "UserSettings".
ghcPackages :: [Package]
ghcPackages =
[ array, base, binary, bytestring, cabal, checkPpr, checkApiAnnotations
, compareSizes, compiler, containers, deepseq, deriveConstants, directory
, exceptions, filepath, genapply, genprimopcode, ghc, ghcBoot, ghcBootTh
, ghcCompact, ghcHeap, ghci, ghcPkg, ghcPrim, haddock, haskeline, hsc2hs
, hp2ps, hpc, hpcBin, integerGmp, integerSimple, iserv, libffi, libiserv, mtl
, parsec, pretty, process, rts, runGhc, stm, templateHaskell
, terminfo, text, time, touchy, transformers, unlit, unix, win32, xhtml
, timeout ]
-- TODO: Optimise by switching to sets of packages.
isGhcPackage :: Package -> Bool
isGhcPackage = (`elem` ghcPackages)
-- | Package definitions, see 'Package'.
array = lib "array"
base = lib "base"
binary = lib "binary"
bytestring = lib "bytestring"
cabal = lib "Cabal" `setPath` "libraries/Cabal/Cabal"
checkApiAnnotations = util "check-api-annotations"
checkPpr = util "check-ppr"
compareSizes = util "compareSizes" `setPath` "utils/compare_sizes"
compiler = top "ghc" `setPath` "compiler"
containers = lib "containers" `setPath` "libraries/containers/containers"
deepseq = lib "deepseq"
deriveConstants = util "deriveConstants"
directory = lib "directory"
exceptions = lib "exceptions"
filepath = lib "filepath"
genapply = util "genapply"
genprimopcode = util "genprimopcode"
ghc = prg "ghc-bin" `setPath` "ghc"
ghcBoot = lib "ghc-boot"
ghcBootTh = lib "ghc-boot-th"
ghcCompact = lib "ghc-compact"
ghcHeap = lib "ghc-heap"
ghci = lib "ghci"
ghcPkg = util "ghc-pkg"
ghcPrim = lib "ghc-prim"
haddock = util "haddock"
haskeline = lib "haskeline"
hsc2hs = util "hsc2hs"
hp2ps = util "hp2ps"
hpc = lib "hpc"
hpcBin = util "hpc-bin" `setPath` "utils/hpc"
integerGmp = lib "integer-gmp"
integerSimple = lib "integer-simple"
iserv = util "iserv"
iservProxy = util "iserv-proxy"
libffi = top "libffi"
libiserv = lib "libiserv"
mtl = lib "mtl"
parsec = lib "parsec"
pretty = lib "pretty"
primitive = lib "primitive"
process = lib "process"
remoteIserv = util "remote-iserv"
rts = top "rts"
runGhc = util "runghc"
stm = lib "stm"
templateHaskell = lib "template-haskell"
terminfo = lib "terminfo"
text = lib "text"
time = lib "time"
timeout = util "timeout" `setPath` "testsuite/timeout"
touchy = util "touchy"
transformers = lib "transformers"
unlit = util "unlit"
unix = lib "unix"
win32 = lib "Win32"
xhtml = lib "xhtml"
-- | Construct a library package, e.g. @array@.
lib :: PackageName -> Package
lib name = library name ("libraries" -/- name)
-- | Construct a top-level library package, e.g. @compiler@.
top :: PackageName -> Package
top name = library name name
-- | Construct a top-level program package, e.g. @ghc@.
prg :: PackageName -> Package
prg name = program name name
-- | Construct a utility package, e.g. @haddock@.
util :: PackageName -> Package
util name = program name ("utils" -/- name)
-- | Amend a package path if it doesn't conform to a typical pattern.
setPath :: Package -> FilePath -> Package
setPath pkg path = pkg { pkgPath = path }
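-- For illustration only (a sketch; these packages are hypothetical and not
-- part of the GHC build): the helpers above compose as follows, with
-- 'setPath' overriding the conventional directory.
--
-- > myTool = util "my-tool"                                -- utils/my-tool
-- > myText = lib "my-text" `setPath` "libraries/text/sub"  -- custom path
--
-- Real user packages are declared via "UserSettings" rather than in this
-- module.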
-- | Given a 'Context', compute the name of the program that is built in it
-- assuming that the corresponding package's type is 'Program'. For example, GHC
-- built in 'Stage0' is called @ghc-stage1@. If the given package is a
-- 'Library', the function simply returns its name.
programName :: Context -> Action String
programName Context {..} = do
cross <- flag CrossCompiling
targetPlatform <- setting TargetPlatformFull
let prefix = if cross then targetPlatform ++ "-" else ""
-- TODO: Can we extract this information from Cabal files?
-- Alp: We could, but then the iserv package would have to
-- use Cabal conditionals + a 'profiling' flag
-- to declare the executable name, and I'm not sure
-- this is allowed (or desired for that matter).
return $ prefix ++ case package of
p | p == ghc -> "ghc"
| p == hpcBin -> "hpc"
| p == iserv -> "ghc-iserv" ++ concat [
if wayUnit' `wayUnit` way
then suffix
else ""
| (wayUnit', suffix) <- [
(Profiling, "-prof"),
(Dynamic, "-dyn")
]]
_ -> pkgName package
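-- For illustration (platform string assumed): a non-cross build of 'iserv' in
-- the profiling way yields "ghc-iserv-prof", while a cross-compiler targeting
-- @x86_64-unknown-linux@ yields a prefixed name such as
-- "x86_64-unknown-linux-ghc".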
-- | The 'FilePath' to a program executable in a given 'Context'.
programPath :: Context -> Action FilePath
programPath context@Context {..} = do
-- TODO: The @touchy@ utility lives in the @lib/bin@ directory instead of
-- @bin@, which is likely just a historical accident that should be fixed.
-- See: https://github.com/snowleopard/hadrian/issues/570
-- Likewise for @iserv@ and @unlit@.
name <- programName context
path <- if package `elem` [iserv, touchy, unlit]
then stageLibPath stage <&> (-/- "bin")
else stageBinPath stage
return $ path -/- name <.> exe
-- TODO: Move @timeout@ to the @util@ directory and build in a more standard
-- location like other programs used only by the testsuite.
timeoutPath :: FilePath
timeoutPath = "testsuite/timeout/install-inplace/bin/timeout" <.> exe
-- TODO: Can we extract this information from Cabal files?
-- | Some program packages should not be linked with Haskell main function.
nonHsMainPackage :: Package -> Bool
nonHsMainPackage = (`elem` [ghc, hp2ps, iserv, touchy, unlit])
-- TODO: Combine this with 'programName'.
-- | Path to the @autogen@ directory generated by 'buildAutogenFiles'.
autogenPath :: Context -> Action FilePath
autogenPath context@Context {..}
| isLibrary package = autogen "build"
| package == ghc = autogen "build/ghc"
| package == hpcBin = autogen "build/hpc"
| otherwise = autogen $ "build" -/- pkgName package
where
autogen dir = contextPath context <&> (-/- dir -/- "autogen")
-- | Make sure a given context has already been fully configured. The
-- implementation simply calls 'need' on the context's @autogen/cabal_macros.h@
-- file, which triggers 'configurePackage' and 'buildAutogenFiles'. Why this
-- indirection? Going via @autogen/cabal_macros.h@ allows us to cache the
-- configuration steps, i.e. not to repeat them if they have already been done.
ensureConfigured :: Context -> Action ()
ensureConfigured context = do
autogen <- autogenPath context
need [autogen -/- "cabal_macros.h"]
-- | RTS is considered a Stage1 package. This determines RTS build directory.
rtsContext :: Stage -> Context
rtsContext stage = vanillaContext stage rts
-- | Path to the RTS build directory.
rtsBuildPath :: Stage -> Action FilePath
rtsBuildPath stage = buildPath (rtsContext stage)
-- | Build directory for in-tree 'libffi' library.
libffiBuildPath :: Stage -> Action FilePath
libffiBuildPath stage = buildPath $ Context
stage
libffi
(error "libffiBuildPath: way not set.")
-- | Name of the 'libffi' library.
libffiLibraryName :: Action FilePath
libffiLibraryName = do
useSystemFfi <- flag UseSystemFfi
return $ case (useSystemFfi, windowsHost) of
(True , False) -> "ffi"
(False, False) -> "Cffi"
(_ , True ) -> "Cffi-6"
| sdiehl/ghc | hadrian/src/Packages.hs | bsd-3-clause | 9,492 | 0 | 18 | 2,639 | 1,772 | 994 | 778 | -1 | -1 |
-- HdpH runtime configuration parameters
--
-- Author: Patrick Maier
-----------------------------------------------------------------------------
module Control.Parallel.HdpH.Conf
( -- * HdpH runtime system configuration parameters
RTSConf(..),
defaultRTSConf -- :: RTSConf
) where
import Prelude
import Control.Parallel.HdpH.Internal.Location (dbgNone)
-----------------------------------------------------------------------------
-- Runtime configuration parameters (for RTS monad stack)
-- | 'RTSConf' is a record data type collecting a number of parameters
-- governing the behaviour of the HdpH runtime system.
data RTSConf =
RTSConf {
debugLvl :: Int,
-- ^ Debug level, a number defined in module
-- "Control.Parallel.HdpH.Internal.Location".
-- Default is 0 (corresponding to no debug output).
scheds :: Int,
-- ^ Number of concurrent schedulers per node. Must be positive and
-- should be @<=@ to the number of HECs (as set by GHC RTS option
-- @-N@). Default is 1.
wakeupDly :: Int,
-- ^ Interval in microseconds to wake up sleeping schedulers
-- (which is necessary to recover from a race condition between
-- concurrent schedulers). Must be positive.
-- Default is 1000 (corresponding to 1 millisecond).
maxHops :: Int,
-- ^ Number of hops a FISH message may travel before being considered
        -- failed. Must be non-negative. Default is 1.
maxFish :: Int,
-- ^ Low sparkpool watermark for fishing. RTS will send FISH message
-- unless size of spark pool is greater than 'maxFish' (or unless
-- a FISH is outstanding). Must be non-negative;
-- should be @<@ 'minSched'. Default is 1.
minSched :: Int,
-- ^ Low sparkpool watermark for scheduling. RTS will respond to FISH
-- messages by SCHEDULEing sparks unless size of spark pool is less
-- than 'minSched'. Must be non-negative; should be @>@ 'maxFish'.
-- Default is 2.
minFishDly :: Int,
-- ^ After a failed FISH, minimal delay in microseconds before
-- sending another FISH message; the actual delay is chosen randomly
-- between 'minFishDly' and 'maxFishDly'. Must be non-negative; should
-- be @<=@ 'maxFishDly'.
-- Default is 10000 (corresponding to 10 milliseconds).
maxFishDly :: Int,
-- ^ After a failed FISH, maximal delay in microseconds before
-- sending another FISH message; the actual delay is chosen randomly
-- between 'minFishDly' and 'maxFishDly'. Must be non-negative; should
-- be @>=@ 'minFishDly'.
        -- Default is 200000 (corresponding to 0.2 seconds).
numProcs :: Int,
-- ^ Number of nodes constituting the distributed runtime system.
-- Must be positive. Default is 1.
networkInterface :: String,
-- ^ Network interface, required to autodetect a node's
-- IP address. The string must be one of the interface names
-- returned by the POSIX command @ifconfig@.
-- Default is @eth0@ (corresponding to the first Ethernet interface).
keepAliveFreq :: Int,
-- ^ Frequency of keep-alive broadcasts to all connected nodes.
        -- Must be non-negative. Default is 0, which disables keep-alives.
deathTiming :: [Int],
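        -- ^ Timings of simulated node failures used in fault-tolerance
        -- experiments (interpretation of the entries is left to the
        -- scheduler). Default is @[]@.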
chaosMonkey :: Bool
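        -- ^ Whether to enable a chaos-monkey mode that kills nodes at random
        -- to exercise fault tolerance. Default is 'False'.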
}
-- | Default runtime system configuration parameters.
defaultRTSConf :: RTSConf
defaultRTSConf =
RTSConf {
debugLvl = dbgNone, -- no debug information
scheds = 1, -- only 1 scheduler by default
wakeupDly = 1000, -- wake up one sleeping scheduler every millisecond
    maxHops = 1, -- no more than 1 hop per FISH
maxFish = 1, -- send FISH when <= 1 spark in pool
minSched = 2, -- reply with SCHEDULE when >= 2 sparks in pool
minFishDly = 10000, -- delay at least 10 milliseconds after failed FISH
-- maxFishDly = 1000000, -- delay up to 1 second after failed FISH
    maxFishDly = 200000, -- delay up to 0.2 seconds after failed FISH (to compensate for lack of hopping)
numProcs = 1, -- only 1 node by default
keepAliveFreq = 0, -- 0 by default, turning off keep-alives
    networkInterface = "eth0", -- default interface: the first Ethernet adapter
deathTiming = [],
chaosMonkey = False }
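-- For illustration only (a sketch; the field values are arbitrary): a node
-- running 4 schedulers and fishing over the second Ethernet interface could
-- be configured by overriding the defaults, e.g.
--
-- > myConf :: RTSConf
-- > myConf = defaultRTSConf { scheds = 4, networkInterface = "eth1" }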
| robstewart57/hdph-rs | src/Control/Parallel/HdpH/Conf.hs | bsd-3-clause | 4,392 | 0 | 9 | 1,134 | 279 | 205 | 74 | 37 | 1 |
{-
(c) The University of Glasgow 2006
(c) The AQUA Project, Glasgow University, 1993-1998
TcRules: Typechecking transformation rules
-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TypeFamilies #-}
module TcRules ( tcRules ) where
import GhcPrelude
import HsSyn
import TcRnTypes
import TcRnMonad
import TcSimplify
import TcMType
import TcType
import TcHsType
import TcExpr
import TcEnv
import TcUnify( buildImplicationFor )
import TcEvidence( mkTcCoVarCo )
import Type
import Id
import Var( EvVar )
import BasicTypes ( RuleName )
import SrcLoc
import Outputable
import FastString
import Bag
import Data.List( partition )
{-
Note [Typechecking rules]
~~~~~~~~~~~~~~~~~~~~~~~~~
We *infer* the type of the LHS, and use that type to *check* the type of
the RHS. That means that higher-rank rules work reasonably well. Here's
an example (test simplCore/should_compile/rule2.hs) produced by Roman:
foo :: (forall m. m a -> m b) -> m a -> m b
foo f = ...
bar :: (forall m. m a -> m a) -> m a -> m a
bar f = ...
{-# RULES "foo/bar" foo = bar #-}
He wanted the rule to typecheck.
-}
tcRules :: [LRuleDecls GhcRn] -> TcM [LRuleDecls GhcTcId]
tcRules decls = mapM (wrapLocM tcRuleDecls) decls
tcRuleDecls :: RuleDecls GhcRn -> TcM (RuleDecls GhcTcId)
tcRuleDecls (HsRules src decls)
= do { tc_decls <- mapM (wrapLocM tcRule) decls
; return (HsRules src tc_decls) }
tcRule :: RuleDecl GhcRn -> TcM (RuleDecl GhcTcId)
tcRule (HsRule name act hs_bndrs lhs fv_lhs rhs fv_rhs)
= addErrCtxt (ruleCtxt $ snd $ unLoc name) $
do { traceTc "---- Rule ------" (pprFullRuleName name)
-- Note [Typechecking rules]
; (vars, bndr_wanted) <- captureConstraints $
tcRuleBndrs hs_bndrs
-- bndr_wanted constraints can include wildcard hole
-- constraints, which we should not forget about.
-- It may mention the skolem type variables bound by
-- the RULE. c.f. Trac #10072
; let (id_bndrs, tv_bndrs) = partition isId vars
; (lhs', lhs_wanted, rhs', rhs_wanted, rule_ty)
<- tcExtendTyVarEnv tv_bndrs $
tcExtendIdEnv id_bndrs $
do { -- See Note [Solve order for RULES]
((lhs', rule_ty), lhs_wanted) <- captureConstraints (tcInferRho lhs)
; (rhs', rhs_wanted) <- captureConstraints $
tcMonoExpr rhs (mkCheckExpType rule_ty)
; return (lhs', lhs_wanted, rhs', rhs_wanted, rule_ty) }
; traceTc "tcRule 1" (vcat [ pprFullRuleName name
, ppr lhs_wanted
, ppr rhs_wanted ])
; let all_lhs_wanted = bndr_wanted `andWC` lhs_wanted
; (lhs_evs, residual_lhs_wanted) <- simplifyRule (snd $ unLoc name)
all_lhs_wanted
rhs_wanted
       -- SimplifyRule Plan, step 4
-- Now figure out what to quantify over
-- c.f. TcSimplify.simplifyInfer
-- We quantify over any tyvars free in *either* the rule
-- *or* the bound variables. The latter is important. Consider
-- ss (x,(y,z)) = (x,z)
-- RULE: forall v. fst (ss v) = fst v
-- The type of the rhs of the rule is just a, but v::(a,(b,c))
--
       -- We also need to get the completely-unconstrained tyvars of
-- the LHS, lest they otherwise get defaulted to Any; but we do that
-- during zonking (see TcHsSyn.zonkRule)
; let tpl_ids = lhs_evs ++ id_bndrs
; forall_tkvs <- zonkTcTypesAndSplitDepVars $
rule_ty : map idType tpl_ids
; gbls <- tcGetGlobalTyCoVars -- Even though top level, there might be top-level
-- monomorphic bindings from the MR; test tc111
; qtkvs <- quantifyTyVars gbls forall_tkvs
; traceTc "tcRule" (vcat [ pprFullRuleName name
, ppr forall_tkvs
, ppr qtkvs
, ppr rule_ty
, vcat [ ppr id <+> dcolon <+> ppr (idType id) | id <- tpl_ids ]
])
       -- SimplifyRule Plan, step 5
-- Simplify the LHS and RHS constraints:
-- For the LHS constraints we must solve the remaining constraints
-- (a) so that we report insoluble ones
-- (b) so that we bind any soluble ones
; let skol_info = RuleSkol (snd (unLoc name))
; (lhs_implic, lhs_binds) <- buildImplicationFor topTcLevel skol_info qtkvs
lhs_evs residual_lhs_wanted
; (rhs_implic, rhs_binds) <- buildImplicationFor topTcLevel skol_info qtkvs
lhs_evs rhs_wanted
; emitImplications (lhs_implic `unionBags` rhs_implic)
; return (HsRule name act
(map (noLoc . RuleBndr . noLoc) (qtkvs ++ tpl_ids))
(mkHsDictLet lhs_binds lhs') fv_lhs
(mkHsDictLet rhs_binds rhs') fv_rhs) }
tcRuleBndrs :: [LRuleBndr GhcRn] -> TcM [Var]
tcRuleBndrs []
= return []
tcRuleBndrs (L _ (RuleBndr (L _ name)) : rule_bndrs)
= do { ty <- newOpenFlexiTyVarTy
; vars <- tcRuleBndrs rule_bndrs
; return (mkLocalId name ty : vars) }
tcRuleBndrs (L _ (RuleBndrSig (L _ name) rn_ty) : rule_bndrs)
-- e.g x :: a->a
-- The tyvar 'a' is brought into scope first, just as if you'd written
-- a::*, x :: a->a
= do { let ctxt = RuleSigCtxt name
; (_ , tvs, id_ty) <- tcHsPatSigType ctxt rn_ty
; let id = mkLocalIdOrCoVar name id_ty
-- See Note [Pattern signature binders] in TcHsType
-- The type variables scope over subsequent bindings; yuk
; vars <- tcExtendTyVarEnv2 tvs $
tcRuleBndrs rule_bndrs
; return (map snd tvs ++ id : vars) }
ruleCtxt :: FastString -> SDoc
ruleCtxt name = text "When checking the transformation rule" <+>
doubleQuotes (ftext name)
{-
*********************************************************************************
* *
Constraint simplification for rules
* *
***********************************************************************************
Note [The SimplifyRule Plan]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Example. Consider the following left-hand side of a rule
f (x == y) (y > z) = ...
If we typecheck this expression we get constraints
d1 :: Ord a, d2 :: Eq a
We do NOT want to "simplify" to the LHS
forall x::a, y::a, z::a, d1::Ord a.
f ((==) (eqFromOrd d1) x y) ((>) d1 y z) = ...
Instead we want
forall x::a, y::a, z::a, d1::Ord a, d2::Eq a.
f ((==) d2 x y) ((>) d1 y z) = ...
Here is another example:
fromIntegral :: (Integral a, Num b) => a -> b
{-# RULES "foo" fromIntegral = id :: Int -> Int #-}
In the rule, a=b=Int, and Num Int is a superclass of Integral Int. But
we *don't* want to get
forall dIntegralInt.
fromIntegral Int Int dIntegralInt (scsel dIntegralInt) = id Int
because the scsel will mess up RULE matching. Instead we want
forall dIntegralInt, dNumInt.
fromIntegral Int Int dIntegralInt dNumInt = id Int
Even if we have
g (x == y) (y == z) = ..
where the two dictionaries are *identical*, we do NOT WANT
forall x::a, y::a, z::a, d1::Eq a
    g ((==) d1 x y) ((==) d1 y z) = ...
because that will only match if the dict args are (visibly) equal.
Instead we want to quantify over the dictionaries separately.
In short, simplifyRule must *only* squash equalities, leaving
all dicts unchanged, with absolutely no sharing.
Also note that we can't solve the LHS constraints in isolation:
Example foo :: Ord a => a -> a
foo_spec :: Int -> Int
{-# RULE "foo" foo = foo_spec #-}
Here, it's the RHS that fixes the type variable
HOWEVER, under a nested implication things are different
Consider
f :: (forall a. Eq a => a->a) -> Bool -> ...
{-# RULES "foo" forall (v::forall b. Eq b => b->b).
       f v True = ...
#-}
Here we *must* solve the wanted (Eq a) from the given (Eq a)
resulting from skolemising the argument type of f. So we
revert to SimplCheck when going under an implication.
--------- So the SimplifyRule Plan is this -----------------------
* Step 0: typecheck the LHS and RHS to get constraints from each
* Step 1: Simplify the LHS and RHS constraints all together in one bag
We do this to discover all unification equalities
* Step 2: Zonk the ORIGINAL (unsimplified) LHS constraints, to take
advantage of those unifications
* Step 3: Partition the LHS constraints into the ones we will
quantify over, and the others.
See Note [RULE quantification over equalities]
* Step 4: Decide on the type variables to quantify over
* Step 5: Simplify the LHS and RHS constraints separately, using the
quantified constraints as givens
Note [Solve order for RULES]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In step 1 above, we need to be a bit careful about solve order.
Consider
f :: Int -> T Int
type instance T Int = Bool
RULE f 3 = True
From the RULE we get
lhs-constraints: T Int ~ alpha
rhs-constraints: Bool ~ alpha
where 'alpha' is the type that connects the two. If we glom them
all together, and solve the RHS constraint first, we might solve
with alpha := Bool. But then we'd end up with a RULE like
    RULE: f 3 |> (co :: T Int ~ Bool) = True
which is terrible. We want
RULE: f 3 = True |> (sym co :: Bool ~ T Int)
So we are careful to solve the LHS constraints first, and *then* the
RHS constraints. Actually much of this is done by the on-the-fly
constraint solving, so the same order must be observed in
tcRule.
Note [RULE quantification over equalities]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Deciding which equalities to quantify over is tricky:
* We do not want to quantify over insoluble equalities (Int ~ Bool)
(a) because we prefer to report a LHS type error
(b) because if such things end up in 'givens' we get a bogus
"inaccessible code" error
* But we do want to quantify over things like (a ~ F b), where
F is a type function.
The difficulty is that it's hard to tell what is insoluble!
So we see whether the simplification step yielded any type errors,
and if so refrain from quantifying over *any* equalities.
Note [Quantifying over coercion holes]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Equality constraints from the LHS will emit coercion hole Wanteds.
These don't have a name, so we can't quantify over them directly.
Instead, because we really do want to quantify here, invent a new
EvVar for the coercion, fill the hole with the invented EvVar, and
then quantify over the EvVar. Not too tricky -- just some
impedance matching, really.
Note [Simplify cloned constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
At this stage, we're simplifying constraints only for insolubility
and for unification. Note that all the evidence is quickly discarded.
We use a clone of the real constraint. If we don't do this,
then RHS coercion-hole constraints get filled in, only to get filled
in *again* when solving the implications emitted from tcRule. That's
terrible, so we avoid the problem by cloning the constraints.
-}
simplifyRule :: RuleName
-> WantedConstraints -- Constraints from LHS
-> WantedConstraints -- Constraints from RHS
-> TcM ( [EvVar] -- Quantify over these LHS vars
, WantedConstraints) -- Residual un-quantified LHS constraints
-- See Note [The SimplifyRule Plan]
-- NB: This consumes all simple constraints on the LHS, but not
-- any LHS implication constraints.
simplifyRule name lhs_wanted rhs_wanted
= do { -- We allow ourselves to unify environment
-- variables: runTcS runs with topTcLevel
; lhs_clone <- cloneWC lhs_wanted
; rhs_clone <- cloneWC rhs_wanted
-- Note [The SimplifyRule Plan] step 1
-- First solve the LHS and *then* solve the RHS
-- Crucially, this performs unifications
-- See Note [Solve order for RULES]
-- See Note [Simplify cloned constraints]
; insoluble <- runTcSDeriveds $
do { lhs_resid <- solveWanteds lhs_clone
; rhs_resid <- solveWanteds rhs_clone
; return ( insolubleWC lhs_resid ||
insolubleWC rhs_resid ) }
-- Note [The SimplifyRule Plan] step 2
; zonked_lhs_simples <- zonkSimples (wc_simple lhs_wanted)
-- Note [The SimplifyRule Plan] step 3
; let quantify_ct :: Ct -> Bool
quantify_ct ct
| EqPred _ t1 t2 <- classifyPredType (ctPred ct)
= not (insoluble || t1 `tcEqType` t2)
-- Note [RULE quantification over equalities]
| isHoleCt ct
= False -- Don't quantify over type holes, obviously
| otherwise
= True
-- Note [The SimplifyRule Plan] step 3
; let (quant_cts, no_quant_cts) = partitionBag quantify_ct
zonked_lhs_simples
; quant_evs <- mapM mk_quant_ev (bagToList quant_cts)
; traceTc "simplifyRule" $
vcat [ text "LHS of rule" <+> doubleQuotes (ftext name)
, text "lhs_wanted" <+> ppr lhs_wanted
, text "rhs_wanted" <+> ppr rhs_wanted
, text "zonked_lhs_simples" <+> ppr zonked_lhs_simples
, text "quant_cts" <+> ppr quant_cts
, text "no_quant_cts" <+> ppr no_quant_cts
]
; return (quant_evs, lhs_wanted { wc_simple = no_quant_cts }) }
where
mk_quant_ev :: Ct -> TcM EvVar
mk_quant_ev ct
| CtWanted { ctev_dest = dest, ctev_pred = pred } <- ctEvidence ct
= case dest of
EvVarDest ev_id -> return ev_id
HoleDest hole -> -- See Note [Quantifying over coercion holes]
do { ev_id <- newEvVar pred
; fillCoercionHole hole (mkTcCoVarCo ev_id)
; return ev_id }
mk_quant_ev ct = pprPanic "mk_quant_ev" (ppr ct)
| ezyang/ghc | compiler/typecheck/TcRules.hs | bsd-3-clause | 14,618 | 1 | 17 | 4,432 | 1,665 | 868 | 797 | 133 | 3 |