code stringlengths 5..1.03M | repo_name stringlengths 5..90 | path stringlengths 4..158 | license stringclasses 15 values | size int64 5..1.03M | n_ast_errors int64 0..53.9k | ast_max_depth int64 2..4.17k | n_whitespaces int64 0..365k | n_ast_nodes int64 3..317k | n_ast_terminals int64 1..171k | n_ast_nonterminals int64 1..146k | loc int64 -1..37.3k | cycloplexity int64 -1..1.31k |
---|---|---|---|---|---|---|---|---|---|---|---|---|
-- HTTP/1.1 requests consist of the following components:
--
-- request:
--
-- Request-Line
-- *(( general-header
-- | request-header
-- | entity-header ) CRLF)
-- CRLF
-- [ message-body ]
--
-- request-line:
--
-- method SP request-uri SP http-version CRLF
--
-- method: "OPTIONS"
-- | "GET"
-- | "HEAD"
-- | "POST"
-- | "PUT"
-- | "DELETE"
-- | "TRACE"
-- | "CONNECT"
--
-- request-uri: "*"
-- | absoluteURI
-- | abs_path
-- | authority
--
-- general-header: Cache-Control
-- | Connection
-- | Date
-- | Pragma
-- | Trailer
-- | Transfer-Encoding
-- | Upgrade
-- | Via
-- | Warning
--
-- request-header: Accept
-- | Accept-Charset
-- | Accept-Encoding
-- | Accept-Language
-- | Authorization
-- | Expect
-- | From
-- | Host
-- | If-Match
-- | If-Modified-Since
-- | If-Unmodified-Since
-- | Max-Forwards
-- | Proxy-Authorization
-- | Range
-- | Referer
-- | TE
-- | User-Agent
--
-- entity-header: Allow
-- | Content-Encoding
-- | Content-Language
-- | Content-Length
-- | Content-Location
-- | Content-MD5
-- | Content-Range
-- | Content-Type
-- | Expires
-- | Last-Modified
-- | extension-header
--
-- extension-header: message-header
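--
-- For example (illustrative values only), a minimal request conforming to the
-- grammar above:
--
-- GET /index.html HTTP/1.1
-- Host: www.example.com
--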
import qualified Http.Request
import qualified Http.Response
| ndreynolds/hsURL | Main.hs | mit | 1,787 | 0 | 4 | 791 | 83 | 78 | 5 | 2 | 0 |
module Countdown2 where
import Data.List
import Data.Function
import Types
legal :: Op -> Value -> Value -> Bool
legal Add v1 v2 = v1 <= v2
legal Sub v1 v2 = v2 < v1
legal Mul v1 v2 = 1 < v1 && v1 <= v2
legal Div v1 v2 = 1 < v2 && v1 `mod` v2 == 0
countdown2 :: Int -> [Int] -> (Expr, Value)
countdown2 n = nearest n . concatMap mkExprs . subseqs
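-- Illustrative usage (target and numbers are example values only):
--
-- >>> countdown2 831 [1,3,7,10,25,50]
--
-- returns a pair of an expression built from a subsequence of the given
-- numbers together with the reachable value closest to the target.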
unmerges :: [a] -> [([a],[a])]
unmerges [x,y] = [([x],[y])]
unmerges (x:xs) = [([x],xs)]
++ map (applyFst (x:)) (unmerges xs)
++ map (applySnd (x:)) (unmerges xs)
-- combine :: (Expr, Value) -> (Expr, Value) -> [(Expr, Value)]
-- combine (e1,v1) (e2,v2) = [(App op e1 e2, apply op v1 v2) | op <- ops, legal op v1 v2]
-- ++ [(App op e2 e1, apply op v2 v1) | op <- ops, legal op v2 v1]
-- where ops = [Add,Sub,Mul,Div]
combine :: (Expr, Value) -> (Expr, Value) -> [(Expr, Value)]
combine (e1,v1) (e2,v2)
| v1 < v2 = comb1 (e1,v1) (e2,v2)
| v1 == v2 = comb2 (e1,v1) (e2,v2)
| v1 > v2 = comb1 (e2,v2) (e1,v1)
comb1 :: (Expr, Value) -> (Expr, Value) -> [(Expr, Value)]
comb1 (e1,v1) (e2,v2) = [(App Add e1 e2, v1+v2), (App Sub e2 e1, v2-v1)] ++
if 1 < v1
                          then [(App Mul e1 e2, v1*v2)] ++ [(App Div e2 e1, q) | r == 0]
else []
where (q,r) = divMod v2 v1
comb2 :: (Expr, Value) -> (Expr, Value) -> [(Expr, Value)]
comb2 (e1,v1) (e2,v2) = [(App Add e1 e2, v1+v2)] ++
if 1 < v1
then [(App Mul e1 e2, v1*v2)] ++ [(App Div e1 e2, 1)]
else []
-- same as countdown1
subseqs :: [Value] -> [[Value]]
subseqs [x] = [[x]]
subseqs (x:xs) = xss ++ [x] : map (x:) xss
where xss = subseqs xs
value :: Expr -> Value
value (Num n) = n
value (App op e1 e2) = apply op (value e1) (value e2)
apply :: Op -> Value -> Value -> Value
apply Add = (+)
apply Sub = (-)
apply Mul = (*)
apply Div = div
mkExprs :: [Value] -> [(Expr, Value)]
mkExprs [x] = [(Num x, x)]
mkExprs xs = [ev | (ys,zs) <- unmerges xs
,ev1 <- mkExprs ys
,ev2 <- mkExprs zs
,ev <- combine ev1 ev2]
nearest :: Int -> [(Expr, Value)] -> (Expr, Value)
-- nearest n ts = minimumBy (compare `on` snd) [(e, abs $ v - n) | (e,v) <- ts]
nearest n ((e,v):evs) = if d == 0 then (e,v)
else search n d (e,v) evs
where d = abs (n - v)
search n d ev [] = ev
search n d ev ((e,v):evs)
| d' == 0 = (e,v)
| d' < d = search n d' (e,v) evs
| d' >= d = search n d ev evs
where d' = abs (n - v)
--
applyFst :: (a -> b) -> (a, c) -> (b, c)
applyFst f (x,y) = (f x, y)
applySnd :: (b -> c) -> (a, b) -> (a, c)
applySnd f (x,y) = (x, f y)
| y-kamiya/functional-algorithm-design | src/Countdown/Countdown2.hs | mit | 2,816 | 0 | 10 | 942 | 1,405 | 781 | 624 | 64 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module Print (fromValue, fromTagged, encode) where
import Data.Monoid (mappend)
import qualified Data.Text as T
import Data.Text.Lazy.Builder
import Data.Text.Lazy.Builder.Int (decimal)
import Data.Text.Lazy.Builder.RealFloat (realFloat)
import Data.Text.Lazy.Encoding (encodeUtf8, decodeUtf8)
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy as L
import qualified Data.Vector as V
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Data.EDN.Types as E
import Data.EDN.Types.Class (ToEDN, toEDN)
-- | Encode a Tagged EDN value to a 'Builder'.
fromTagged :: Int -> E.TaggedValue -> Builder
fromTagged n (E.NoTag v) = fromValue n v
fromTagged n (E.Tagged v "" t) = singleton '#' <> string t <> " " <> fromValue n v
fromTagged n (E.Tagged v ns t) = singleton '#' <> string ns <> singleton '/' <> string t <> " " <> fromValue n v
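-- For example (illustrative): an untagged value renders as the bare value, a
-- value tagged "inst" renders as "#inst <value>", and a namespaced tag such as
-- "my/type" renders as "#my/type <value>".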
spaces :: Int -> Builder
spaces n = fromString $ replicate n '\t'
-- | Encode a raw EDN value to a 'Builder'.
fromValue :: Int -> E.Value -> Builder
fromValue _ E.Nil = "nil"
fromValue _ (E.Boolean b) = if b then "true" else "false"
fromValue _ (E.String t) = "\"" <> quote t <> "\""
fromValue _ (E.Character c) = "\\" <> quoteChar c
fromValue _ (E.Symbol "" v) = string v
fromValue _ (E.Symbol ns v) = string ns <> "/" <> string v
fromValue _ (E.Keyword kw) = ":" <> string kw
fromValue _ (E.Integer i) = decimal i
fromValue _ (E.Floating f) = realFloat f
fromValue n (E.List xs) = "(" <> fromList n xs <> ")"
fromValue n (E.Vec xs) = "[" <> fromList n (V.toList xs) <> "]"
fromValue n (E.Set xs) = "#{" <> fromList n (S.toList xs) <> "}"
fromValue n (E.Map as) = "{" <> "\n" <> fromAssoc (n+1) (M.assocs as) <> spaces n <> "}"
string :: BS.ByteString -> Builder
string s = fromLazyText . decodeUtf8 . L.fromChunks $ [s]
quote :: T.Text -> Builder
quote q = case T.uncons t of
Nothing -> fromText h
Just (c, t') -> fromText h <> escape c <> quote t'
where
(h, t) = T.break isEscape q
isEscape c = c == '\"' || c == '\\' || c < '\x20'
escape '\"' = "\\\""
escape '\\' = "\\\\"
escape '\n' = "\\n"
escape '\r' = "\\r"
escape '\t' = "\\t"
escape c = singleton c
quoteChar :: Char -> Builder
quoteChar c = case c of
'\n' -> string "newline"
'\r' -> string "return"
'\t' -> string "tab"
' ' -> string "space"
_ -> singleton c
fromList :: Int -> [E.TaggedValue] -> Builder
fromList _ [] = ""
fromList n (x:[]) = fromTagged n x
fromList n (x:xs) = fromTagged n x <> " " <> fromList n xs
fromAssoc :: Int -> [(E.Value, E.TaggedValue)] -> Builder
fromAssoc _ [] = ""
fromAssoc n ((k, v):[]) = spaces n <> fromValue n k <> " " <> fromTagged n v <> "\n"
fromAssoc n ((k, v):as) = spaces n <> fromValue n k <> " " <> fromTagged n v <> "\n" <> fromAssoc n as
-- | Serialize a value as a lazy 'L.ByteString'.
encode :: ToEDN a => a -> L.ByteString
encode = encodeUtf8 . toLazyText . fromTagged 0 . toEDN
{-# INLINE encode #-}
(<>) :: Builder -> Builder -> Builder
(<>) = mappend
{-# INLINE (<>) #-}
infixr 6 <>
| sordina/Edn | Print.hs | mit | 3,176 | 0 | 11 | 713 | 1,296 | 676 | 620 | 71 | 7 |
{-# LANGUAGE ConstrainedClassMethods #-}
-----------------------------------------------------------------------------
-- |
-- Module : Algebra.Graph.ToGraph
-- Copyright : (c) Andrey Mokhov 2016-2022
-- License : MIT (see the file LICENSE)
-- Maintainer : [email protected]
-- Stability : experimental
--
-- __Alga__ is a library for algebraic construction and manipulation of graphs
-- in Haskell. See <https://github.com/snowleopard/alga-paper this paper> for the
-- motivation behind the library, the underlying theory, and implementation details.
--
-- This module defines the type class 'ToGraph' for capturing data types that
-- can be converted to algebraic graphs. To make an instance of this class you
-- need to define just a single method ('toGraph' or 'foldg'), which gives you
-- access to many other useful methods for free (although note that the default
-- implementations may be suboptimal performance-wise).
--
-- This type class is similar to the standard type class 'Data.Foldable.Foldable'
-- defined for lists. Furthermore, one can define 'Foldable' methods 'foldMap'
-- and 'Data.Foldable.toList' using @ToGraph@.'foldg':
--
-- @
-- 'foldMap' f = 'foldg' 'mempty' f ('<>') ('<>')
-- 'Data.Foldable.toList' = 'foldg' [] 'pure' ('++') ('++')
-- @
--
-- However, the resulting 'Foldable' instance is problematic. For example,
-- folding equivalent algebraic graphs @1@ and @1@ + @1@ leads to different
-- results:
--
-- @
-- 'Data.Foldable.toList' (1 ) == [1]
-- 'Data.Foldable.toList' (1 + 1) == [1, 1]
-- @
--
-- To avoid such cases, we do not provide 'Foldable' instances for algebraic
-- graph datatypes. Furthermore, we require that the four arguments passed to
-- 'foldg' satisfy the laws of the algebra of graphs. The above definitions
-- of 'foldMap' and 'Data.Foldable.toList' violate this requirement, for example
-- @[1] ++ [1] /= [1]@, and are therefore disallowed.
-----------------------------------------------------------------------------
module Algebra.Graph.ToGraph (
-- * Type class
ToGraph (..),
-- * Derived functions
adjacencyMap, adjacencyIntMap, adjacencyMapTranspose, adjacencyIntMapTranspose
) where
import Data.IntMap (IntMap)
import Data.IntSet (IntSet)
import Data.Map (Map)
import Data.Set (Set)
import Data.Tree
import qualified Algebra.Graph as G
import qualified Algebra.Graph.AdjacencyMap as AM
import qualified Algebra.Graph.AdjacencyMap.Algorithm as AM
import qualified Algebra.Graph.Labelled as LG
import qualified Algebra.Graph.Labelled.AdjacencyMap as LAM
import qualified Algebra.Graph.NonEmpty.AdjacencyMap as NAM
import qualified Algebra.Graph.AdjacencyIntMap as AIM
import qualified Algebra.Graph.AdjacencyIntMap.Algorithm as AIM
import qualified Algebra.Graph.Relation as R
import qualified Algebra.Graph.Relation.Symmetric as SR
import qualified Data.IntMap as IntMap
import qualified Data.IntSet as IntSet
import qualified Data.Map as Map
import qualified Data.Set as Set
-- | The 'ToGraph' type class captures data types that can be converted to
-- algebraic graphs. Instances of this type class should satisfy the laws
-- specified by the default method definitions.
class ToGraph t where
{-# MINIMAL toGraph | foldg #-}
-- | The type of vertices of the resulting graph.
type ToVertex t
-- | Convert a value to the corresponding algebraic graph, see "Algebra.Graph".
--
-- @
-- toGraph == 'foldg' 'G.Empty' 'G.Vertex' 'G.Overlay' 'G.Connect'
-- @
toGraph :: t -> G.Graph (ToVertex t)
toGraph = foldg G.Empty G.Vertex G.Overlay G.Connect
-- | The method 'foldg' is used for generalised graph folding. It collapses
-- a given value by applying the provided graph construction primitives. The
-- order of arguments is: empty, vertex, overlay and connect, and it is
-- assumed that the arguments satisfy the axioms of the graph algebra.
--
-- @
-- foldg == Algebra.Graph.'G.foldg' . 'toGraph'
-- @
foldg :: r -> (ToVertex t -> r) -> (r -> r -> r) -> (r -> r -> r) -> t -> r
foldg e v o c = G.foldg e v o c . toGraph
-- | Check if a graph is empty.
--
-- @
-- isEmpty == 'foldg' True ('const' False) (&&) (&&)
-- @
isEmpty :: t -> Bool
isEmpty = foldg True (const False) (&&) (&&)
-- | Check if a graph contains a given vertex.
--
-- @
-- hasVertex x == 'foldg' False (==x) (||) (||)
-- @
hasVertex :: Eq (ToVertex t) => ToVertex t -> t -> Bool
hasVertex x = foldg False (==x) (||) (||)
-- | Check if a graph contains a given edge.
--
-- @
-- hasEdge x y == Algebra.Graph.'G.hasEdge' x y . 'toGraph'
-- @
hasEdge :: Eq (ToVertex t) => ToVertex t -> ToVertex t -> t -> Bool
hasEdge x y = G.hasEdge x y . toGraph
-- | The number of vertices in a graph.
--
-- @
-- vertexCount == Set.'Set.size' . 'vertexSet'
-- @
vertexCount :: Ord (ToVertex t) => t -> Int
vertexCount = Set.size . vertexSet
-- | The number of edges in a graph.
--
-- @
-- edgeCount == Set.'Set.size' . 'edgeSet'
-- @
edgeCount :: Ord (ToVertex t) => t -> Int
edgeCount = AM.edgeCount . toAdjacencyMap
-- | The sorted list of vertices of a given graph.
--
-- @
-- vertexList == Set.'Set.toAscList' . 'vertexSet'
-- @
vertexList :: Ord (ToVertex t) => t -> [ToVertex t]
vertexList = Set.toAscList . vertexSet
-- | The sorted list of edges of a graph.
--
-- @
-- edgeList == Set.'Set.toAscList' . 'edgeSet'
-- @
edgeList :: Ord (ToVertex t) => t -> [(ToVertex t, ToVertex t)]
edgeList = AM.edgeList . toAdjacencyMap
-- | The set of vertices of a graph.
--
-- @
-- vertexSet == 'foldg' Set.'Set.empty' Set.'Set.singleton' Set.'Set.union' Set.'Set.union'
-- @
vertexSet :: Ord (ToVertex t) => t -> Set (ToVertex t)
vertexSet = foldg Set.empty Set.singleton Set.union Set.union
-- | The set of vertices of a graph. Like 'vertexSet' but specialised for
-- graphs with vertices of type 'Int'.
--
-- @
-- vertexIntSet == 'foldg' IntSet.'IntSet.empty' IntSet.'IntSet.singleton' IntSet.'IntSet.union' IntSet.'IntSet.union'
-- @
vertexIntSet :: ToVertex t ~ Int => t -> IntSet
vertexIntSet = foldg IntSet.empty IntSet.singleton IntSet.union IntSet.union
-- | The set of edges of a graph.
--
-- @
-- edgeSet == Algebra.Graph.AdjacencyMap.'AM.edgeSet' . 'toAdjacencyMap'
-- @
edgeSet :: Ord (ToVertex t) => t -> Set (ToVertex t, ToVertex t)
edgeSet = AM.edgeSet . toAdjacencyMap
-- | The /preset/ of a vertex is the set of its /direct predecessors/.
--
-- @
-- preSet x == Algebra.Graph.AdjacencyMap.'AM.preSet' x . 'toAdjacencyMap'
-- @
preSet :: Ord (ToVertex t) => ToVertex t -> t -> Set (ToVertex t)
preSet x = AM.postSet x . toAdjacencyMapTranspose
-- | The /preset/ (here @preIntSet@) of a vertex is the set of its
-- /direct predecessors/. Like 'preSet' but specialised for graphs with
-- vertices of type 'Int'.
--
-- @
-- preIntSet x == Algebra.Graph.AdjacencyIntMap.'AIM.preIntSet' x . 'toAdjacencyIntMap'
-- @
preIntSet :: ToVertex t ~ Int => Int -> t -> IntSet
preIntSet x = AIM.postIntSet x . toAdjacencyIntMapTranspose
-- | The /postset/ of a vertex is the set of its /direct successors/.
--
-- @
-- postSet x == Algebra.Graph.AdjacencyMap.'AM.postSet' x . 'toAdjacencyMap'
-- @
postSet :: Ord (ToVertex t) => ToVertex t -> t -> Set (ToVertex t)
postSet x = AM.postSet x . toAdjacencyMap
-- | The /postset/ (here @postIntSet@) of a vertex is the set of its
-- /direct successors/. Like 'postSet' but specialised for graphs with
-- vertices of type 'Int'.
--
-- @
-- postIntSet x == Algebra.Graph.AdjacencyIntMap.'AIM.postIntSet' x . 'toAdjacencyIntMap'
-- @
postIntSet :: ToVertex t ~ Int => Int -> t -> IntSet
postIntSet x = AIM.postIntSet x . toAdjacencyIntMap
-- | The sorted /adjacency list/ of a graph.
--
-- @
-- adjacencyList == Algebra.Graph.AdjacencyMap.'AM.adjacencyList' . 'toAdjacencyMap'
-- @
adjacencyList :: Ord (ToVertex t) => t -> [(ToVertex t, [ToVertex t])]
adjacencyList = AM.adjacencyList . toAdjacencyMap
-- | Compute the /depth-first search/ forest of a graph that corresponds to
-- searching from each of the graph vertices in the 'Ord' @a@ order.
--
-- @
-- dfsForest == Algebra.Graph.AdjacencyMap.'AM.dfsForest' . toAdjacencyMap
-- @
dfsForest :: Ord (ToVertex t) => t -> Forest (ToVertex t)
dfsForest = AM.dfsForest . toAdjacencyMap
-- | Compute the /depth-first search/ forest of a graph, searching from each
-- of the given vertices in order. Note that the resulting forest does not
-- necessarily span the whole graph, as some vertices may be unreachable.
--
-- @
-- dfsForestFrom vs == Algebra.Graph.AdjacencyMap.'AM.dfsForestFrom' vs . toAdjacencyMap
-- @
dfsForestFrom :: Ord (ToVertex t) => [ToVertex t] -> t -> Forest (ToVertex t)
dfsForestFrom vs = AM.dfsForestFrom vs . toAdjacencyMap
-- | Compute the list of vertices visited by the /depth-first search/ in a
-- graph, when searching from each of the given vertices in order.
--
-- @
-- dfs vs == Algebra.Graph.AdjacencyMap.'AM.dfs' vs . toAdjacencyMap
-- @
dfs :: Ord (ToVertex t) => [ToVertex t] -> t -> [ToVertex t]
dfs vs = AM.dfs vs . toAdjacencyMap
-- | Compute the list of vertices that are /reachable/ from a given source
-- vertex in a graph. The vertices in the resulting list appear in the
-- /depth-first order/.
--
-- @
-- reachable x == Algebra.Graph.AdjacencyMap.'AM.reachable' x . toAdjacencyMap
-- @
reachable :: Ord (ToVertex t) => ToVertex t -> t -> [ToVertex t]
reachable x = AM.reachable x . toAdjacencyMap
-- | Compute the /topological sort/ of a graph or a @AM.Cycle@ if the
-- graph is cyclic.
--
-- @
-- topSort == Algebra.Graph.AdjacencyMap.'AM.topSort' . toAdjacencyMap
-- @
topSort :: Ord (ToVertex t) => t -> Either (AM.Cycle (ToVertex t)) [ToVertex t]
topSort = AM.topSort . toAdjacencyMap
-- | Check if a given graph is /acyclic/.
--
-- @
-- isAcyclic == Algebra.Graph.AdjacencyMap.'AM.isAcyclic' . toAdjacencyMap
-- @
isAcyclic :: Ord (ToVertex t) => t -> Bool
isAcyclic = AM.isAcyclic . toAdjacencyMap
-- | Convert a value to the corresponding 'AM.AdjacencyMap'.
--
-- @
-- toAdjacencyMap == 'foldg' 'AM.empty' 'AM.vertex' 'AM.overlay' 'AM.connect'
-- @
toAdjacencyMap :: Ord (ToVertex t) => t -> AM.AdjacencyMap (ToVertex t)
toAdjacencyMap = foldg AM.empty AM.vertex AM.overlay AM.connect
-- | Convert a value to the corresponding 'AM.AdjacencyMap' and transpose the
-- result.
--
-- @
-- toAdjacencyMapTranspose == 'foldg' 'AM.empty' 'AM.vertex' 'AM.overlay' ('flip' 'AM.connect')
-- @
toAdjacencyMapTranspose :: Ord (ToVertex t) => t -> AM.AdjacencyMap (ToVertex t)
toAdjacencyMapTranspose = foldg AM.empty AM.vertex AM.overlay (flip AM.connect)
-- | Convert a value to the corresponding 'AIM.AdjacencyIntMap'.
--
-- @
-- toAdjacencyIntMap == 'foldg' 'AIM.empty' 'AIM.vertex' 'AIM.overlay' 'AIM.connect'
-- @
toAdjacencyIntMap :: ToVertex t ~ Int => t -> AIM.AdjacencyIntMap
toAdjacencyIntMap = foldg AIM.empty AIM.vertex AIM.overlay AIM.connect
-- | Convert a value to the corresponding 'AIM.AdjacencyIntMap' and transpose
-- the result.
--
-- @
-- toAdjacencyIntMapTranspose == 'foldg' 'AIM.empty' 'AIM.vertex' 'AIM.overlay' ('flip' 'AIM.connect')
-- @
toAdjacencyIntMapTranspose :: ToVertex t ~ Int => t -> AIM.AdjacencyIntMap
toAdjacencyIntMapTranspose = foldg AIM.empty AIM.vertex AIM.overlay (flip AIM.connect)
-- | Check if a given forest is a valid /depth-first search/ forest of a
-- graph.
--
-- @
-- isDfsForestOf f == Algebra.Graph.AdjacencyMap.'AM.isDfsForestOf' f . toAdjacencyMap
-- @
isDfsForestOf :: Ord (ToVertex t) => Forest (ToVertex t) -> t -> Bool
isDfsForestOf f = AM.isDfsForestOf f . toAdjacencyMap
-- | Check if a given list of vertices is a valid /topological sort/ of a
-- graph.
--
-- @
-- isTopSortOf vs == Algebra.Graph.AdjacencyMap.'AM.isTopSortOf' vs . toAdjacencyMap
-- @
isTopSortOf :: Ord (ToVertex t) => [ToVertex t] -> t -> Bool
isTopSortOf vs = AM.isTopSortOf vs . toAdjacencyMap
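-- A minimal illustrative sketch (hypothetical; not part of the library API):
-- wrapping a raw edge list and defining only 'toGraph' is enough to obtain all
-- of the derived methods above.
newtype EdgeList a = EdgeList [(a, a)]

instance ToGraph (EdgeList a) where
    type ToVertex (EdgeList a) = a
    toGraph (EdgeList es) = G.edges es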
instance Ord a => ToGraph (G.Graph a) where
type ToVertex (G.Graph a) = a
toGraph = id
foldg = G.foldg
hasEdge = G.hasEdge
-- | See "Algebra.Graph.AdjacencyMap".
instance Ord a => ToGraph (AM.AdjacencyMap a) where
type ToVertex (AM.AdjacencyMap a) = a
toGraph = G.stars
. map (fmap Set.toList)
. Map.toList
. AM.adjacencyMap
isEmpty = AM.isEmpty
hasVertex = AM.hasVertex
hasEdge = AM.hasEdge
vertexCount = AM.vertexCount
edgeCount = AM.edgeCount
vertexList = AM.vertexList
vertexSet = AM.vertexSet
vertexIntSet = IntSet.fromAscList . AM.vertexList
edgeList = AM.edgeList
edgeSet = AM.edgeSet
adjacencyList = AM.adjacencyList
preSet = AM.preSet
postSet = AM.postSet
dfsForest = AM.dfsForest
dfsForestFrom = AM.dfsForestFrom
dfs = AM.dfs
reachable = AM.reachable
topSort = AM.topSort
isAcyclic = AM.isAcyclic
toAdjacencyMap = id
toAdjacencyIntMap = AIM.fromAdjacencyMap
toAdjacencyMapTranspose = AM.transpose . toAdjacencyMap
toAdjacencyIntMapTranspose = AIM.transpose . toAdjacencyIntMap
isDfsForestOf = AM.isDfsForestOf
isTopSortOf = AM.isTopSortOf
instance ToGraph AIM.AdjacencyIntMap where
type ToVertex AIM.AdjacencyIntMap = Int
toGraph = G.stars
. map (fmap IntSet.toList)
. IntMap.toList
. AIM.adjacencyIntMap
isEmpty = AIM.isEmpty
hasVertex = AIM.hasVertex
hasEdge = AIM.hasEdge
vertexCount = AIM.vertexCount
edgeCount = AIM.edgeCount
vertexList = AIM.vertexList
vertexSet = Set.fromAscList . IntSet.toAscList . AIM.vertexIntSet
vertexIntSet = AIM.vertexIntSet
edgeList = AIM.edgeList
edgeSet = AIM.edgeSet
adjacencyList = AIM.adjacencyList
preIntSet = AIM.preIntSet
postIntSet = AIM.postIntSet
dfsForest = AIM.dfsForest
dfsForestFrom = AIM.dfsForestFrom
dfs = AIM.dfs
reachable = AIM.reachable
topSort = AIM.topSort
isAcyclic = AIM.isAcyclic
toAdjacencyMap = AM.stars . AIM.adjacencyList
toAdjacencyIntMap = id
toAdjacencyMapTranspose = AM.transpose . toAdjacencyMap
toAdjacencyIntMapTranspose = AIM.transpose . toAdjacencyIntMap
isDfsForestOf = AIM.isDfsForestOf
isTopSortOf = AIM.isTopSortOf
-- | See "Algebra.Graph.Labelled".
instance (Eq e, Monoid e, Ord a) => ToGraph (LG.Graph e a) where
type ToVertex (LG.Graph e a) = a
foldg e v o c = LG.foldg e v (\e -> if e == mempty then o else c)
vertexList = LG.vertexList
vertexSet = LG.vertexSet
toAdjacencyMap = LAM.skeleton
. LG.foldg LAM.empty LAM.vertex LAM.connect
toAdjacencyMapTranspose = LAM.skeleton
. LG.foldg LAM.empty LAM.vertex (fmap flip LAM.connect)
toAdjacencyIntMap = toAdjacencyIntMap . toAdjacencyMap
toAdjacencyIntMapTranspose = toAdjacencyIntMapTranspose . toAdjacencyMapTranspose
-- | See "Algebra.Graph.Labelled.AdjacencyMap".
instance (Eq e, Monoid e, Ord a) => ToGraph (LAM.AdjacencyMap e a) where
type ToVertex (LAM.AdjacencyMap e a) = a
toGraph = toGraph . LAM.skeleton
foldg e v o c = foldg e v o c . LAM.skeleton
isEmpty = LAM.isEmpty
hasVertex = LAM.hasVertex
hasEdge = LAM.hasEdge
vertexCount = LAM.vertexCount
edgeCount = LAM.edgeCount
vertexList = LAM.vertexList
vertexSet = LAM.vertexSet
vertexIntSet = IntSet.fromAscList . LAM.vertexList
edgeList = edgeList . LAM.skeleton
edgeSet = edgeSet . LAM.skeleton
adjacencyList = adjacencyList . LAM.skeleton
preSet = LAM.preSet
postSet = LAM.postSet
toAdjacencyMap = LAM.skeleton
toAdjacencyIntMap = toAdjacencyIntMap . LAM.skeleton
toAdjacencyMapTranspose = toAdjacencyMapTranspose . LAM.skeleton
toAdjacencyIntMapTranspose = toAdjacencyIntMapTranspose . LAM.skeleton
-- | See "Algebra.Graph.NonEmpty.AdjacencyMap".
instance Ord a => ToGraph (NAM.AdjacencyMap a) where
type ToVertex (NAM.AdjacencyMap a) = a
toGraph = toGraph . toAdjacencyMap
isEmpty _ = False
hasVertex = NAM.hasVertex
hasEdge = NAM.hasEdge
vertexCount = NAM.vertexCount
edgeCount = NAM.edgeCount
vertexList = vertexList . toAdjacencyMap
vertexSet = NAM.vertexSet
vertexIntSet = vertexIntSet . toAdjacencyMap
edgeList = NAM.edgeList
edgeSet = NAM.edgeSet
adjacencyList = adjacencyList . toAdjacencyMap
preSet = NAM.preSet
postSet = NAM.postSet
dfsForest = dfsForest . toAdjacencyMap
dfsForestFrom xs = dfsForestFrom xs . toAdjacencyMap
dfs xs = dfs xs . toAdjacencyMap
reachable x = reachable x . toAdjacencyMap
topSort = topSort . toAdjacencyMap
isAcyclic = isAcyclic . toAdjacencyMap
toAdjacencyMap = NAM.fromNonEmpty
toAdjacencyIntMap = toAdjacencyIntMap . toAdjacencyMap
toAdjacencyMapTranspose = toAdjacencyMap . NAM.transpose
toAdjacencyIntMapTranspose = toAdjacencyIntMap . NAM.transpose
isDfsForestOf f = isDfsForestOf f . toAdjacencyMap
isTopSortOf x = isTopSortOf x . toAdjacencyMap
-- TODO: Get rid of "Relation.Internal" and move this instance to "Relation".
-- | See "Algebra.Graph.Relation".
instance Ord a => ToGraph (R.Relation a) where
type ToVertex (R.Relation a) = a
toGraph r = G.vertices (Set.toList $ R.domain r) `G.overlay`
G.edges (Set.toList $ R.relation r)
isEmpty = R.isEmpty
hasVertex = R.hasVertex
hasEdge = R.hasEdge
vertexCount = R.vertexCount
edgeCount = R.edgeCount
vertexList = R.vertexList
vertexSet = R.vertexSet
vertexIntSet = IntSet.fromAscList . R.vertexList
edgeList = R.edgeList
edgeSet = R.edgeSet
adjacencyList = R.adjacencyList
toAdjacencyMap = AM.stars . R.adjacencyList
toAdjacencyIntMap = AIM.stars . R.adjacencyList
toAdjacencyMapTranspose = AM.transpose . toAdjacencyMap
toAdjacencyIntMapTranspose = AIM.transpose . toAdjacencyIntMap
-- TODO: This instance is probably wrong because of the way it treats edges.
-- Find out a better way to integrate undirected graphs into 'ToGraph'.
-- | See "Algebra.Graph.Symmetric.Relation". Warning: this instance is likely to
-- be modified or removed in future.
instance Ord a => ToGraph (SR.Relation a) where
type ToVertex (SR.Relation a) = a
toGraph = toGraph . SR.fromSymmetric
isEmpty = SR.isEmpty
hasVertex = SR.hasVertex
hasEdge = SR.hasEdge
vertexCount = SR.vertexCount
edgeCount = SR.edgeCount
vertexList = SR.vertexList
vertexSet = SR.vertexSet
vertexIntSet = IntSet.fromAscList . SR.vertexList
edgeList = SR.edgeList
edgeSet = SR.edgeSet
adjacencyList = SR.adjacencyList
toAdjacencyMap = toAdjacencyMap . SR.fromSymmetric
toAdjacencyIntMap = toAdjacencyIntMap . SR.fromSymmetric
toAdjacencyMapTranspose = toAdjacencyMap
toAdjacencyIntMapTranspose = toAdjacencyIntMap
-- | The /adjacency map/ of a graph: each vertex is associated with a set of its
-- /direct successors/.
--
-- @
-- adjacencyMap == Algebra.Graph.AdjacencyMap.'Algebra.Graph.AdjacencyMap.adjacencyMap' . 'toAdjacencyMap'
-- @
adjacencyMap :: ToGraph t => Ord (ToVertex t) => t -> Map (ToVertex t) (Set (ToVertex t))
adjacencyMap = AM.adjacencyMap . toAdjacencyMap
-- | The /adjacency map/ of a graph: each vertex is associated with a set of its
-- /direct successors/. Like 'adjacencyMap' but specialised for graphs with
-- vertices of type 'Int'.
--
-- @
-- adjacencyIntMap == Algebra.Graph.AdjacencyIntMap.'Algebra.Graph.AdjacencyIntMap.adjacencyIntMap' . 'toAdjacencyIntMap'
-- @
adjacencyIntMap :: (ToGraph t, ToVertex t ~ Int) => t -> IntMap IntSet
adjacencyIntMap = AIM.adjacencyIntMap . toAdjacencyIntMap
-- | The transposed /adjacency map/ of a graph: each vertex is associated with a
-- set of its /direct predecessors/.
--
-- @
-- adjacencyMapTranspose == Algebra.Graph.AdjacencyMap.'Algebra.Graph.AdjacencyMap.adjacencyMap' . 'toAdjacencyMapTranspose'
-- @
adjacencyMapTranspose :: (ToGraph t, Ord (ToVertex t)) => t -> Map (ToVertex t) (Set (ToVertex t))
adjacencyMapTranspose = AM.adjacencyMap . toAdjacencyMapTranspose
-- | The transposed /adjacency map/ of a graph: each vertex is associated with a
-- set of its /direct predecessors/. Like 'adjacencyMapTranspose' but
-- specialised for graphs with vertices of type 'Int'.
--
-- @
-- adjacencyIntMapTranspose == Algebra.Graph.AdjacencyIntMap.'Algebra.Graph.AdjacencyIntMap.adjacencyIntMap' . 'toAdjacencyIntMapTranspose'
-- @
adjacencyIntMapTranspose :: (ToGraph t, ToVertex t ~ Int) => t -> IntMap IntSet
adjacencyIntMapTranspose = AIM.adjacencyIntMap . toAdjacencyIntMapTranspose
| snowleopard/alga | src/Algebra/Graph/ToGraph.hs | mit | 23,734 | 0 | 13 | 6,979 | 3,792 | 2,128 | 1,664 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Products.ProductRepo
( ProductRepo (..)
, Git.FileModification (..)
, Git.ParseResult
, Git.parseStatusDiff
, getStatusDiff
, codeRepositoryDir
, fetchRepo
, findProductRepos
, findProductRepo
, updateRepo
) where
import CommonCreatures (WithErr)
import Config.Config (GitConfig (..))
import Control.Monad (mzero)
import Control.Monad.Reader (ask, liftIO)
import qualified Data.Aeson as AE
import Data.Aeson ((.=), (.!=), (.:), (.:?))
import Data.Text (Text)
import qualified Database.Esqueleto as E
import Database.Esqueleto ((^.))
import qualified Database.Persist.Postgresql as DB
import Database.Types (WithDBPool (..))
import qualified Git.Git as Git
import Models
import ModelTypes (RepositoryState (..))
import qualified Products.Product as P
import System.Directory (doesDirectoryExist, createDirectoryIfMissing)
data ProductRepo =
ProductRepo { getProductId :: Maybe P.ProductID
, getProductName :: Text
, getProductRepoUrl :: Text
, getProductRepoState :: RepositoryState
, getProductRepoError :: Maybe Text
} deriving (Show, Eq)
instance AE.ToJSON ProductRepo where
toJSON (ProductRepo pId pName pRepoUrl pRepoState pRepoError) =
AE.object [ "productId" .= pId
, "productName" .= pName
, "repoUrl" .= pRepoUrl
, "repoState" .= pRepoState
, "repoError" .= pRepoError
]
instance AE.FromJSON ProductRepo where
parseJSON (AE.Object v) =
ProductRepo
<$> v .:? "productId" .!= Nothing
<*> v .: "productName"
<*> v .: "repoUrl"
<*> v .:? "repoState" .!= Unready
<*> v .:? "repoError" .!= Nothing
parseJSON _ = mzero
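-- Illustrative JSON shape produced by the instances above (field values are
-- example placeholders; the exact encoding of 'RepositoryState' is defined in
-- ModelTypes):
--
-- { "productId": 1
-- , "productName": "my-product"
-- , "repoUrl": "https://example.com/my-product.git"
-- , "repoState": "Unready"
-- , "repoError": null
-- }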
findProductRepos :: WithDBPool [ProductRepo]
findProductRepos = ask >>= \pool ->
(liftIO (DB.runSqlPool findProductReposQuery pool)) >>= (return . (fmap toProductRepo))
where
findProductReposQuery =
E.select $ E.from $ \(repoStatus `E.InnerJoin` prod) -> do
E.on $ repoStatus ^. RepositoryStatusProductId E.==. prod ^. ProductId
return (repoStatus, prod)
findProductRepo :: P.ProductID -> WithDBPool (Maybe ProductRepo)
findProductRepo prodID = ask >>= \pool ->
(liftIO (DB.runSqlPool findProductRepoQuery pool)) >>= \result ->
case result of
[] -> return Nothing
(x:_) -> return $ Just (toProductRepo x)
where
    findProductRepoQuery =
      E.select $ E.from $ \(repoStatus `E.InnerJoin` prod) -> do
        E.on $ repoStatus ^. RepositoryStatusProductId E.==. prod ^. ProductId
        E.where_ $ prod ^. ProductId E.==. (E.val (toKey prodID))
        return (repoStatus, prod)
toProductRepo :: (E.Entity RepositoryStatus, E.Entity P.Product) -> ProductRepo
toProductRepo (rsEntity, prodEntity) =
ProductRepo { getProductId = Just $ P.toProductID $ prodEntity
, getProductName = productName . P.toProduct $ prodEntity
, getProductRepoUrl = productRepoUrl . P.toProduct $ prodEntity
, getProductRepoState = repositoryStatusState . toProductRepoStatus $ rsEntity
, getProductRepoError = repositoryStatusError . toProductRepoStatus $ rsEntity
}
toProductRepoStatus :: DB.Entity RepositoryStatus -> RepositoryStatus
toProductRepoStatus dbEntity = DB.entityVal dbEntity
-- (ReaderT GitConfig (WithErr a)) could be a useful monad here
updateRepo :: P.Product -> P.ProductID -> GitConfig -> WithErr String
updateRepo prod prodID gitConfig =
(liftIO $ createRequiredDirectories prodID gitConfig)
>> updateGitRepo (productRepoUrl prod) prodID gitConfig
-- (ReaderT GitConfig (WithErr a)) could be a useful monad here
updateGitRepo :: Text -> P.ProductID -> GitConfig -> WithErr String
updateGitRepo gitUrl prodID gitConfig = do
let repositoryPath = codeRepositoryDir prodID gitConfig
doesRepoExist <- liftIO $ doesDirectoryExist repositoryPath
case doesRepoExist of
True -> Git.pull repositoryPath
False -> Git.clone repositoryPath gitUrl
fetchRepo :: P.ProductID -> GitConfig -> WithErr String
fetchRepo prodID gitConfig =
let repositoryPath = codeRepositoryDir prodID gitConfig
in Git.fetch repositoryPath
getStatusDiff :: P.ProductID -> GitConfig -> WithErr String
getStatusDiff prodID gitConfig =
let repositoryPath = codeRepositoryDir prodID gitConfig
in Git.statusDiff repositoryPath
-- maybe the combination of a P.ProductID and GitConfig is a ProductRepository?
productDir :: P.ProductID -> GitConfig -> FilePath
productDir prodID gitConfig =
(repoBasePath gitConfig) ++ "/products/" ++ (show prodID)
-- maybe the combination of a P.ProductID and GitConfig is a ProductRepository?
codeRepositoryDir :: P.ProductID -> GitConfig -> FilePath
codeRepositoryDir prodID gitConfig =
productDir prodID gitConfig ++ "/repo"
-- maybe the combination of a P.ProductID and GitConfig is a ProductRepository?
createRequiredDirectories :: P.ProductID -> GitConfig -> IO ()
createRequiredDirectories prodID gitConfig =
createDirectoryIfMissing True (productDir prodID gitConfig)
| gust/feature-creature | legacy/lib/Products/ProductRepo.hs | mit | 5,227 | 0 | 18 | 1,128 | 1,310 | 710 | 600 | 106 | 2 |
import System.INotify
import Control.Concurrent (threadDelay)
import Control.Monad (forever)
import System.IO
digestsFilePath :: String
digestsFilePath = "/mnt/lpd-distlib/streamer/v1/digests.list"
doAddWatch :: INotify -> IO ()
doAddWatch ino = do
let modif = CloseWrite
    -- Note: the callback to the provided function is sequential (synchronized)
handle <- openFile digestsFilePath ReadMode
_ <- addWatch ino [modif] digestsFilePath (func handle)
return ()
where
func handle evt = do
line <- hGetLine handle
-- putStrLn $ show evt
putStrLn line
-- threadDelay 100000
-- putStrLn "finished waiting.."
return ()
main :: IO ()
main = do
ino <- initINotify
doAddWatch ino
forever $ do
-- putStrLn "waiting for the worms to come.."
threadDelay 1000000 | adizere/nifty-tree | playground/inotify.hs | mit | 873 | 0 | 10 | 240 | 189 | 92 | 97 | 21 | 1 |
import Data.List
main :: IO ()
main = do
contents <- getContents
let threes = groupsOf 3 (map read $ lines contents)
roadSystem = map (\[a,b,c] -> Section a b c) threes
path = optimalPath roadSystem
pathString = concat $ map (show . fst) path
pathPrice = sum $ map snd path
putStrLn $ "The best path to take is: " ++ pathString
putStrLn $ "The price is: " ++ show pathPrice
data Section = Section { getA :: Int, getB :: Int, getC :: Int } deriving (Read, Show)
type RoadSystem = [Section]
data Label = A | B | C deriving (Read, Show)
type Path = [(Label, Int)]
roadStep :: (Path, Path) -> Section -> (Path, Path)
roadStep (pathA, pathB) (Section a b c) =
let priceA = sum $ map snd pathA
priceB = sum $ map snd pathB
forwardPriceToA = priceA + a
crossPriceToA = priceB + b + c
forwardPriceToB = priceB + b
crossPriceToB = priceA + a + c
newPathToA = if forwardPriceToA <= crossPriceToA
                        then (A,a):pathA
else (C,c):(B,b):pathB
newPathToB = if forwardPriceToB <= crossPriceToB
then (B,b):pathB
else (C,c):(A,a):pathA
in (newPathToA, newPathToB)
optimalPath :: RoadSystem -> Path
optimalPath roadSystem =
let (bestAPath, bestBPath) = foldl roadStep ([],[]) roadSystem
in if sum (map snd bestAPath) <= sum (map snd bestBPath)
then reverse bestAPath
else reverse bestBPath
groupsOf :: Int -> [a] -> [[a]]
groupsOf 0 _ = undefined
groupsOf _ [] = []
groupsOf n xs = take n xs : groupsOf n (drop n xs)
| friedbrice/Haskell | ch10/heathrow2.hs | gpl-2.0 | 1,637 | 0 | 13 | 494 | 642 | 346 | 296 | 40 | 3 |
{-#LANGUAGE GADTs #-}
{-#LANGUAGE TypeFamilies #-}
{-#LANGUAGE DataKinds #-}
{-#LANGUAGE KindSignatures #-}
{-#LANGUAGE StandaloneDeriving #-}
module PromotedDataTypes() where
-- FC-pro version - Value and type constructors
data Nat = Zero | Suc Nat
-- Indexed type family (type functions)
type family Plus (a :: Nat) (b :: Nat) :: Nat
type instance Plus Zero b = b
type instance Plus (Suc a) b = Suc (Plus a b)
-- are promoted to type and kind constructors respectively.
data Vec :: * -> Nat -> * where
Nil :: Vec a Zero
Cons :: a -> Vec a n -> Vec a (Suc n)
deriving instance Show a => Show (Vec a n)
concatVec :: Vec a (n :: Nat) -> Vec a (m::Nat) -> Vec a (Plus n m)
concatVec Nil v2 = v2
concatVec (Cons a v1) v2 = Cons a $ concatVec v1 v2
lengthVec :: Vec a (n :: Nat) -> Int
lengthVec Nil = 0
lengthVec (Cons a v1) = (+) 1 (lengthVec v1)
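-- Illustrative example (length-indexed concatenation; example values only):
--
-- >>> lengthVec (concatVec (Cons 'a' Nil) (Cons 'b' (Cons 'c' Nil)))
-- 3
--
-- The concatenated vector has type Vec Char (Suc (Suc (Suc Zero))), so its
-- length is tracked in the type via the promoted Nat kind.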
| krakrjak/haskell-playground | TypeFunctions/PromotedDataTypes.hs | gpl-2.0 | 871 | 0 | 10 | 194 | 312 | 170 | 142 | 20 | 1 |
{-|
Module : Bench
Description : Memory benchmark of Multilinear library
Copyright : (c) Artur M. Brodzki, 2018
License : BSD3
Maintainer : [email protected]
Stability : experimental
Portability : Windows/POSIX
-}
module Main (
main
) where
import Weigh
import Multilinear.Generic.GPU
import qualified Multilinear.Matrix as Matrix
import qualified Multilinear.Vector as Vector
-- | Simple generator function for benchmarked matrices
gen :: Int -> Int -> Double
gen j k = sin (fromIntegral j) + cos (fromIntegral k)
-- matrix sizes
s1 :: Int
s1 = 64
s2 :: Int
s2 = 256
s3 :: Int
s3 = 1024
-- | ENTRY POINT
main :: IO ()
main = mainWith (do
setColumns [Case, Allocated, GCs, Live, Max]
-- Benchmarking small vectors
value "vector 1 elem generation" (Vector.fromIndices "i" 1 fromIntegral :: Tensor Double)
value "vector 2 elem generation" (Vector.fromIndices "i" 2 fromIntegral :: Tensor Double)
value "vector 3 elem generation" (Vector.fromIndices "i" 3 fromIntegral :: Tensor Double)
-- Benchmarking matrix generators
value "matrix 64 x 64 generation"
(Matrix.fromIndices "ij" s1 s1 gen :: Tensor Double)
value "matrix 256 x 256 generation"
(Matrix.fromIndices "ij" s2 s2 gen :: Tensor Double)
value "matrix 1024 x 1024 generation"
(Matrix.fromIndices "ij" s3 s3 gen :: Tensor Double)
-- Benchmarking matrix addition
func "matrix 64 x 64 addition"
(+ Matrix.fromIndices "ab" s1 s1 gen)
(Matrix.fromIndices "ab" s1 s1 (\a b -> fromIntegral a + fromIntegral b) :: Tensor Double)
func "matrix 256 x 256 addition"
(+ Matrix.fromIndices "ab" s2 s2 gen)
(Matrix.fromIndices "ab" s2 s2 (\a b -> fromIntegral a + fromIntegral b) :: Tensor Double)
func "matrix 1024 x 1024 addition"
(+ Matrix.fromIndices "ab" s3 s3 gen)
(Matrix.fromIndices "ab" s3 s3 (\a b -> fromIntegral a + fromIntegral b) :: Tensor Double)
-- Benchmarking matrix multiplication
func "matrix 40 x 4,000 multiplication"
(* Matrix.fromIndices "jk" 4000 40 gen)
(Matrix.fromIndices "ij" 40 4000 gen :: Tensor Double)
func "matrix 40 x 16,000 multiplication"
(* Matrix.fromIndices "jk" 16000 40 gen)
(Matrix.fromIndices "ij" 40 16000 gen :: Tensor Double)
func "matrix 40 x 64,000 multiplication"
(* Matrix.fromIndices "jk" 64000 40 gen)
(Matrix.fromIndices "ij" 40 64000 gen :: Tensor Double)
)
| ArturB/Multilinear | benchmark/gpu/memory/Bench.hs | gpl-3.0 | 2,501 | 0 | 15 | 599 | 651 | 332 | 319 | 44 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Model.EventMissile
( EventMissile (..)
) where
import Data.Aeson
import GHC.Generics
import Test.QuickCheck
import Model.Event
import Model.Number
import Model.RobotInfo
data EventMissile = EventMissile
{ eventType :: Number
, activationTime :: Float
, robot :: RobotInfo
, direction :: Float
, distance :: Float
, speed :: Float
} deriving (Show, Eq, Generic)
instance FromJSON EventMissile
instance ToJSON EventMissile
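-- With the generically derived instances above, the JSON object keys match the
-- record field names: "eventType", "activationTime", "robot", "direction",
-- "distance" and "speed" (assuming the default generic encoding options).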
instance Arbitrary EventMissile where
arbitrary = EventMissile <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
| massimo-zaniboni/netrobots | robot_examples/haskell-servant/rest_api/lib/Model/EventMissile.hs | gpl-3.0 | 788 | 0 | 11 | 142 | 161 | 95 | 66 | 25 | 0 |
{---------------------------------------------------------------------}
{- Copyright 2015, 2016 Nathan Bloomfield -}
{- -}
{- This file is part of Feivel. -}
{- -}
{- Feivel is free software: you can redistribute it and/or modify -}
{- it under the terms of the GNU General Public License version 3, -}
{- as published by the Free Software Foundation. -}
{- -}
{- Feivel is distributed in the hope that it will be useful, but -}
{- WITHOUT ANY WARRANTY; without even the implied warranty of -}
{- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -}
{- GNU General Public License for more details. -}
{- -}
{- You should have received a copy of the GNU General Public License -}
{- along with Feivel. If not, see <http://www.gnu.org/licenses/>. -}
{---------------------------------------------------------------------}
module Feivel.Grammar.Doc where
import Feivel.Grammar.Util
data DocLeaf a doc str
-- Primitives
= Empty
| DocText Text
| Escaped Char
| Scope doc
| NakedKey Key
| NakedExpr a -- XX
| Import String (Maybe String) doc
| DocMacro [(Type, Key, a)] a -- XX, MacTo DD
-- Combination
| Cat [doc]
| CatPar [doc]
| Alt [doc]
| Shuffle [doc]
-- Flow Control
| IfThenElse a doc doc -- BB
| Cond [(a, doc)] doc -- BB
-- Binding
| LetIn Key a doc -- XX
| Define Type Key a doc -- XX
-- Selection and Repetition
| ForSay Key a doc (Maybe doc) -- ListOf XX
| Select Key a doc -- ListOf XX
-- Shell Command
| Shell String [str] (Maybe doc)
-- Debugging
| Bail a -- SS
| ShowState
deriving (Eq, Show)
| nbloomf/feivel | src/Feivel/Grammar/Doc.hs | gpl-3.0 | 1,994 | 0 | 8 | 745 | 255 | 165 | 90 | 25 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.DynamicTargetingKeys.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a list of dynamic targeting keys.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ Campaign Manager 360 API Reference> for @dfareporting.dynamicTargetingKeys.list@.
module Network.Google.Resource.DFAReporting.DynamicTargetingKeys.List
(
-- * REST Resource
DynamicTargetingKeysListResource
-- * Creating a Request
, dynamicTargetingKeysList
, DynamicTargetingKeysList
-- * Request Lenses
, dtklXgafv
, dtklUploadProtocol
, dtklObjectType
, dtklAccessToken
, dtklAdvertiserId
, dtklObjectId
, dtklUploadType
, dtklProFileId
, dtklNames
, dtklCallback
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.dynamicTargetingKeys.list@ method which the
-- 'DynamicTargetingKeysList' request conforms to.
type DynamicTargetingKeysListResource =
"dfareporting" :>
"v3.5" :>
"userprofiles" :>
Capture "profileId" (Textual Int64) :>
"dynamicTargetingKeys" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "objectType"
DynamicTargetingKeysListObjectType
:>
QueryParam "access_token" Text :>
QueryParam "advertiserId" (Textual Int64) :>
QueryParam "objectId" (Textual Int64) :>
QueryParam "uploadType" Text :>
QueryParams "names" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] DynamicTargetingKeysListResponse
-- | Retrieves a list of dynamic targeting keys.
--
-- /See:/ 'dynamicTargetingKeysList' smart constructor.
data DynamicTargetingKeysList =
DynamicTargetingKeysList'
{ _dtklXgafv :: !(Maybe Xgafv)
, _dtklUploadProtocol :: !(Maybe Text)
, _dtklObjectType :: !(Maybe DynamicTargetingKeysListObjectType)
, _dtklAccessToken :: !(Maybe Text)
, _dtklAdvertiserId :: !(Maybe (Textual Int64))
, _dtklObjectId :: !(Maybe (Textual Int64))
, _dtklUploadType :: !(Maybe Text)
, _dtklProFileId :: !(Textual Int64)
, _dtklNames :: !(Maybe [Text])
, _dtklCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'DynamicTargetingKeysList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dtklXgafv'
--
-- * 'dtklUploadProtocol'
--
-- * 'dtklObjectType'
--
-- * 'dtklAccessToken'
--
-- * 'dtklAdvertiserId'
--
-- * 'dtklObjectId'
--
-- * 'dtklUploadType'
--
-- * 'dtklProFileId'
--
-- * 'dtklNames'
--
-- * 'dtklCallback'
dynamicTargetingKeysList
:: Int64 -- ^ 'dtklProFileId'
-> DynamicTargetingKeysList
dynamicTargetingKeysList pDtklProFileId_ =
DynamicTargetingKeysList'
{ _dtklXgafv = Nothing
, _dtklUploadProtocol = Nothing
, _dtklObjectType = Nothing
, _dtklAccessToken = Nothing
, _dtklAdvertiserId = Nothing
, _dtklObjectId = Nothing
, _dtklUploadType = Nothing
, _dtklProFileId = _Coerce # pDtklProFileId_
, _dtklNames = Nothing
, _dtklCallback = Nothing
}
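-- Illustrative usage (profile ID and key names are example values only):
-- build the request with the smart constructor and adjust fields with the
-- provided lenses, e.g.
--
-- > dynamicTargetingKeysList 12345 & dtklNames .~ ["color", "shape"]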
-- | V1 error format.
dtklXgafv :: Lens' DynamicTargetingKeysList (Maybe Xgafv)
dtklXgafv
= lens _dtklXgafv (\ s a -> s{_dtklXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
dtklUploadProtocol :: Lens' DynamicTargetingKeysList (Maybe Text)
dtklUploadProtocol
= lens _dtklUploadProtocol
(\ s a -> s{_dtklUploadProtocol = a})
-- | Select only dynamic targeting keys with this object type.
dtklObjectType :: Lens' DynamicTargetingKeysList (Maybe DynamicTargetingKeysListObjectType)
dtklObjectType
= lens _dtklObjectType
(\ s a -> s{_dtklObjectType = a})
-- | OAuth access token.
dtklAccessToken :: Lens' DynamicTargetingKeysList (Maybe Text)
dtklAccessToken
= lens _dtklAccessToken
(\ s a -> s{_dtklAccessToken = a})
-- | Select only dynamic targeting keys whose object has this advertiser ID.
dtklAdvertiserId :: Lens' DynamicTargetingKeysList (Maybe Int64)
dtklAdvertiserId
= lens _dtklAdvertiserId
(\ s a -> s{_dtklAdvertiserId = a})
. mapping _Coerce
-- | Select only dynamic targeting keys with this object ID.
dtklObjectId :: Lens' DynamicTargetingKeysList (Maybe Int64)
dtklObjectId
= lens _dtklObjectId (\ s a -> s{_dtklObjectId = a})
. mapping _Coerce
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
dtklUploadType :: Lens' DynamicTargetingKeysList (Maybe Text)
dtklUploadType
= lens _dtklUploadType
(\ s a -> s{_dtklUploadType = a})
-- | User profile ID associated with this request.
dtklProFileId :: Lens' DynamicTargetingKeysList Int64
dtklProFileId
= lens _dtklProFileId
(\ s a -> s{_dtklProFileId = a})
. _Coerce
-- | Select only dynamic targeting keys exactly matching these names.
dtklNames :: Lens' DynamicTargetingKeysList [Text]
dtklNames
= lens _dtklNames (\ s a -> s{_dtklNames = a}) .
_Default
. _Coerce
-- | JSONP
dtklCallback :: Lens' DynamicTargetingKeysList (Maybe Text)
dtklCallback
= lens _dtklCallback (\ s a -> s{_dtklCallback = a})
instance GoogleRequest DynamicTargetingKeysList where
type Rs DynamicTargetingKeysList =
DynamicTargetingKeysListResponse
type Scopes DynamicTargetingKeysList =
'["https://www.googleapis.com/auth/dfatrafficking"]
requestClient DynamicTargetingKeysList'{..}
= go _dtklProFileId _dtklXgafv _dtklUploadProtocol
_dtklObjectType
_dtklAccessToken
_dtklAdvertiserId
_dtklObjectId
_dtklUploadType
(_dtklNames ^. _Default)
_dtklCallback
(Just AltJSON)
dFAReportingService
where go
= buildClient
(Proxy :: Proxy DynamicTargetingKeysListResource)
mempty
| brendanhay/gogol | gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/DynamicTargetingKeys/List.hs | mpl-2.0 | 6,970 | 0 | 22 | 1,696 | 1,099 | 628 | 471 | 157 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- |
-- Module : Network.Google.ContainerBuilder
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Builds container images in the cloud.
--
-- /See:/ <https://cloud.google.com/container-builder/docs/ Google Cloud Container Builder API Reference>
module Network.Google.ContainerBuilder
(
-- * Service Configuration
containerBuilderService
-- * OAuth Scopes
, cloudPlatformScope
-- * API Declaration
, ContainerBuilderAPI
-- * Resources
-- ** cloudbuild.operations.cancel
, module Network.Google.Resource.Cloudbuild.Operations.Cancel
-- ** cloudbuild.operations.get
, module Network.Google.Resource.Cloudbuild.Operations.Get
-- ** cloudbuild.operations.list
, module Network.Google.Resource.Cloudbuild.Operations.List
-- ** cloudbuild.projects.builds.cancel
, module Network.Google.Resource.Cloudbuild.Projects.Builds.Cancel
-- ** cloudbuild.projects.builds.create
, module Network.Google.Resource.Cloudbuild.Projects.Builds.Create
-- ** cloudbuild.projects.builds.get
, module Network.Google.Resource.Cloudbuild.Projects.Builds.Get
-- ** cloudbuild.projects.builds.list
, module Network.Google.Resource.Cloudbuild.Projects.Builds.List
-- ** cloudbuild.projects.triggers.create
, module Network.Google.Resource.Cloudbuild.Projects.Triggers.Create
-- ** cloudbuild.projects.triggers.delete
, module Network.Google.Resource.Cloudbuild.Projects.Triggers.Delete
-- ** cloudbuild.projects.triggers.get
, module Network.Google.Resource.Cloudbuild.Projects.Triggers.Get
-- ** cloudbuild.projects.triggers.list
, module Network.Google.Resource.Cloudbuild.Projects.Triggers.List
-- ** cloudbuild.projects.triggers.patch
, module Network.Google.Resource.Cloudbuild.Projects.Triggers.Patch
-- * Types
-- ** BuildStep
, BuildStep
, buildStep
, bsDir
, bsArgs
, bsEnv
, bsEntrypoint
, bsWaitFor
, bsName
, bsId
-- ** SourceProvenance
, SourceProvenance
, sourceProvenance
, spResolvedRepoSource
, spResolvedStorageSource
, spFileHashes
-- ** ListBuildsResponse
, ListBuildsResponse
, listBuildsResponse
, lbrNextPageToken
, lbrBuilds
-- ** Status
, Status
, status
, sDetails
, sCode
, sMessage
-- ** ListOperationsResponse
, ListOperationsResponse
, listOperationsResponse
, lorNextPageToken
, lorOperations
-- ** CancelOperationRequest
, CancelOperationRequest
, cancelOperationRequest
-- ** Hash
, Hash
, hash
, hValue
, hType
-- ** Results
, Results
, results
, rImages
, rBuildStepImages
-- ** RepoSource
, RepoSource
, repoSource
, rsRepoName
, rsCommitSha
, rsBranchName
, rsTagName
, rsProjectId
-- ** Operation
, Operation
, operation
, oDone
, oError
, oResponse
, oName
, oMetadata
-- ** Empty
, Empty
, empty
-- ** StatusDetailsItem
, StatusDetailsItem
, statusDetailsItem
, sdiAddtional
-- ** Build
, Build
, build
, bImages
, bStatus
, bSourceProvenance
, bLogURL
, bResults
, bStartTime
, bLogsBucket
, bSteps
, bStatusDetail
, bSource
, bId
, bOptions
, bProjectId
, bBuildTriggerId
, bTimeout
, bFinishTime
, bCreateTime
-- ** SourceProvenanceFileHashes
, SourceProvenanceFileHashes
, sourceProvenanceFileHashes
, spfhAddtional
-- ** CancelBuildRequest
, CancelBuildRequest
, cancelBuildRequest
-- ** StorageSource
, StorageSource
, storageSource
, ssBucket
, ssObject
, ssGeneration
-- ** ListBuildTriggersResponse
, ListBuildTriggersResponse
, listBuildTriggersResponse
, lbtrTriggers
-- ** BuildOptionsRequestedVerifyOption
, BuildOptionsRequestedVerifyOption (..)
-- ** FileHashes
, FileHashes
, fileHashes
, fhFileHash
-- ** Xgafv
, Xgafv (..)
-- ** BuildStatus
, BuildStatus (..)
-- ** HashType
, HashType (..)
-- ** Source
, Source
, source
, sRepoSource
, sStorageSource
-- ** OperationMetadata
, OperationMetadata
, operationMetadata
, omAddtional
-- ** BuildOperationMetadata
, BuildOperationMetadata
, buildOperationMetadata
, bomBuild
-- ** BuildOptions
, BuildOptions
, buildOptions
, boRequestedVerifyOption
, boSourceProvenanceHash
-- ** OperationResponse
, OperationResponse
, operationResponse
, orAddtional
-- ** BuildTrigger
, BuildTrigger
, buildTrigger
, btDisabled
, btTriggerTemplate
, btBuild
, btId
, btDescription
, btFilename
, btCreateTime
-- ** BuiltImage
, BuiltImage
, builtImage
, biName
, biDigest
) where
import Network.Google.ContainerBuilder.Types
import Network.Google.Prelude
import Network.Google.Resource.Cloudbuild.Operations.Cancel
import Network.Google.Resource.Cloudbuild.Operations.Get
import Network.Google.Resource.Cloudbuild.Operations.List
import Network.Google.Resource.Cloudbuild.Projects.Builds.Cancel
import Network.Google.Resource.Cloudbuild.Projects.Builds.Create
import Network.Google.Resource.Cloudbuild.Projects.Builds.Get
import Network.Google.Resource.Cloudbuild.Projects.Builds.List
import Network.Google.Resource.Cloudbuild.Projects.Triggers.Create
import Network.Google.Resource.Cloudbuild.Projects.Triggers.Delete
import Network.Google.Resource.Cloudbuild.Projects.Triggers.Get
import Network.Google.Resource.Cloudbuild.Projects.Triggers.List
import Network.Google.Resource.Cloudbuild.Projects.Triggers.Patch
{- $resources
TODO
-}
-- | Represents the entirety of the methods and resources available for the Google Cloud Container Builder API service.
type ContainerBuilderAPI =
OperationsListResource :<|> OperationsGetResource
:<|> OperationsCancelResource
:<|> ProjectsBuildsListResource
:<|> ProjectsBuildsGetResource
:<|> ProjectsBuildsCreateResource
:<|> ProjectsBuildsCancelResource
:<|> ProjectsTriggersListResource
:<|> ProjectsTriggersPatchResource
:<|> ProjectsTriggersGetResource
:<|> ProjectsTriggersCreateResource
:<|> ProjectsTriggersDeleteResource
| rueshyna/gogol | gogol-containerbuilder/gen/Network/Google/ContainerBuilder.hs | mpl-2.0 | 6,936 | 0 | 15 | 1,641 | 782 | 571 | 211 | 173 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.IdentityToolkit.Types
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
module Network.Google.IdentityToolkit.Types
(
-- * Service Configuration
identityToolkitService
-- * OAuth Scopes
, firebaseScope
, cloudPlatformScope
-- * UploadAccountResponseErrorItem
, UploadAccountResponseErrorItem
, uploadAccountResponseErrorItem
, uareiMessage
, uareiIndex
-- * UserInfoProviderUserInfoItem
, UserInfoProviderUserInfoItem
, userInfoProviderUserInfoItem
, uipuiiProviderId
, uipuiiEmail
, uipuiiPhotoURL
, uipuiiFederatedId
, uipuiiDisplayName
, uipuiiScreenName
, uipuiiRawId
-- * VerifyCustomTokenResponse
, VerifyCustomTokenResponse
, verifyCustomTokenResponse
, vctrKind
, vctrRefreshToken
, vctrExpiresIn
, vctrIdToken
-- * IdpConfig
, IdpConfig
, idpConfig
, icClientId
, icEnabled
, icWhiteListedAudiences
, icSecret
, icExperimentPercent
, icProvider
-- * UserInfo
, UserInfo
, userInfo
, uiEmail
, uiLastLoginAt
, uiPhotoURL
, uiCreatedAt
, uiDisabled
, uiCustomAuth
, uiProviderUserInfo
, uiValidSince
, uiPasswordUpdatedAt
, uiVersion
, uiEmailVerified
, uiSalt
, uiDisplayName
, uiPasswordHash
, uiLocalId
, uiRawPassword
, uiScreenName
-- * IdentitytoolkitRelyingPartySetProjectConfigResponse
, IdentitytoolkitRelyingPartySetProjectConfigResponse
, identitytoolkitRelyingPartySetProjectConfigResponse
, irpspcrProjectId
-- * IdentitytoolkitRelyingPartyVerifyCustomTokenRequest
, IdentitytoolkitRelyingPartyVerifyCustomTokenRequest
, identitytoolkitRelyingPartyVerifyCustomTokenRequest
, irpvctrInstanceId
, irpvctrDelegatedProjectNumber
, irpvctrToken
, irpvctrReturnSecureToken
-- * SetAccountInfoResponseProviderUserInfoItem
, SetAccountInfoResponseProviderUserInfoItem
, setAccountInfoResponseProviderUserInfoItem
, sairpuiiProviderId
, sairpuiiPhotoURL
, sairpuiiFederatedId
, sairpuiiDisplayName
-- * IdentitytoolkitRelyingPartyCreateAuthURIRequestCustomParameter
, IdentitytoolkitRelyingPartyCreateAuthURIRequestCustomParameter
, identitytoolkitRelyingPartyCreateAuthURIRequestCustomParameter
, irpcaurcpAddtional
-- * IdentitytoolkitRelyingPartyVerifyPasswordRequest
, IdentitytoolkitRelyingPartyVerifyPasswordRequest
, identitytoolkitRelyingPartyVerifyPasswordRequest
, irpvprEmail
, irpvprInstanceId
, irpvprCaptchaChallenge
, irpvprDelegatedProjectNumber
, irpvprReturnSecureToken
, irpvprPassword
, irpvprCaptchaResponse
, irpvprIdToken
, irpvprPendingIdToken
-- * SignupNewUserResponse
, SignupNewUserResponse
, signupNewUserResponse
, snurEmail
, snurKind
, snurRefreshToken
, snurExpiresIn
, snurDisplayName
, snurLocalId
, snurIdToken
-- * IdentitytoolkitRelyingPartySetProjectConfigRequest
, IdentitytoolkitRelyingPartySetProjectConfigRequest
, identitytoolkitRelyingPartySetProjectConfigRequest
, irpspcrAuthorizedDomains
, irpspcrAPIKey
, irpspcrIdpConfig
, irpspcrChangeEmailTemplate
, irpspcrDelegatedProjectNumber
, irpspcrVerifyEmailTemplate
, irpspcrEnableAnonymousUser
, irpspcrLegacyResetPasswordTemplate
, irpspcrAllowPasswordUser
, irpspcrResetPasswordTemplate
, irpspcrUseEmailSending
-- * IdentitytoolkitRelyingPartySetAccountInfoRequest
, IdentitytoolkitRelyingPartySetAccountInfoRequest
, identitytoolkitRelyingPartySetAccountInfoRequest
, irpsairUpgradeToFederatedLogin
, irpsairEmail
, irpsairInstanceId
, irpsairLastLoginAt
, irpsairPhotoURL
, irpsairCaptchaChallenge
, irpsairCreatedAt
, irpsairDelegatedProjectNumber
, irpsairDeleteAttribute
, irpsairDeleteProvider
, irpsairReturnSecureToken
, irpsairValidSince
, irpsairOOBCode
, irpsairPassword
, irpsairCaptchaResponse
, irpsairEmailVerified
, irpsairDisplayName
, irpsairDisableUser
, irpsairLocalId
, irpsairIdToken
, irpsairProvider
-- * IdentitytoolkitRelyingPartyVerifyAssertionRequest
, IdentitytoolkitRelyingPartyVerifyAssertionRequest
, identitytoolkitRelyingPartyVerifyAssertionRequest
, irpvarReturnIdpCredential
, irpvarInstanceId
, irpvarDelegatedProjectNumber
, irpvarPostBody
, irpvarReturnSecureToken
, irpvarReturnRefreshToken
, irpvarRequestURI
, irpvarSessionId
, irpvarIdToken
, irpvarPendingIdToken
-- * DeleteAccountResponse
, DeleteAccountResponse
, deleteAccountResponse
, darKind
-- * IdentitytoolkitRelyingPartySignOutUserResponse
, IdentitytoolkitRelyingPartySignOutUserResponse
, identitytoolkitRelyingPartySignOutUserResponse
, irpsourLocalId
-- * DownloadAccountResponse
, DownloadAccountResponse
, downloadAccountResponse
, dNextPageToken
, dUsers
, dKind
-- * IdentitytoolkitRelyingPartyGetProjectConfigResponse
, IdentitytoolkitRelyingPartyGetProjectConfigResponse
, identitytoolkitRelyingPartyGetProjectConfigResponse
, irpgpcrAuthorizedDomains
, irpgpcrAPIKey
, irpgpcrIdpConfig
, irpgpcrChangeEmailTemplate
, irpgpcrVerifyEmailTemplate
, irpgpcrEnableAnonymousUser
, irpgpcrLegacyResetPasswordTemplate
, irpgpcrAllowPasswordUser
, irpgpcrResetPasswordTemplate
, irpgpcrProjectId
, irpgpcrUseEmailSending
, irpgpcrDynamicLinksDomain
-- * ResetPasswordResponse
, ResetPasswordResponse
, resetPasswordResponse
, rprEmail
, rprKind
, rprRequestType
, rprNewEmail
-- * UploadAccountResponse
, UploadAccountResponse
, uploadAccountResponse
, uarKind
, uarError
-- * CreateAuthURIResponse
, CreateAuthURIResponse
, createAuthURIResponse
, caurProviderId
, caurKind
, caurAllProviders
, caurAuthURI
, caurCaptchaRequired
, caurRegistered
, caurSessionId
, caurForExistingProvider
-- * IdentitytoolkitRelyingPartyGetPublicKeysResponse
, IdentitytoolkitRelyingPartyGetPublicKeysResponse
, identitytoolkitRelyingPartyGetPublicKeysResponse
, irpgpkrAddtional
-- * RelyingParty
, RelyingParty
, relyingParty
, rpEmail
, rpKind
, rpUserIP
, rpRequestType
, rpCaptchaResp
, rpNewEmail
, rpChallenge
, rpIdToken
-- * IdentitytoolkitRelyingPartyGetAccountInfoRequest
, IdentitytoolkitRelyingPartyGetAccountInfoRequest
, identitytoolkitRelyingPartyGetAccountInfoRequest
, irpgairEmail
, irpgairDelegatedProjectNumber
, irpgairLocalId
, irpgairIdToken
-- * EmailTemplate
, EmailTemplate
, emailTemplate
, etSubject
, etBody
, etFormat
, etFromDisplayName
, etFrom
, etReplyTo
-- * IdentitytoolkitRelyingPartyUploadAccountRequest
, IdentitytoolkitRelyingPartyUploadAccountRequest
, identitytoolkitRelyingPartyUploadAccountRequest
, irpuarUsers
, irpuarMemoryCost
, irpuarAllowOverwrite
, irpuarDelegatedProjectNumber
, irpuarSanityCheck
, irpuarSaltSeparator
, irpuarHashAlgorithm
, irpuarSignerKey
, irpuarRounds
, irpuarTargetProjectId
-- * IdentitytoolkitRelyingPartyResetPasswordRequest
, IdentitytoolkitRelyingPartyResetPasswordRequest
, identitytoolkitRelyingPartyResetPasswordRequest
, irprprEmail
, irprprNewPassword
, irprprOOBCode
, irprprOldPassword
-- * IdentitytoolkitRelyingPartyCreateAuthURIRequest
, IdentitytoolkitRelyingPartyCreateAuthURIRequest
, identitytoolkitRelyingPartyCreateAuthURIRequest
, irpcaurProviderId
, irpcaurClientId
, irpcaurContext
, irpcaurCustomParameter
, irpcaurIdentifier
, irpcaurOtaApp
, irpcaurOAuthConsumerKey
, irpcaurHostedDomain
, irpcaurAppId
, irpcaurContinueURI
, irpcaurAuthFlowType
, irpcaurOAuthScope
, irpcaurSessionId
, irpcaurOpenidRealm
-- * GetAccountInfoResponse
, GetAccountInfoResponse
, getAccountInfoResponse
, gairUsers
, gairKind
-- * IdentitytoolkitRelyingPartyDeleteAccountRequest
, IdentitytoolkitRelyingPartyDeleteAccountRequest
, identitytoolkitRelyingPartyDeleteAccountRequest
, irpdarDelegatedProjectNumber
, irpdarLocalId
, irpdarIdToken
-- * GetOOBConfirmationCodeResponse
, GetOOBConfirmationCodeResponse
, getOOBConfirmationCodeResponse
, goobccrEmail
, goobccrKind
, goobccrOOBCode
-- * IdentitytoolkitRelyingPartyDownloadAccountRequest
, IdentitytoolkitRelyingPartyDownloadAccountRequest
, identitytoolkitRelyingPartyDownloadAccountRequest
, iNextPageToken
, iDelegatedProjectNumber
, iMaxResults
, iTargetProjectId
-- * VerifyPasswordResponse
, VerifyPasswordResponse
, verifyPasswordResponse
, vprEmail
, vprPhotoURL
, vprOAuthAccessToken
, vprKind
, vprOAuthExpireIn
, vprRefreshToken
, vprExpiresIn
, vprDisplayName
, vprLocalId
, vprRegistered
, vprIdToken
, vprOAuthAuthorizationCode
-- * SetAccountInfoResponse
, SetAccountInfoResponse
, setAccountInfoResponse
, sairEmail
, sairPhotoURL
, sairKind
, sairRefreshToken
, sairProviderUserInfo
, sairExpiresIn
, sairDisplayName
, sairPasswordHash
, sairLocalId
, sairNewEmail
, sairIdToken
-- * IdentitytoolkitRelyingPartySignupNewUserRequest
, IdentitytoolkitRelyingPartySignupNewUserRequest
, identitytoolkitRelyingPartySignupNewUserRequest
, irpsnurEmail
, irpsnurInstanceId
, irpsnurPhotoURL
, irpsnurCaptchaChallenge
, irpsnurDisabled
, irpsnurPassword
, irpsnurCaptchaResponse
, irpsnurEmailVerified
, irpsnurDisplayName
, irpsnurIdToken
-- * VerifyAssertionResponse
, VerifyAssertionResponse
, verifyAssertionResponse
, varProviderId
, varFullName
, varEmail
, varEmailRecycled
, varPhotoURL
, varVerifiedProvider
, varContext
, varNeedConfirmation
, varOriginalEmail
, varLastName
, varOAuthAccessToken
, varDateOfBirth
, varKind
, varRawUserInfo
, varOAuthExpireIn
, varRefreshToken
, varAppInstallationURL
, varAction
, varNeedEmail
, varFederatedId
, varOAuthIdToken
, varAppScheme
, varExpiresIn
, varInputEmail
, varEmailVerified
, varOAuthTokenSecret
, varLanguage
, varFirstName
, varDisplayName
, varOAuthRequestToken
, varOAuthScope
, varNickName
, varLocalId
, varTimeZone
, varScreenName
, varErrorMessage
, varIdToken
, varOAuthAuthorizationCode
-- * IdentitytoolkitRelyingPartySignOutUserRequest
, IdentitytoolkitRelyingPartySignOutUserRequest
, identitytoolkitRelyingPartySignOutUserRequest
, iInstanceId
, iLocalId
-- * GetRecaptchaParamResponse
, GetRecaptchaParamResponse
, getRecaptchaParamResponse
, grprRecaptchaSiteKey
, grprKind
, grprRecaptchaStoken
) where
import Network.Google.IdentityToolkit.Types.Product
import Network.Google.IdentityToolkit.Types.Sum
import Network.Google.Prelude
-- | Default request referring to version 'v3' of the Google Identity Toolkit API. This contains the host and root path used as a starting point for constructing service requests.
identityToolkitService :: ServiceConfig
identityToolkitService
= defaultService (ServiceId "identitytoolkit:v3")
"www.googleapis.com"
-- | View and administer all your Firebase data and settings
firebaseScope :: Proxy '["https://www.googleapis.com/auth/firebase"]
firebaseScope = Proxy;
-- | View and manage your data across Google Cloud Platform services
cloudPlatformScope :: Proxy '["https://www.googleapis.com/auth/cloud-platform"]
cloudPlatformScope = Proxy;
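-- An application wanting both scopes above would typically pin them on its
-- Google environment, along these lines (illustrative sketch; it assumes
-- gogol's 'newEnv', 'envScopes' and '(!)' combinators from Network.Google):
--
-- > env <- newEnv <&> envScopes .~ (firebaseScope ! cloudPlatformScope)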
| rueshyna/gogol | gogol-identity-toolkit/gen/Network/Google/IdentityToolkit/Types.hs | mpl-2.0 | 12,500 | 0 | 7 | 2,647 | 1,195 | 811 | 384 | 368 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE DataKinds #-}
module Gpg where
import Control.Applicative
import qualified Control.Exception as Ex
import Control.Monad
import Control.Monad.Reader
import DBus
import qualified DBus.Types as DBus
import Data.ByteString (ByteString)
import qualified Data.ByteString as BS
import Data.Maybe
import Data.Monoid
import qualified Data.Text.Encoding as Text
import qualified GpgMe as Gpg
import qualified Network.Xmpp as Xmpp
import System.Log.Logger
--import Network.Xmpp.E2E
import Base
import Persist
import Types
mkKeyRSA :: String -> String
mkKeyRSA name = unlines $
[ "<GnupgKeyParms format=\"internal\">"
, "Key-Type: RSA"
, "Key-Length: 4096"
, "Key-Usage: sign, auth"
, "Expire-Date: 0"
, "Name-Real: " ++ name
, "</GnupgKeyParms>"
]
pontariusKeyName :: String
pontariusKeyName = "Pontarius-Service"
newGpgKey :: IO BS.ByteString
newGpgKey = do
ctx <- Gpg.ctxNew Nothing
Just kid <- Gpg.genKeyFingerprint <$>
Gpg.genKey ctx (mkKeyRSA $ pontariusKeyName)
return kid
fromKeyID :: KeyID -> BS.ByteString
fromKeyID = Text.encodeUtf8
toKeyID :: BS.ByteString -> KeyID
toKeyID = Text.decodeUtf8
-- revokeIdentity :: MonadIO m => KeyID -> MethodHandlerT m ()
-- revokeIdentity keyID = do
-- let text = "" :: Text
-- reason = Gpg.NoReason
-- ctx <- liftIO $ Gpg.ctxNew Nothing
-- keys <- liftIO $ Gpg.findKeyBy ctx True Gpg.keyFingerprint
-- (Just $ fromKeyID keyID)
-- case keys of
-- [key] -> liftIO $ Gpg.revoke ctx key reason text >> return ()
-- [] -> DBus.methodError $
-- MsgError{ errorName = "org.pontarius.Error.Revoke"
-- , errorText = Just "Key not found"
-- , errorBody = []
-- }
-- _ -> DBus.methodError $
-- MsgError{ errorName = "org.pontarius.Error.Revoke"
-- , errorText = Just "Key not unique"
-- , errorBody = []
-- }
-- return ()
setSigningGpgKey :: PSState -> KeyID -> IO Bool
setSigningGpgKey st keyID = do
let keyFpr = fromKeyID keyID
ctx <- Gpg.ctxNew Nothing
keys <- Gpg.getKeys ctx True
matches <- filterM (liftM (== Just keyFpr) . Gpg.keyFingerprint) keys
haveKey <- case matches of
[] -> return False
(_:_) -> return True
runPSM st . when haveKey $ setSigningKey "gpg" (toKeyID keyFpr)
return haveKey
getSigningPgpKey :: PSState -> DBus.MethodHandlerT IO KeyID
getSigningPgpKey st = do
pIdent <- (runPSM st $ getSigningKey) >>= \case
Just pi -> return pi
Nothing -> do
DBus.methodError $
MsgError{ errorName = "org.pontarius.Error.Sign"
, errorText = Just "No signing key found"
, errorBody = []
}
case privIdentKeyBackend pIdent of
"gpg" -> return $ privIdentKeyID pIdent
backend -> DBus.methodError $
MsgError { errorName = "org.pontarius.Error.Sign"
, errorText = Just $ "Unknown key backend " <> backend
, errorBody = []
}
identityProp :: PSState -> Property ('DBusSimpleType 'TypeString)
identityProp st =
mkProperty pontariusObjectPath pontariusInterface "Identity"
(Just $ getSigningPgpKey st) Nothing
PECSTrue
--setSigningKey st keyFpr
getIdentities :: IO [KeyID]
getIdentities = do
ctx <- Gpg.ctxNew Nothing
keys <- Gpg.getKeys ctx True
map toKeyID . catMaybes <$> mapM Gpg.keyFingerprint keys
-- | Get all available private keys
getIdentitiesMethod :: Method
getIdentitiesMethod =
DBus.Method
(DBus.repMethod $ (getIdentities :: IO [KeyID] ))
"getIdentities"
Done
("identities" -- ^ List of keyIDs
:> Done)
importKey :: MonadIO m => ByteString -> PSM m [ByteString]
importKey key = do
ctx <- liftIO $ Gpg.ctxNew Nothing
importResults <- liftIO $ Gpg.importKeys ctx key
liftM catMaybes $ forM importResults $ \res ->
case Gpg.isResult res of
Nothing -> do
let fPrint = Gpg.isFprint res
-- addPeerKey st peer (PubKey "gpg" fPrint)
return $ Just fPrint
Just err -> do
liftIO . errorM "Pontarius.Xmpp" $ "error while importing key" ++ show err
return Nothing
exportSigningGpgKey :: PSState -> IO (Maybe ByteString)
exportSigningGpgKey st = do
mbKey <- runPSM st getSigningKey
case mbKey of
Just key | privIdentKeyBackend key == "gpg" -> do
let kid = fromKeyID $ privIdentKeyID key
ctx <- Gpg.ctxNew Nothing
keys <- Gpg.getKeys ctx True
candidates <- filterM (\k -> (== Just kid) <$> Gpg.keyFingerprint k) keys
case candidates of
(k:_) -> Just <$> Gpg.exportKeys ctx [k]
_ -> return Nothing
_ -> return Nothing
-- | Produce a detached signature over the given bytes with the private key
-- whose fingerprint is given.
signGPG :: MonadIO m =>
BS.ByteString
-> BS.ByteString
-> m BS.ByteString
signGPG kid bs = liftIO $ do
ctx <- Gpg.ctxNew Nothing
keys <- Gpg.getKeys ctx True
matches <- filterM (liftM (== Just kid) . Gpg.keyFingerprint) keys
case matches of
[] -> error "key does not exist" -- return Nothing
(p:_) -> do
sig <- Gpg.sign ctx bs p Gpg.SigModeDetach
logDebug $ "Signing " ++ show bs ++ " yielded " ++ show sig
return sig
gpgGuard :: (MonadIO m, MonadPlus m) => String -> Bool -> m ()
gpgGuard reason p = case p of
True -> return ()
False -> liftIO (errorM "Pontarius.Xmpp" reason) >> mzero
-- | Verify a detached signature over the given bytes, checking both that the
-- signature is good and that it was made by the key with the given fingerprint.
verifyGPG :: ByteString
-> ByteString
-> ByteString
-> IO Bool
verifyGPG kid sig txt = do
ctx <- Gpg.ctxNew Nothing
logDebug $ "Verifying signature " ++ show sig ++ " for " ++ show txt
res <- Ex.try $ Gpg.verifyDetach ctx txt sig -- Gpg.Error
case res of
Left (e :: Gpg.Error) -> do
errorM "Pontarius.Xmpp"
$ "Verifying signature threw exception" ++ show e
return False
Right [st] -> do
gpgGuard ("could not verify signature: " ++ show st)
(goodStat $ Gpg.status st)
gpgGuard ("Fingerpringt doesn't match: " ++ show kid
++ " /= " ++ show (Gpg.fingerprint st))
(Gpg.fingerprint st == kid)
debugM "Pontarius.Xmpp" $ "Signature seems good"
return True
Right [] -> do
errorM "Pontarius.Xmpp" "verifyGPG: Could not import pubkey"
return False
Right _ -> do
debugM "Pontarius.Xmpp" "multiple signature results"
return False
where
goodStat Gpg.SigStatGood = True
goodStat _ = False
importIdent :: MonadIO m => BS.ByteString -> PSM m [BS.ByteString]
importIdent ident = do
ids <- importKey ident
forM_ ids $ addPubIdent . toKeyID
return ids
verifySignature :: MonadIO m =>
PSState
-> Xmpp.Jid
-> BS.ByteString
-> BS.ByteString
-> BS.ByteString
-> m (Maybe BS.ByteString)
verifySignature st _peer pk sig pt = runPSM st $ do
ids <- importIdent pk
case ids of
[id] -> do
verified <- liftIO $ verifyGPG id sig pt
logDebug $ "Signature is: " ++ show verified
return $ if verified then (Just id) else Nothing
_ -> do
logDebug "import resulted in more than one key"
return Nothing
| Philonous/pontarius-service | source/Gpg.hs | agpl-3.0 | 7,982 | 0 | 19 | 2,603 | 1,961 | 968 | 993 | 183 | 5 |
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
module Jaek.Project.Parse (
Parse (..)
)
where
import Jaek.Base
import Jaek.Gen
import Jaek.StreamExpr as SE
import Jaek.StreamT as ST
import Jaek.Tree
import Data.Attoparsec as A
import Data.Attoparsec.Binary
import Data.Attoparsec.Char8
import Data.ByteString.UTF8 (toString)
import Data.Tree
-- | A small helper for reading a length-prefixed, UTF8-encoded string.
parseStr :: Parser String
parseStr = toString <$> (anyWord64le >>= A.take . fI)
class Parse a where
jparse :: Parser a
instance Parse a => Parse [a] where
jparse = anyWord64le >>= \n -> replicateM (fI n) jparse
instance Parse Int where
jparse = fI <$> anyWord64le
instance Parse SampleCount where
jparse = fI <$> anyWord64le
instance Parse Duration where
jparse = fI <$> anyWord64le
instance Parse Integer where
jparse = fI <$> anyWord64le
instance Parse AudioFormat where
jparse = AudioFormat <$> jparse <*> jparse <*> jparse
instance Parse Double where
jparse = try (double <* char 'z')
instance Parse GenFunc where
jparse = (Null <$ word8 0)
<|> (word8 1 *> (ConstF <$> jparse))
instance Parse StreamExpr where
jparse =
(word8 0 *> (FileSource <$> parseStr <*> jparse <*>
jparse <*> jparse <*> jparse))
<|> (word8 1 *> (GenSource <$> jparse <*> jparse))
<|> (word8 2 *> (Region <$> jparse <*> jparse <*> jparse))
<|> (word8 3 *> (StreamSeq <$> jparse))
<|> (word8 4 *> (SE.Mix <$> jparse <*> jparse))
instance Parse NodeRef where
jparse = (word8 0 *> (AbsPath <$> jparse))
<|> (word8 1 *> (RelPath <$> jparse <*> jparse))
instance Parse StreamT where
jparse = (word8 0 *> (Cut <$> jparse <*> jparse <*> jparse))
<|> (word8 1 *> (Mute <$> jparse <*> jparse <*> jparse))
<|> (word8 2 *> (Insert <$> jparse <*> jparse <*> jparse <*>
jparse <*> jparse <*> jparse))
<|> (word8 3 *> (ST.Mix <$> jparse <*> jparse <*> jparse <*>
jparse <*> jparse <*> jparse))
instance Parse Node where
jparse = Root <$ word8 0
<|> (word8 1 *> (Init <$> parseStr <*> jparse <*> jparse))
<|> (word8 2 *> (Mod <$> jparse <*> jparse <*> jparse))
instance Parse HTree where
jparse = Node <$> jparse <*> jparse
| JohnLato/jaek | src/Jaek/Project/Parse.hs | lgpl-3.0 | 2,426 | 0 | 17 | 674 | 788 | 421 | 367 | 58 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module AllDice.Scheme
( runExpr
) where
import Control.Monad.ST
import qualified Data.Text as T
-- Not ideal but should in theory work for now
import System.Random
-- Scheme interpreter
import Scheme.Types
import Scheme.Env
import Scheme.Parser
import Scheme.Evaluator
evalString :: LispEnv s -> T.Text -> ST s T.Text
evalString env expr = do
eexpr <- evalExpr env expr
case eexpr of
Left err -> return $ (T.pack . show) err
Right val -> return $ (T.pack . show) val
evalExpr :: LispEnv s -> T.Text -> ST s (ThrowsError (LispVal s))
evalExpr env expr =
case readExpr expr of
Left err -> return $ Left err
Right val -> eval env val
evalFile :: LispEnv s -> T.Text -> ST s T.Text
evalFile env expr = do
exps <- evalExprList env expr
T.unlines `fmap` mapM (\eexp -> case eexp of
Left err -> return $ (T.pack . show) err
Right val -> return $ (T.pack . show) val) exps
evalExprList :: LispEnv s -> T.Text -> ST s [ThrowsError (LispVal s)]
evalExprList env expr =
case readExprList expr of
Left err -> return [Left err]
Right val -> mapM (eval env) val
runExpr :: T.Text -> T.Text -> StdGen -> ST s T.Text
runExpr stdlib val gen = do
env <- primitiveBindings
-- Inject a val
env' <- bindVars env [("stdRngGen", Random gen)]
-- TODO: a nicer way to inject the stdlib into the env
-- TODO: add error reporting for invalid/bad stdlib
_ <- evalFile env' stdlib
-- Run the scheme program given
evalString env' val
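-- Illustrative call pattern (the stdlib and program strings are made up, and
-- whether they evaluate depends on the Scheme dialect implemented by the
-- parser and evaluator):
--
-- > import Control.Monad.ST (runST)
-- >
-- > roll :: StdGen -> T.Text
-- > roll gen = runST (runExpr "(define (id x) x)" "(id 42)" gen)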
| pharaun/alldice | src/AllDice/Scheme.hs | apache-2.0 | 1,583 | 0 | 18 | 403 | 549 | 272 | 277 | 38 | 2 |
module Quasar.Utils where
import Data.Aeson
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy as LBS
import Control.Concurrent
import Control.Monad.IO.Class
import qualified Network.Wai as W
import qualified Network.Wai.Handler.Warp as W (run)
import System.Exit
import System.IO
-- TODO: move to separate module
eitherDecodeBs :: FromJSON a => BS.ByteString -> Either String a
eitherDecodeBs = eitherDecode . bsToLbs
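-- For example (doctest-style):
--
-- >>> eitherDecodeBs (BS.pack "[1,2,3]") :: Either String [Int]
-- Right [1,2,3]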
bsToLbs :: BS.ByteString -> LBS.ByteString
-- Wrap the strict ByteString as a single-chunk lazy ByteString.
bsToLbs = LBS.fromChunks . (: [])
lbsToBs :: LBS.ByteString -> BS.ByteString
lbsToBs = BS.concat . LBS.toChunks
warp :: Int -> W.Application -> IO ()
warp port app = W.run port app
forkWarp :: Int -> W.Application -> IO ThreadId
forkWarp port app = forkIO $ W.run port app
exitOnInput :: IO ()
exitOnInput = do
hSetBuffering stdin NoBuffering
_ <- getChar
exitSuccess | xdcrafts/Quasar | src/Quasar/Utils.hs | apache-2.0 | 855 | 0 | 8 | 129 | 258 | 144 | 114 | 24 | 1 |
module Data.SpecHelper where
import qualified Data.Geospatial as Geospatial
import qualified Data.LinearRing as LinearRing
import qualified Data.LineString as LineString
import qualified Data.Sequence as Sequence
import qualified Data.Geometry.VectorTile.VectorTile as VectorTile
import qualified Data.Geometry.Types.Geography as TypesGeography
tupleToPts :: [(Int, Int)] -> Sequence.Seq VectorTile.Point
tupleToPts = foldr (\(x,y) acc -> VectorTile.Point x y Sequence.<| acc) Sequence.empty
mkLineString :: (Double, Double) -> (Double, Double) -> [(Double, Double)] -> LineString.LineString Geospatial.GeoPositionWithoutCRS
mkLineString p1 p2 rest = LineString.makeLineString (tupleToGeoPts p1) (tupleToGeoPts p2) (Sequence.fromList $ fmap tupleToGeoPts rest)
mkLinearRing :: (Double, Double) -> (Double, Double) -> (Double, Double) -> [(Double, Double)] -> LinearRing.LinearRing Geospatial.GeoPositionWithoutCRS
mkLinearRing p1 p2 p3 rest = LinearRing.makeLinearRing (tupleToGeoPts p1) (tupleToGeoPts p2) (tupleToGeoPts p3) (Sequence.fromList $ fmap tupleToGeoPts rest)
tupleToGeoPts :: (Double, Double) -> Geospatial.GeoPositionWithoutCRS
tupleToGeoPts (x, y) = Geospatial.GeoPointXY (Geospatial.PointXY x y)
listToSequenceGeo :: [(Double, Double)] -> Sequence.Seq Geospatial.PointXY
listToSequenceGeo pts = Sequence.fromList $ fmap (uncurry Geospatial.PointXY) pts
listToSequenceGeoLine :: [((Double, Double),(Double, Double))] -> Sequence.Seq TypesGeography.GeoStorableLine
listToSequenceGeoLine pts = Sequence.fromList $ fmap (\(x, y) -> TypesGeography.GeoStorableLine (uncurry Geospatial.PointXY x) (uncurry Geospatial.PointXY y)) pts
| sitewisely/zellige | test/Data/SpecHelper.hs | apache-2.0 | 1,717 | 0 | 12 | 244 | 523 | 294 | 229 | 19 | 1 |
module Step_1_4 where
-- Great! You've run your first Haskell code.
import Data.List
-- This line just makes some utility functions from the Data.List module accessible.
-- Let's write those unix command lines as Haskell code:
input = "`Twas brillig, and the slithy toves\n"
++ "Did gyre and gimble in the wabe;\n"
++ "All mimsy were the borogoves,\n"
++ "And the mome raths outgrabe.\n"
output = output1
output1 = unlines ( sort (lines input))
-- This does just what you think it does!
output2 = unlines ( take 2 ( map reverse ( lines input )))
-- This does just what you think it does!
output3 = show ( length (words input))
-- What does this do?
-- Notice that we had to convert the number result of length to a String with show
output4 = "\t" ++ show ( length (lines input))
++ "\t" ++ show ( length (words input))
++ "\t" ++ show (length input)
-- how about this?
-- NEXT
-- Most Haskell code doesn't have so many parentheses. Instead, it uses the $ operator like so:
output1' = unlines $ sort $ lines input
output2' = unlines $ take 2 $ map reverse $ lines input
output3' = show $ length $ words input
output4' = "\t" ++ (show $ length $ lines input)
++ "\t" ++ (show $ length $ words input)
++ "\t" ++ (show $ length input)
-- You can think of $ as parenthesizing everything on the right, even other $ to the right.
| mzero/barley | seed/Chapter1/Step_1_4.hs | apache-2.0 | 1,364 | 0 | 14 | 293 | 297 | 155 | 142 | 19 | 1 |
-- http://www.codewars.com/kata/552fd698ac49561baf00006e
module WordBreak where
import Control.Arrow
import Data.List
wordBreak :: [String] -> String -> Maybe [String]
wordBreak xss = fmap reverse . fst . foldl f (Just [], []) where
f (onBound, insides) y = (onBound', insides') where
as = insides ++ case onBound of
Nothing -> []
Just wds -> map (id &&& (: wds)) xss
cs = [(bs, wds) | (b : bs, wds) <- as, b==y]
(onBounds, insides') = partition (null . fst) cs
onBound' = case onBounds of
[] -> Nothing
((_, wds) : _) -> Just wds
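-- Illustrative results (doctest-style):
--
-- >>> wordBreak ["the", "quick", "brown"] "thequick"
-- Just ["the","quick"]
-- >>> wordBreak ["the"] "thex"
-- Nothing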
| Bodigrim/katas | src/haskell/B-Breaking-into-words.hs | bsd-2-clause | 588 | 0 | 16 | 150 | 249 | 136 | 113 | 14 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
module Application.Scaffold.Type where
import System.Console.CmdArgs
data Scaffold = MakeApp { config :: FilePath }
| MakeYesodCrud { config :: FilePath }
deriving (Show,Data,Typeable)
makeapp :: Scaffold
makeapp = MakeApp { config = "test.conf" }
makeyesodcrud :: Scaffold
makeyesodcrud = MakeYesodCrud { config = "test.conf" }
mode :: Scaffold
mode = modes [ makeapp, makeyesodcrud ]
| wavewave/scaffold | lib/Application/Scaffold/Type.hs | bsd-2-clause | 460 | 0 | 8 | 97 | 110 | 67 | 43 | 12 | 1 |
module Command.Edit.Perform
( EditOptions(..)
, perform
) where
import Data.Maybe
import qualified System.Posix.Env as P
import qualified System.Posix.Temp as P
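import System.IO (hClose)
import System.Process (callProcess)
-- | Command-line options for the edit command. This record is an assumed,
-- minimal shape carrying only the 'editOptEditor' field used below; the
-- original definition is not shown in this copy of the module and may have
-- further fields.
data EditOptions = EditOptions
  { editOptEditor :: Maybe String -- ^ editor override from the command line
  }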
-- | Get the preferred editor. If no editor is set, "vi" is used as the
-- default.
getEditor :: EditOptions -- ^ command-line options
-> IO String -- ^ editor name
getEditor options = do
let optEditor = editOptEditor options
envEditor <- P.getEnv "EDITOR"
return $ head $ catMaybes [optEditor, envEditor, Just "vi"]
-- | Edit the data points in a text editor.
perform :: EditOptions
-> IO ()
perform options = do
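  -- Minimal, assumption-based sketch of the edit workflow: stage a scratch
  -- file and block on the preferred editor until it exits.
  editor <- getEditor options
  (path, handle) <- P.mkstemp "swim-edit."
  hClose handle
  callProcess editor [path]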
| lovasko/swim | src/Command/Edit/Perform.hs | bsd-2-clause | 607 | 1 | 11 | 127 | 138 | 79 | 59 | -1 | -1 |
module Handler.About where
import Import
getAboutR :: Handler RepHtml
getAboutR = defaultLayout $ do
setTitle "What is DH? | About"
$(widgetFile "about")
| erochest/whatisdh | Handler/About.hs | bsd-2-clause | 166 | 0 | 10 | 34 | 42 | 21 | 21 | 6 | 1 |
{- |
- Module : Types.Internal.Channel
- Description : Representation of an IRC channel name.
- Copyright : (c) Magnus Stavngaard, 2017
- License : BSD-3
- Maintainer : [email protected]
- Stability : experimental
- Portability : POSIX
-
- An IRC channel name starts with one of the characters #, + or &, followed by
- any octets except the characters '\0', '\a', '\r', '\n', ' ', ',' and
- ':'.
-}
module Types.Internal.Channel where
import Data.Aeson (ToJSON(..), FromJSON(..), withText)
import qualified Data.Aeson.Types as Aeson
import Data.Maybe (isJust)
import qualified Data.Text as T
import qualified Parsers.Utils as PU
import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary, shrink)
import Test.QuickCheck.Gen (suchThat)
import qualified Text.Parsec as P
{- | IRC channel. -}
newtype Channel = Channel String deriving (Show, Read, Eq)
{- | Smart constructor for Channels, only allow correct IRC channels to be
- constructed. -}
channel :: String
-- ^ Source text of Channel.
-> Maybe Channel
channel chan = case P.parse (PU.channel <* P.eof) "(channel source)" chan of
Right c -> return $ Channel c
Left _ -> Nothing
{- | Get the actual channel from the Channel type. -}
getChannel :: Channel
-- ^ The Channel to get channel from.
-> String
getChannel (Channel chan) = chan
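-- Illustrative behaviour (doctest-style), assuming the parser accepts any
-- name that matches the grammar described in the module header:
--
-- >>> isJust (channel "#haskell")
-- True
-- >>> channel "haskell"
-- Nothing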
{- | Construct arbitrary IRC channels. -}
instance Arbitrary Channel where
arbitrary = Channel <$> suchThat arbitrary (isJust . channel)
shrink (Channel chan) =
[Channel chan' | chan' <- shrink chan
, (isJust . channel) chan'
]
{- | Convert Channel's to JSON. -}
instance ToJSON Channel where
toJSON (Channel chan) = Aeson.String . T.pack $ chan
{- | Parse Channel's from JSON. -}
instance FromJSON Channel where
parseJSON = withText "channel" $ return . Channel . T.unpack
| bus000/Dikunt | src/Types/Internal/Channel.hs | bsd-3-clause | 1,865 | 0 | 10 | 385 | 364 | 206 | 158 | 27 | 2 |
module Data.Graph.Libgraph.UnionFind
( UF
, fromList
, find
, union
) where
import Data.UnionFind.IntMap( Point,PointSupply,newPointSupply
, fresh,repr,descriptor)
import qualified Data.UnionFind.IntMap as UF
import Data.IntMap.Lazy(IntMap,(!))
import qualified Data.IntMap.Lazy as IM
data UF = UF {ps :: PointSupply Int, im :: IntMap (Point Int)}
fromList :: [Int] -> UF
fromList xs = foldl singleton (UF newPointSupply IM.empty) xs
singleton :: UF -> Int -> UF
singleton uf x = UF ps' $ IM.insert x p (im uf)
where (ps',p) = fresh (ps uf) x
point :: UF -> Int -> Point Int
point uf i = (im uf) ! i
-- MF TODO: isn't the find supposed to update uf?
find :: UF -> Int -> Int
find uf = (descriptor $ ps uf) . (repr $ ps uf) . (point uf)
union :: UF -> Int -> Int -> UF
union uf x y = uf { ps = UF.union (ps uf) (point uf x) (point uf y) }
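-- Illustrative behaviour (doctest-style): after a union, both elements map to
-- the same representative's descriptor.
--
-- >>> let uf = union (fromList [1,2,3]) 1 2
-- >>> find uf 1 == find uf 2
-- True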
| MaartenFaddegon/libgraph | Data/Graph/Libgraph/UnionFind.hs | bsd-3-clause | 875 | 0 | 11 | 195 | 382 | 211 | 171 | 22 | 1 |
{-# LANGUAGE TemplateHaskell #-}
------------------------------------------------------------------------------
-- | This module includes the machinery necessary to use hint to load
-- action code dynamically. It includes a Template Haskell function
-- to gather the necessary compile-time information about code
-- location, compiler arguments, etc, and bind that information into
-- the calls to the dynamic loader.
module Snap.Loader.Dynamic
( loadSnapTH
) where
------------------------------------------------------------------------------
import Control.Concurrent
import Control.Monad (liftM2, forever)
#if !MIN_VERSION_base(4,7,0)
import Data.Char (isAlphaNum)
#endif
import Data.List
import Data.Maybe (maybeToList)
import Data.Time.Clock (diffUTCTime, getCurrentTime)
import Data.Typeable
import Language.Haskell.Interpreter hiding (lift, typeOf)
import Language.Haskell.Interpreter.Unsafe
import Language.Haskell.TH
import System.Environment (getArgs)
import Snap.Core
import Snap.Loader.Dynamic.Signal
import Snap.Loader.Dynamic.Evaluator
import Snap.Loader.Dynamic.TreeWatcher
------------------------------------------------------------------------------
-- | This function derives all the information necessary to use the interpreter
-- from the compile-time environment, and compiles it in to the generated code.
--
-- This could be considered a TH wrapper around a function
--
-- > loadSnap :: Typeable a => IO a -> (a -> IO (Snap (), IO ()))
-- > -> [String] -> IO (a, Snap (), IO ())
--
-- with a magical implementation. The [String] argument is a list of
-- directories to watch for updates to trigger a reloading. Directories
-- containing code should be automatically picked up by this splice.
--
-- The generated splice executes the initialiser once, sets up the interpreter
-- for the load function, and returns the initializer's result along with the
-- interpreter's proxy handler and cleanup actions. The behavior of the proxy
-- actions will change to reflect changes in the watched files, reinterpreting
-- the load function as needed and applying it to the initializer result.
--
-- This will handle reloading the application successfully in most cases. The
-- cases in which it is certain to fail are those involving changing the types
-- of the initializer or the load function, or changing the compiler options
-- required, such as by changing/adding dependencies in the project's .cabal
-- file. In those cases, a full recompile will be needed.
--
loadSnapTH :: Q Exp -- ^ the initializer expression
-> Name -- ^ the name of the load function
-> [String] -- ^ a list of directories to watch in addition
-- to those containing code
-> Q Exp
loadSnapTH initializer action additionalWatchDirs = do
args <- runIO getArgs
let opts = getHintOpts args
srcPaths = additionalWatchDirs ++ getSrcPaths args
-- The first line is an extra type check to ensure the arguments
    -- provided have the correct types
[| do let _ = $initializer >>= $(varE action)
v <- $initializer
(handler, cleanup) <- hintSnap opts actMods srcPaths loadStr v
return (v, handler, cleanup) |]
where
actMods = maybeToList $ nameModule action
loadStr = nameBase action
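-- An illustrative use site (the initializer, action and template directory
-- below are hypothetical); snap's development-mode Main wires this up in
-- roughly the same shape:
--
-- > (conf, site, cleanup) <-
-- >     $(loadSnapTH [| getConf |] 'getActions ["snaplets/heist/templates"])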
------------------------------------------------------------------------------
-- | Convert the command-line arguments passed in to options for the
-- hint interpreter. This is somewhat brittle code, based on a few
-- experimental datapoints regarding the structure of the command-line
-- arguments cabal produces.
getHintOpts :: [String] -> [String]
getHintOpts args = removeBad opts
where
--------------------------------------------------------------------------
bad = ["-threaded", "-O", "-main-is", "-o", "--make", "-static", "-XHaskell", "-ddump-hi"]
--------------------------------------------------------------------------
removeBad = filter (\x -> not $ any (`isPrefixOf` x) bad)
--------------------------------------------------------------------------
hideAll = filter (== "-hide-all-packages") args
--------------------------------------------------------------------------
srcOpts = filter (\x -> "-i" `isPrefixOf` x) args
--------------------------------------------------------------------------
toCopy = filter (not . isSuffixOf ".hs") $
dropWhile (not . ("-package" `isPrefixOf`)) args
--------------------------------------------------------------------------
copy = map (intercalate " ")
. groupBy (\_ s -> not $ "-" `isPrefixOf` s)
--------------------------------------------------------------------------
opts = concat [hideAll, srcOpts, copy toCopy]
------------------------------------------------------------------------------
-- | This function extracts the source paths from the compilation args
getSrcPaths :: [String] -> [String]
getSrcPaths = filter (not . null) . map (drop 2) . filter srcArg
where
srcArg x = "-i" `isPrefixOf` x && not ("-idist" `isPrefixOf` x)
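-- For instance (doctest-style): @-idist...@ paths and bare @-i@ flags are
-- dropped, and the @-i@ prefix is stripped from the rest.
--
-- >>> getSrcPaths ["-isrc", "-idist/build/autogen", "-O2", "-i"]
-- ["src"]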
------------------------------------------------------------------------------
-- | This function creates the Snap handler that actually is responsible for
-- doing the dynamic loading of actions via hint, given all of the
-- configuration information that the interpreter needs. It also ensures safe
-- concurrent access to the interpreter, and caches the interpreter results for
-- a short time before allowing it to run again.
--
-- Generally, this won't be called manually. Instead, loadSnapTH will generate
-- a call to it at compile-time, calculating all the arguments from its
-- environment.
--
hintSnap :: Typeable a
=> [String]
-- ^ A list of command-line options for the interpreter
-> [String]
-- ^ A list of modules that need to be interpreted. This should
-- contain only the modules which contain the initialization,
-- cleanup, and handler actions. Everything else they require will
-- be loaded transitively.
-> [String]
-- ^ A list of paths to watch for updates
-> String
-- ^ The name of the function to load
-> a
-- ^ The value to apply the loaded function to
-> IO (Snap (), IO ())
hintSnap opts modules srcPaths action value = do
load <- runInterpreterThread
protectedHintEvaluator getCurrentState testState load
where
--------------------------------------------------------------------------
witness x = undefined $ x `asTypeOf` value :: HintLoadable
#if MIN_VERSION_base(4,7,0)
--------------------------------------------------------------------------
witnessModules = filter (`notElem` inPrelude) . map dropInternal .
map tyConModule . tyCons . typeOf $ witness
--------------------------------------------------------------------------
inPrelude = ["GHC.Prim", "GHC.Types", "GHC.Tuple"]
--------------------------------------------------------------------------
tyCons x = let (c, rs) = splitTyConApp x in c : concatMap tyCons rs
--------------------------------------------------------------------------
dropInternal s = case stripPrefix "Snap.Internal." s of
Nothing -> s
Just "Types" -> "Snap.Core"
Just x -> "Snap." ++ x
#else
--------------------------------------------------------------------------
-- This is somewhat fragile, and probably can be cleaned up with a future
    -- version of Typeable. For the moment, and for backwards compatibility, this
-- is the approach being taken.
witnessModules = map (reverse . drop 1 . dropWhile (/= '.') . reverse) .
filter (elem '.') . groupBy typePart . show . typeOf $
witness
--------------------------------------------------------------------------
typePart x y = (isAlphaNum x && isAlphaNum y) || x == '.' || y == '.'
#endif
--------------------------------------------------------------------------
runInterpreterThread = do
input <- newEmptyMVar
output <- newEmptyMVar
forkIO . forever $ do
restore <- protectHandlers
err <- unsafeRunInterpreterWithArgs opts $ do
liftIO $ restore
forever $ do
liftIO $ takeMVar input
loadModules . nub $ modules
setImports . nub $ "Prelude" : "Snap.Core" :
witnessModules ++ modules
f <- interpret action witness
liftIO . putMVar output $ f value
reset
putMVar output $ formatOnError err
return $ putMVar input () >> takeMVar output
--------------------------------------------------------------------------
formatOnError (Left err) = error $ format err
formatOnError (Right a) = a
--------------------------------------------------------------------------
getCurrentState = liftM2 (,) getCurrentTime $ getTreeStatus srcPaths
--------------------------------------------------------------------------
testState (prevTime, ts) = do
now <- getCurrentTime
if diffUTCTime now prevTime < 3
then return True
else checkTreeStatus ts
------------------------------------------------------------------------------
-- | Convert an InterpreterError to a String for presentation
format :: InterpreterError -> String
format (UnknownError e) = "Unknown interpreter error:\r\n\r\n" ++ e
format (NotAllowed e) = "Interpreter action not allowed:\r\n\r\n" ++ e
format (GhcException e) = "GHC error:\r\n\r\n" ++ e
format (WontCompile errs) = "Compile errors:\r\n\r\n" ++
(intercalate "\r\n" $ nub $ map errMsg errs)
| snapframework/snap-loader-dynamic | src/Snap/Loader/Dynamic.hs | bsd-3-clause | 10,134 | 4 | 24 | 2,313 | 1,274 | 703 | 571 | 92 | 3 |
{-# LANGUAGE CPP, GADTs, UnboxedTuples #-}
-----------------------------------------------------------------------------
--
-- Monad for Stg to C-- code generation
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module StgCmmMonad (
FCode, -- type
initC, runC, thenC, thenFC, listCs,
returnFC, fixC,
newUnique, newUniqSupply,
newLabelC, emitLabel,
emit, emitDecl, emitProc,
emitProcWithConvention, emitProcWithStackFrame,
emitOutOfLine, emitAssign, emitStore, emitComment,
emitTick, emitUnwind,
getCmm, aGraphToGraph,
getCodeR, getCode, getCodeScoped, getHeapUsage,
mkCmmIfThenElse, mkCmmIfThen, mkCmmIfGoto,
mkCall, mkCmmCall,
forkClosureBody, forkLneBody, forkAlts, codeOnly,
ConTagZ,
Sequel(..), ReturnKind(..),
withSequel, getSequel,
setTickyCtrLabel, getTickyCtrLabel,
tickScope, getTickScope,
withUpdFrameOff, getUpdFrameOff, initUpdFrameOff,
HeapUsage(..), VirtualHpOffset, initHpUsage,
getHpUsage, setHpUsage, heapHWM,
setVirtHp, getVirtHp, setRealHp,
getModuleName,
-- ideally we wouldn't export these, but some other modules access internal state
getState, setState, getSelfLoop, withSelfLoop, getInfoDown, getDynFlags, getThisPackage,
-- more localised access to monad state
CgIdInfo(..),
getBinds, setBinds,
-- out of general friendliness, we also export ...
CgInfoDownwards(..), CgState(..) -- non-abstract
) where
#include "HsVersions.h"
import Cmm
import StgCmmClosure
import DynFlags
import Hoopl
import Maybes
import MkGraph
import BlockId
import CLabel
import SMRep
import Module
import Id
import VarEnv
import OrdList
import Unique
import UniqSupply
import FastString
import Outputable
import qualified Control.Applicative as A
import Control.Monad
import Data.List
import Prelude hiding( sequence, succ )
infixr 9 `thenC` -- Right-associative!
infixr 9 `thenFC`
--------------------------------------------------------
-- The FCode monad and its types
--
-- FCode is the monad plumbed through the Stg->Cmm code generator, and
-- the Cmm parser. It contains the following things:
--
-- - A writer monad, collecting:
-- - code for the current function, in the form of a CmmAGraph.
-- The function "emit" appends more code to this.
-- - the top-level CmmDecls accumulated so far
--
-- - A state monad with:
-- - the local bindings in scope
-- - the current heap usage
-- - a UniqSupply
--
-- - A reader monad, for CgInfoDownwards, containing
-- - DynFlags,
-- - the current Module
-- - the update-frame offset
-- - the ticky counter label
-- - the Sequel (the continuation to return to)
-- - the self-recursive tail call information
--------------------------------------------------------
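-- As a small illustration of the shape described above (not part of this
-- module's API; modelled loosely on helpers such as StgCmmUtils.assignTemp),
-- a code-generation fragment typically reads the DynFlags, draws a Unique,
-- emits a statement and returns a value:
--
-- > assignToTemp :: CmmExpr -> FCode CmmExpr
-- > assignToTemp e = do dflags <- getDynFlags
-- >                     u <- newUnique
-- >                     let reg = LocalReg u (cmmExprType dflags e)
-- >                     emitAssign (CmmLocal reg) e
-- >                     return (CmmReg (CmmLocal reg))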
newtype FCode a = FCode (CgInfoDownwards -> CgState -> (# a, CgState #))
instance Functor FCode where
fmap f (FCode g) = FCode $ \i s -> case g i s of (# a, s' #) -> (# f a, s' #)
instance A.Applicative FCode where
pure = returnFC
(<*>) = ap
instance Monad FCode where
(>>=) = thenFC
return = A.pure
{-# INLINE thenC #-}
{-# INLINE thenFC #-}
{-# INLINE returnFC #-}
initC :: IO CgState
initC = do { uniqs <- mkSplitUniqSupply 'c'
; return (initCgState uniqs) }
runC :: DynFlags -> Module -> CgState -> FCode a -> (a,CgState)
runC dflags mod st fcode = doFCode fcode (initCgInfoDown dflags mod) st
returnFC :: a -> FCode a
returnFC val = FCode (\_info_down state -> (# val, state #))
thenC :: FCode () -> FCode a -> FCode a
thenC (FCode m) (FCode k) =
FCode $ \info_down state -> case m info_down state of
(# _,new_state #) -> k info_down new_state
listCs :: [FCode ()] -> FCode ()
listCs [] = return ()
listCs (fc:fcs) = do
fc
listCs fcs
thenFC :: FCode a -> (a -> FCode c) -> FCode c
thenFC (FCode m) k = FCode $
\info_down state ->
case m info_down state of
(# m_result, new_state #) ->
case k m_result of
FCode kcode -> kcode info_down new_state
fixC :: (a -> FCode a) -> FCode a
fixC fcode = FCode (
\info_down state ->
let
(v,s) = doFCode (fcode v) info_down state
in
(# v, s #)
)
--------------------------------------------------------
-- The code generator environment
--------------------------------------------------------
-- This monadery has some information that it only passes
-- *downwards*, as well as some ``state'' which is modified
-- as we go along.
data CgInfoDownwards -- information only passed *downwards* by the monad
= MkCgInfoDown {
cgd_dflags :: DynFlags,
cgd_mod :: Module, -- Module being compiled
cgd_updfr_off :: UpdFrameOffset, -- Size of current update frame
cgd_ticky :: CLabel, -- Current destination for ticky counts
cgd_sequel :: Sequel, -- What to do at end of basic block
cgd_self_loop :: Maybe SelfLoopInfo,-- Which tail calls can be compiled
-- as local jumps? See Note
-- [Self-recursive tail calls] in
-- StgCmmExpr
cgd_tick_scope:: CmmTickScope -- Tick scope for new blocks & ticks
}
type CgBindings = IdEnv CgIdInfo
data CgIdInfo
= CgIdInfo
{ cg_id :: Id -- Id that this is the info for
-- Can differ from the Id at occurrence sites by
-- virtue of being externalised, for splittable C
-- See Note [Externalise when splitting]
, cg_lf :: LambdaFormInfo
, cg_loc :: CgLoc -- CmmExpr for the *tagged* value
}
-- Note [Externalise when splitting]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- If we're splitting the object with -fsplit-objs, we need to
-- externalise *all* the top-level names, and then make sure we only
-- use the externalised one in any C label we use which refers to this
-- name.
instance Outputable CgIdInfo where
ppr (CgIdInfo { cg_id = id, cg_loc = loc })
= ppr id <+> text "-->" <+> ppr loc
-- Sequel tells what to do with the result of this expression
data Sequel
= Return Bool -- Return result(s) to continuation found on the stack.
-- True <=> the continuation is update code (???)
| AssignTo
[LocalReg] -- Put result(s) in these regs and fall through
-- NB: no void arguments here
--
Bool -- Should we adjust the heap pointer back to
-- recover space that's unused on this path?
-- We need to do this only if the expression
-- may allocate (e.g. it's a foreign call or
-- allocating primOp)
instance Outputable Sequel where
ppr (Return b) = text "Return" <+> ppr b
ppr (AssignTo regs b) = text "AssignTo" <+> ppr regs <+> ppr b
-- See Note [sharing continuations] below
data ReturnKind
= AssignedDirectly
| ReturnedTo BlockId ByteOff
-- Note [sharing continuations]
--
-- ReturnKind says how the expression being compiled returned its
-- results: either by assigning directly to the registers specified
-- by the Sequel, or by returning to a continuation that does the
-- assignments. The point of this is we might be able to re-use the
-- continuation in a subsequent heap-check. Consider:
--
-- case f x of z
-- True -> <True code>
-- False -> <False code>
--
-- Naively we would generate
--
-- R2 = x -- argument to f
-- Sp[young(L1)] = L1
-- call f returns to L1
-- L1:
-- z = R1
-- if (z & 1) then Ltrue else Lfalse
-- Ltrue:
-- Hp = Hp + 24
-- if (Hp > HpLim) then L4 else L7
-- L4:
-- HpAlloc = 24
-- goto L5
-- L5:
-- R1 = z
-- Sp[young(L6)] = L6
-- call stg_gc_unpt_r1 returns to L6
-- L6:
-- z = R1
-- goto L1
-- L7:
-- <True code>
-- Lfalse:
-- <False code>
--
-- We want the gc call in L4 to return to L1, and discard L6. Note
-- that not only can we share L1 and L6, but the assignment of the
-- return address in L4 is unnecessary because the return address for
-- L1 is already on the stack. We used to catch the sharing of L1 and
-- L6 in the common-block-eliminator, but not the unnecessary return
-- address assignment.
--
-- Since this case is so common I decided to make it more explicit and
-- robust by programming the sharing directly, rather than relying on
-- the common-block eliminator to catch it. This makes
-- common-block-elimination an optional optimisation, and furthermore
-- generates less code in the first place that we have to subsequently
-- clean up.
--
-- There are some rarer cases of common blocks that we don't catch
-- this way, but that's ok. Common-block-elimination is still available
-- to catch them when optimisation is enabled. Some examples are:
--
-- - when both the True and False branches do a heap check, we
-- can share the heap-check failure code L4a and maybe L4
--
-- - in a case-of-case, there might be multiple continuations that
-- we can common up.
--
-- It is always safe to use AssignedDirectly. Expressions that jump
-- to the continuation from multiple places (e.g. case expressions)
-- fall back to AssignedDirectly.
--
initCgInfoDown :: DynFlags -> Module -> CgInfoDownwards
initCgInfoDown dflags mod
= MkCgInfoDown { cgd_dflags = dflags
, cgd_mod = mod
, cgd_updfr_off = initUpdFrameOff dflags
, cgd_ticky = mkTopTickyCtrLabel
, cgd_sequel = initSequel
, cgd_self_loop = Nothing
, cgd_tick_scope= GlobalScope }
initSequel :: Sequel
initSequel = Return False
initUpdFrameOff :: DynFlags -> UpdFrameOffset
initUpdFrameOff dflags = widthInBytes (wordWidth dflags) -- space for the RA
--------------------------------------------------------
-- The code generator state
--------------------------------------------------------
data CgState
= MkCgState {
cgs_stmts :: CmmAGraph, -- Current procedure
cgs_tops :: OrdList CmmDecl,
-- Other procedures and data blocks in this compilation unit
-- Both are ordered only so that we can
-- reduce forward references, when it's easy to do so
cgs_binds :: CgBindings,
cgs_hp_usg :: HeapUsage,
cgs_uniqs :: UniqSupply }
data HeapUsage -- See Note [Virtual and real heap pointers]
= HeapUsage {
virtHp :: VirtualHpOffset, -- Virtual offset of highest-allocated word
-- Incremented whenever we allocate
realHp :: VirtualHpOffset -- realHp: Virtual offset of real heap ptr
-- Used in instruction addressing modes
}
type VirtualHpOffset = WordOff
{- Note [Virtual and real heap pointers]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The code generator can allocate one or more objects contiguously, performing
one heap check to cover allocation of all the objects at once. Let's call
this little chunk of heap space an "allocation chunk". The code generator
will emit code to
* Perform a heap-exhaustion check
* Move the heap pointer to the end of the allocation chunk
* Allocate multiple objects within the chunk
The code generator uses VirtualHpOffsets to address words within a
single allocation chunk; these start at one and increase positively.
The first word of the chunk has VirtualHpOffset=1, the second has
VirtualHpOffset=2, and so on.
* The field realHp tracks (the VirtualHpOffset) where the real Hp
register is pointing. Typically it'll be pointing to the end of the
allocation chunk.
* The field virtHp gives the VirtualHpOffset of the highest-allocated
word so far. It starts at zero (meaning no word has been allocated),
and increases whenever an object is allocated.
The difference between realHp and virtHp gives the offset from the
real Hp register of a particular word in the allocation chunk. This
is what getHpRelOffset does. Since the returned offset is relative
to the real Hp register, it is valid only until you change the real
Hp register. (Changing virtHp doesn't matter.)
-}
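-- A small worked example of the bookkeeping above (numbers illustrative):
-- allocating a 2-word object and then a 3-word object in a single chunk
-- leaves virtHp = 5; once the heap check has advanced the real Hp register
-- to the end of the chunk, realHp = 5 as well, so the first word of the
-- first object (virtual offset 1) sits at Hp-relative offset 1 - 5 = -4
-- words, which is exactly what getHpRelOffset computes.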
initCgState :: UniqSupply -> CgState
initCgState uniqs
= MkCgState { cgs_stmts = mkNop
, cgs_tops = nilOL
, cgs_binds = emptyVarEnv
, cgs_hp_usg = initHpUsage
, cgs_uniqs = uniqs }
stateIncUsage :: CgState -> CgState -> CgState
-- @stateIncUsage e1 e2@ incorporates in e1
-- the heap high water mark found in e2.
stateIncUsage s1 s2@(MkCgState { cgs_hp_usg = hp_usg })
= s1 { cgs_hp_usg = cgs_hp_usg s1 `maxHpHw` virtHp hp_usg }
`addCodeBlocksFrom` s2
addCodeBlocksFrom :: CgState -> CgState -> CgState
-- Add code blocks from the latter to the former
-- (The cgs_stmts will often be empty, but not always; see codeOnly)
s1 `addCodeBlocksFrom` s2
= s1 { cgs_stmts = cgs_stmts s1 MkGraph.<*> cgs_stmts s2,
cgs_tops = cgs_tops s1 `appOL` cgs_tops s2 }
-- The heap high water mark is the larger of virtHp and hwHp. The latter
-- only records the high water marks of forked-off branches, so to find the
-- heap high water mark you have to take the max of virtHp and hwHp. Remember,
-- virtHp never retreats!
--
-- Note Jan 04: ok, so why do we only look at the virtual Hp??
heapHWM :: HeapUsage -> VirtualHpOffset
heapHWM = virtHp
initHpUsage :: HeapUsage
initHpUsage = HeapUsage { virtHp = 0, realHp = 0 }
maxHpHw :: HeapUsage -> VirtualHpOffset -> HeapUsage
hp_usg `maxHpHw` hw = hp_usg { virtHp = virtHp hp_usg `max` hw }
--------------------------------------------------------
-- Operators for getting and setting the state and "info_down".
--------------------------------------------------------
getState :: FCode CgState
getState = FCode $ \_info_down state -> (# state, state #)
setState :: CgState -> FCode ()
setState state = FCode $ \_info_down _ -> (# (), state #)
getHpUsage :: FCode HeapUsage
getHpUsage = do
state <- getState
return $ cgs_hp_usg state
setHpUsage :: HeapUsage -> FCode ()
setHpUsage new_hp_usg = do
state <- getState
setState $ state {cgs_hp_usg = new_hp_usg}
setVirtHp :: VirtualHpOffset -> FCode ()
setVirtHp new_virtHp
= do { hp_usage <- getHpUsage
; setHpUsage (hp_usage {virtHp = new_virtHp}) }
getVirtHp :: FCode VirtualHpOffset
getVirtHp
= do { hp_usage <- getHpUsage
; return (virtHp hp_usage) }
setRealHp :: VirtualHpOffset -> FCode ()
setRealHp new_realHp
= do { hp_usage <- getHpUsage
; setHpUsage (hp_usage {realHp = new_realHp}) }
getBinds :: FCode CgBindings
getBinds = do
state <- getState
return $ cgs_binds state
setBinds :: CgBindings -> FCode ()
setBinds new_binds = do
state <- getState
setState $ state {cgs_binds = new_binds}
withState :: FCode a -> CgState -> FCode (a,CgState)
withState (FCode fcode) newstate = FCode $ \info_down state ->
case fcode info_down newstate of
(# retval, state2 #) -> (# (retval,state2), state #)
newUniqSupply :: FCode UniqSupply
newUniqSupply = do
state <- getState
let (us1, us2) = splitUniqSupply (cgs_uniqs state)
setState $ state { cgs_uniqs = us1 }
return us2
newUnique :: FCode Unique
newUnique = do
state <- getState
let (u,us') = takeUniqFromSupply (cgs_uniqs state)
setState $ state { cgs_uniqs = us' }
return u
------------------
getInfoDown :: FCode CgInfoDownwards
getInfoDown = FCode $ \info_down state -> (# info_down,state #)
getSelfLoop :: FCode (Maybe SelfLoopInfo)
getSelfLoop = do
info_down <- getInfoDown
return $ cgd_self_loop info_down
withSelfLoop :: SelfLoopInfo -> FCode a -> FCode a
withSelfLoop self_loop code = do
info_down <- getInfoDown
withInfoDown code (info_down {cgd_self_loop = Just self_loop})
instance HasDynFlags FCode where
getDynFlags = liftM cgd_dflags getInfoDown
getThisPackage :: FCode UnitId
getThisPackage = liftM thisPackage getDynFlags
withInfoDown :: FCode a -> CgInfoDownwards -> FCode a
withInfoDown (FCode fcode) info_down = FCode $ \_ state -> fcode info_down state
doFCode :: FCode a -> CgInfoDownwards -> CgState -> (a,CgState)
doFCode (FCode fcode) info_down state =
case fcode info_down state of
(# a, s #) -> ( a, s )
-- ----------------------------------------------------------------------------
-- Get the current module name
getModuleName :: FCode Module
getModuleName = do { info <- getInfoDown; return (cgd_mod info) }
-- ----------------------------------------------------------------------------
-- Get/set the end-of-block info
withSequel :: Sequel -> FCode a -> FCode a
withSequel sequel code
= do { info <- getInfoDown
; withInfoDown code (info {cgd_sequel = sequel, cgd_self_loop = Nothing }) }
getSequel :: FCode Sequel
getSequel = do { info <- getInfoDown
; return (cgd_sequel info) }
-- ----------------------------------------------------------------------------
-- Get/set the size of the update frame
-- We keep track of the size of the update frame so that we
-- can set the stack pointer to the proper address on return
-- (or tail call) from the closure.
-- There should be at most one update frame for each closure.
-- Note: I'm including the size of the original return address
-- in the size of the update frame -- hence the default case on `get'.
withUpdFrameOff :: UpdFrameOffset -> FCode a -> FCode a
withUpdFrameOff size code
= do { info <- getInfoDown
; withInfoDown code (info {cgd_updfr_off = size }) }
getUpdFrameOff :: FCode UpdFrameOffset
getUpdFrameOff
= do { info <- getInfoDown
; return $ cgd_updfr_off info }
-- ----------------------------------------------------------------------------
-- Get/set the current ticky counter label
getTickyCtrLabel :: FCode CLabel
getTickyCtrLabel = do
info <- getInfoDown
return (cgd_ticky info)
setTickyCtrLabel :: CLabel -> FCode a -> FCode a
setTickyCtrLabel ticky code = do
info <- getInfoDown
withInfoDown code (info {cgd_ticky = ticky})
-- ----------------------------------------------------------------------------
-- Manage tick scopes
-- | The current tick scope. We will assign this to generated blocks.
getTickScope :: FCode CmmTickScope
getTickScope = do
info <- getInfoDown
return (cgd_tick_scope info)
-- | Places blocks generated by the given code into a fresh
-- (sub-)scope. This will make sure that Cmm annotations in our scope
-- will apply to the Cmm blocks generated therein - but not the other
-- way around.
tickScope :: FCode a -> FCode a
tickScope code = do
info <- getInfoDown
if debugLevel (cgd_dflags info) == 0 then code else do
u <- newUnique
let scope' = SubScope u (cgd_tick_scope info)
withInfoDown code info{ cgd_tick_scope = scope' }
--------------------------------------------------------
-- Forking
--------------------------------------------------------
forkClosureBody :: FCode () -> FCode ()
-- forkClosureBody compiles body_code in environment where:
-- - sequel, update stack frame and self loop info are
-- set to fresh values
-- - state is set to a fresh value, except for local bindings
-- that are passed in unchanged. It's up to the enclosed code to
-- re-bind the free variables to a field of the closure.
forkClosureBody body_code
= do { dflags <- getDynFlags
; info <- getInfoDown
; us <- newUniqSupply
; state <- getState
; let body_info_down = info { cgd_sequel = initSequel
, cgd_updfr_off = initUpdFrameOff dflags
, cgd_self_loop = Nothing }
fork_state_in = (initCgState us) { cgs_binds = cgs_binds state }
((),fork_state_out) = doFCode body_code body_info_down fork_state_in
; setState $ state `addCodeBlocksFrom` fork_state_out }
forkLneBody :: FCode a -> FCode a
-- 'forkLneBody' takes a body of let-no-escape binding and compiles
-- it in the *current* environment, returning the graph thus constructed.
--
-- The current environment is passed on completely unchanged to
-- the successor. In particular, any heap usage from the enclosed
-- code is discarded; it should deal with its own heap consumption.
forkLneBody body_code
= do { info_down <- getInfoDown
; us <- newUniqSupply
; state <- getState
; let fork_state_in = (initCgState us) { cgs_binds = cgs_binds state }
(result, fork_state_out) = doFCode body_code info_down fork_state_in
; setState $ state `addCodeBlocksFrom` fork_state_out
; return result }
codeOnly :: FCode () -> FCode ()
-- Emit any code from the inner thing into the outer thing
-- Do not affect anything else in the outer state
-- Used in almost-circular code to prevent false loop dependencies
codeOnly body_code
= do { info_down <- getInfoDown
; us <- newUniqSupply
; state <- getState
; let fork_state_in = (initCgState us) { cgs_binds = cgs_binds state
, cgs_hp_usg = cgs_hp_usg state }
((), fork_state_out) = doFCode body_code info_down fork_state_in
; setState $ state `addCodeBlocksFrom` fork_state_out }
forkAlts :: [FCode a] -> FCode [a]
-- (forkAlts bs) takes fcodes 'bs' for the branches of a 'case' and
-- compiles each in the current environment. The current environment is
-- passed on unmodified, except that the virtual Hp is moved on to the
-- worst virtual Hp for the branches.
forkAlts branch_fcodes
= do { info_down <- getInfoDown
; us <- newUniqSupply
; state <- getState
; let compile us branch
= (us2, doFCode branch info_down branch_state)
where
(us1,us2) = splitUniqSupply us
branch_state = (initCgState us1) {
cgs_binds = cgs_binds state
, cgs_hp_usg = cgs_hp_usg state }
(_us, results) = mapAccumL compile us branch_fcodes
(branch_results, branch_out_states) = unzip results
; setState $ foldl stateIncUsage state branch_out_states
-- NB foldl. state is the *left* argument to stateIncUsage
; return branch_results }
-- collect the code emitted by an FCode computation
getCodeR :: FCode a -> FCode (a, CmmAGraph)
getCodeR fcode
= do { state1 <- getState
; (a, state2) <- withState fcode (state1 { cgs_stmts = mkNop })
; setState $ state2 { cgs_stmts = cgs_stmts state1 }
; return (a, cgs_stmts state2) }
getCode :: FCode a -> FCode CmmAGraph
getCode fcode = do { (_,stmts) <- getCodeR fcode; return stmts }
-- | Generate code into a fresh tick (sub-)scope and gather generated code
getCodeScoped :: FCode a -> FCode (a, CmmAGraphScoped)
getCodeScoped fcode
= do { state1 <- getState
; ((a, tscope), state2) <-
tickScope $
flip withState state1 { cgs_stmts = mkNop } $
do { a <- fcode
; scp <- getTickScope
; return (a, scp) }
; setState $ state2 { cgs_stmts = cgs_stmts state1 }
; return (a, (cgs_stmts state2, tscope)) }
-- 'getHeapUsage' applies a function to the amount of heap that it uses.
-- It initialises the heap usage to zeros, and passes on an unchanged
-- heap usage.
--
-- It is usually a prelude to performing a GC check, so everything must
-- be in a tidy and consistent state.
--
-- Note the slightly subtle fixed point behaviour needed here
getHeapUsage :: (VirtualHpOffset -> FCode a) -> FCode a
getHeapUsage fcode
= do { info_down <- getInfoDown
; state <- getState
; let fstate_in = state { cgs_hp_usg = initHpUsage }
(r, fstate_out) = doFCode (fcode hp_hw) info_down fstate_in
hp_hw = heapHWM (cgs_hp_usg fstate_out) -- Loop here!
; setState $ fstate_out { cgs_hp_usg = cgs_hp_usg state }
; return r }
-- ----------------------------------------------------------------------------
-- Combinators for emitting code
emitCgStmt :: CgStmt -> FCode ()
emitCgStmt stmt
= do { state <- getState
; setState $ state { cgs_stmts = cgs_stmts state `snocOL` stmt }
}
emitLabel :: BlockId -> FCode ()
emitLabel id = do tscope <- getTickScope
emitCgStmt (CgLabel id tscope)
emitComment :: FastString -> FCode ()
#if 0 /* def DEBUG */
emitComment s = emitCgStmt (CgStmt (CmmComment s))
#else
emitComment _ = return ()
#endif
emitTick :: CmmTickish -> FCode ()
emitTick = emitCgStmt . CgStmt . CmmTick
emitUnwind :: GlobalReg -> CmmExpr -> FCode ()
emitUnwind g e = do
dflags <- getDynFlags
when (debugLevel dflags > 0) $
emitCgStmt $ CgStmt $ CmmUnwind g e
emitAssign :: CmmReg -> CmmExpr -> FCode ()
emitAssign l r = emitCgStmt (CgStmt (CmmAssign l r))
emitStore :: CmmExpr -> CmmExpr -> FCode ()
emitStore l r = emitCgStmt (CgStmt (CmmStore l r))
newLabelC :: FCode BlockId
newLabelC = do { u <- newUnique
; return $ mkBlockId u }
emit :: CmmAGraph -> FCode ()
emit ag
= do { state <- getState
; setState $ state { cgs_stmts = cgs_stmts state MkGraph.<*> ag } }
emitDecl :: CmmDecl -> FCode ()
emitDecl decl
= do { state <- getState
; setState $ state { cgs_tops = cgs_tops state `snocOL` decl } }
emitOutOfLine :: BlockId -> CmmAGraphScoped -> FCode ()
emitOutOfLine l (stmts, tscope) = emitCgStmt (CgFork l stmts tscope)
emitProcWithStackFrame
:: Convention -- entry convention
-> Maybe CmmInfoTable -- info table?
-> CLabel -- label for the proc
-> [CmmFormal] -- stack frame
-> [CmmFormal] -- arguments
-> CmmAGraphScoped -- code
-> Bool -- do stack layout?
-> FCode ()
emitProcWithStackFrame _conv mb_info lbl _stk_args [] blocks False
= do { dflags <- getDynFlags
; emitProc_ mb_info lbl [] blocks (widthInBytes (wordWidth dflags)) False
}
emitProcWithStackFrame conv mb_info lbl stk_args args (graph, tscope) True
-- do layout
= do { dflags <- getDynFlags
; let (offset, live, entry) = mkCallEntry dflags conv args stk_args
graph' = entry MkGraph.<*> graph
; emitProc_ mb_info lbl live (graph', tscope) offset True
}
emitProcWithStackFrame _ _ _ _ _ _ _ = panic "emitProcWithStackFrame"
emitProcWithConvention :: Convention -> Maybe CmmInfoTable -> CLabel
-> [CmmFormal]
-> CmmAGraphScoped
-> FCode ()
emitProcWithConvention conv mb_info lbl args blocks
= emitProcWithStackFrame conv mb_info lbl [] args blocks True
emitProc :: Maybe CmmInfoTable -> CLabel -> [GlobalReg] -> CmmAGraphScoped
-> Int -> FCode ()
emitProc mb_info lbl live blocks offset
= emitProc_ mb_info lbl live blocks offset True
emitProc_ :: Maybe CmmInfoTable -> CLabel -> [GlobalReg] -> CmmAGraphScoped
-> Int -> Bool -> FCode ()
emitProc_ mb_info lbl live blocks offset do_layout
= do { dflags <- getDynFlags
; l <- newLabelC
; let
blks = labelAGraph l blocks
infos | Just info <- mb_info = mapSingleton (g_entry blks) info
| otherwise = mapEmpty
sinfo = StackInfo { arg_space = offset
, updfr_space = Just (initUpdFrameOff dflags)
, do_layout = do_layout }
tinfo = TopInfo { info_tbls = infos
, stack_info=sinfo}
proc_block = CmmProc tinfo lbl live blks
; state <- getState
; setState $ state { cgs_tops = cgs_tops state `snocOL` proc_block } }
getCmm :: FCode () -> FCode CmmGroup
-- Get all the CmmTops (there should be no stmts)
-- Return a single Cmm which may be split from other Cmms by
-- object splitting (at a later stage)
getCmm code
= do { state1 <- getState
; ((), state2) <- withState code (state1 { cgs_tops = nilOL })
; setState $ state2 { cgs_tops = cgs_tops state1 }
; return (fromOL (cgs_tops state2)) }
mkCmmIfThenElse :: CmmExpr -> CmmAGraph -> CmmAGraph -> FCode CmmAGraph
mkCmmIfThenElse e tbranch fbranch = do
tscp <- getTickScope
endif <- newLabelC
tid <- newLabelC
fid <- newLabelC
return $ catAGraphs [ mkCbranch e tid fid Nothing
, mkLabel tid tscp, tbranch, mkBranch endif
, mkLabel fid tscp, fbranch, mkLabel endif tscp ]
mkCmmIfGoto :: CmmExpr -> BlockId -> FCode CmmAGraph
mkCmmIfGoto e tid = do
endif <- newLabelC
tscp <- getTickScope
return $ catAGraphs [ mkCbranch e tid endif Nothing, mkLabel endif tscp ]
mkCmmIfThen :: CmmExpr -> CmmAGraph -> FCode CmmAGraph
mkCmmIfThen e tbranch = do
endif <- newLabelC
tid <- newLabelC
tscp <- getTickScope
return $ catAGraphs [ mkCbranch e tid endif Nothing
, mkLabel tid tscp, tbranch, mkLabel endif tscp ]
mkCall :: CmmExpr -> (Convention, Convention) -> [CmmFormal] -> [CmmActual]
-> UpdFrameOffset -> [CmmActual] -> FCode CmmAGraph
mkCall f (callConv, retConv) results actuals updfr_off extra_stack = do
dflags <- getDynFlags
k <- newLabelC
tscp <- getTickScope
let area = Young k
(off, _, copyin) = copyInOflow dflags retConv area results []
copyout = mkCallReturnsTo dflags f callConv actuals k off updfr_off extra_stack
return $ catAGraphs [copyout, mkLabel k tscp, copyin]
mkCmmCall :: CmmExpr -> [CmmFormal] -> [CmmActual] -> UpdFrameOffset
-> FCode CmmAGraph
mkCmmCall f results actuals updfr_off
= mkCall f (NativeDirectCall, NativeReturn) results actuals updfr_off []
-- ----------------------------------------------------------------------------
-- turn CmmAGraph into CmmGraph, for making a new proc.
aGraphToGraph :: CmmAGraphScoped -> FCode CmmGraph
aGraphToGraph stmts
= do { l <- newLabelC
; return (labelAGraph l stmts) }
| GaloisInc/halvm-ghc | compiler/codeGen/StgCmmMonad.hs | bsd-3-clause | 31,044 | 0 | 15 | 8,248 | 5,913 | 3,227 | 2,686 | 469 | 2 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
module Tinfoil.Data.Hash(
Hash(..)
, HashFunction(..)
, parseHashFunction
, renderHash
, renderHashFunction
) where
import Control.DeepSeq.Generics (genericRnf)
import Data.ByteString (ByteString)
import GHC.Generics (Generic)
import P
import Tinfoil.Encode
-- | Binary representation of a hash.
newtype Hash =
Hash {
unHash :: ByteString
} deriving (Eq, Show, Generic)
instance NFData Hash where rnf = genericRnf
renderHash :: Hash -> Text
renderHash = hexEncode . unHash
-- | Cryptographic hash function designator.
data HashFunction =
SHA256
deriving (Eq, Show, Generic, Enum, Bounded)
instance NFData HashFunction where rnf = genericRnf
renderHashFunction :: HashFunction -> Text
renderHashFunction SHA256 = "SHA256"
parseHashFunction :: Text -> Maybe' HashFunction
parseHashFunction "SHA256" = Just' SHA256
parseHashFunction _ = Nothing'
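-- Illustrative examples (not part of the original module), following the
-- definitions above:
--
-- >>> renderHashFunction SHA256
-- "SHA256"
-- >>> parseHashFunction "SHA256"
-- Just' SHA256
-- >>> parseHashFunction "MD5"
-- Nothing'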
| ambiata/tinfoil | src/Tinfoil/Data/Hash.hs | bsd-3-clause | 1,075 | 0 | 6 | 215 | 227 | 133 | 94 | 31 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module GUI (gui) where
import Control.Monad
import Graphics.UI.WX (Prop(..))
import ListUtils (sortOn)
import Parser (CostCentre(..), CostCentreData(..), TimeAlloc(..))
import TextUtils (showText)
import qualified Data.Text as Text
import qualified Graphics.UI.WX as WX
import qualified Graphics.UI.WXCore as WXC
treeAppendText :: WXC.TreeCtrl a -> WXC.TreeItem -> String -> IO WXC.TreeItem
treeAppendText tree node text = WXC.treeCtrlAppendItem tree node text (-1) (-1) WX.objectNull
addCostCentres :: WXC.TreeCtrl a -> WXC.TreeItem -> [CostCentre] -> IO ()
addCostCentres tree node ccs =
forM_ (sortOn (negate . timePercent . ccInherited . ccData) ccs) $ \cc -> do
let
cd = ccData cc
text = Text.unwords
[ Text.concat [ccModule cd, ".", ccName cd]
, showText (timePercent (ccInherited cd))
, Text.concat ["(", showText (timePercent (ccIndividual cd)), ")"]
]
item <- treeAppendText tree node $ Text.unpack text
addCostCentres tree item $ ccChildren cc
gui :: [CostCentre] -> IO ()
gui costCentres = do
f <- WX.frame [WX.text := "Profile analyzer"]
tree <- WX.treeCtrl f [] -- [WX.color := WX.rgb 255 255 255, WX.bgcolor := WX.rgb 0 0 0]
root <- WXC.treeCtrlAddRoot tree "Cost centres" (-1) (-1) WX.objectNull
addCostCentres tree root costCentres
WXC.treeCtrlExpand tree root
return ()
| Peaker/HaskProfileGui | GUI.hs | bsd-3-clause | 1,396 | 0 | 20 | 262 | 499 | 263 | 236 | 31 | 1 |
#!/usr/bin/env runstaskell
import System.IO.Temp
import System.FilePath
main :: IO ()
main = withSystemTempDirectory "runstaskell-test" $ \ dir -> do
writeFile (dir </> "foo") "03-success"
putStrLn =<< readFile (dir </> "foo")
| soenkehahn/runstaskell | test/03.hs | bsd-3-clause | 233 | 0 | 12 | 36 | 72 | 37 | 35 | 6 | 1 |
module Matterhorn.Events.PostListOverlay where
import Prelude ()
import Matterhorn.Prelude
import qualified Graphics.Vty as Vty
import Matterhorn.Types
import Matterhorn.Events.Keybindings
import Matterhorn.State.PostListOverlay
onEventPostListOverlay :: Vty.Event -> MH ()
onEventPostListOverlay =
void . handleKeyboardEvent postListOverlayKeybindings (const $ return ())
-- | The keybindings we want to use while viewing a post list overlay
postListOverlayKeybindings :: KeyConfig -> KeyHandlerMap
postListOverlayKeybindings = mkKeybindings postListOverlayKeyHandlers
postListOverlayKeyHandlers :: [KeyEventHandler]
postListOverlayKeyHandlers =
[ mkKb CancelEvent "Exit post browsing" exitPostListMode
, mkKb SelectUpEvent "Select the previous message" postListSelectUp
, mkKb SelectDownEvent "Select the next message" postListSelectDown
, mkKb FlagMessageEvent "Toggle the selected message flag" postListUnflagSelected
, mkKb ActivateListItemEvent "Jump to and select current message" postListJumpToCurrent
]
| matterhorn-chat/matterhorn | src/Matterhorn/Events/PostListOverlay.hs | bsd-3-clause | 1,083 | 0 | 10 | 177 | 166 | 93 | 73 | 19 | 1 |
module Data.Origami.Internal.TestFiles.Param where
data ParamTy a b c d = ParamTy
type ParamTySyn a b c d = ParamTy a a a a
data PT = PT (ParamTy String String String String)
data PTS = PTS (ParamTySyn String Char Bool ())
| nedervold/origami | tests/Data/Origami/Internal/TestFiles/Param.hs | bsd-3-clause | 227 | 0 | 9 | 45 | 83 | 50 | 33 | 5 | 0 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-missing-methods #-}
-- | 'JSChan' provides the same functionality and
-- concurrency abstraction in Javascript computations
-- as 'Control.Concurrent.Chan' in Haskell.
module Language.Sunroof.JS.Chan
( JSChan
, newChan
, writeChan, readChan
) where
import Data.Boolean ( IfB(..), EqB(..), BooleanOf )
import Language.Sunroof.Classes
import Language.Sunroof.Types
import Language.Sunroof.Concurrent ( forkJS )
import Language.Sunroof.Selector ( (!) )
import Language.Sunroof.JS.Bool
import Language.Sunroof.JS.Object ( JSObject )
import Language.Sunroof.JS.Array
( JSArray
, newArray, length'
, push, shift )
-- -------------------------------------------------------------
-- JSChan Type
-- -------------------------------------------------------------
-- | 'JSChan' abstraction. The type parameter gives
-- the type of values held in the channel.
newtype JSChan a = JSChan JSObject
instance (SunroofArgument o) => Show (JSChan o) where
show (JSChan o) = show o
instance (SunroofArgument o) => Sunroof (JSChan o) where
unbox (JSChan o) = unbox o
box o = JSChan (box o)
instance (SunroofArgument o) => IfB (JSChan o) where
ifB = jsIfB
type instance BooleanOf (JSChan o) = JSBool
instance (SunroofArgument o) => JSTuple (JSChan o) where
type Internals (JSChan o) =
( (JSArray (JSContinuation (JSContinuation o))) -- callbacks of written data
, (JSArray (JSContinuation o)) -- callbacks of waiting readers
)
match o = (o ! attr "written", o ! attr "waiting")
tuple (written,waiting) = do
o <- new "Object" ()
o # attr "written" := written
o # attr "waiting" := waiting
return (JSChan o)
-- | Reference equality, not value equality.
instance (SunroofArgument o) => EqB (JSChan o) where
(JSChan a) ==* (JSChan b) = a ==* b
-- -------------------------------------------------------------
-- JSChan Combinators
-- -------------------------------------------------------------
-- | Create a new empty 'JSChan'.
newChan :: (SunroofArgument a) => JS t (JSChan a)
newChan = do
written <- newArray ()
waiting <- newArray ()
tuple (written, waiting)
-- | Put a value into the channel. This will never block.
writeChan :: forall t a . (SunroofThread t, SunroofArgument a) => a -> JSChan a -> JS t ()
writeChan a (match -> (written,waiting)) = do
ifB ((waiting ! length') ==* 0)
(do f <- continuation $ \ (k :: JSContinuation a) -> goto k a :: JSB ()
_ <- written # push (f :: JSContinuation (JSContinuation a))
return ()
)
(do f <- shift waiting
forkJS (goto f a :: JSB ())
)
-- | Take a value out of the channel. If there is no value
-- inside, this will block until one is available.
readChan :: forall a . (SunroofArgument a) => JSChan a -> JS 'B a
readChan (match -> (written,waiting)) = do
ifB ((written ! length') ==* 0)
(do -- Add yourself to the 'waiting for writer' Q.
callcc $ \ k -> do _ <- waiting # push (k :: JSContinuation a)
done
)
(do f <- shift written
-- Here, we add our continuation into the written Q.
callcc $ \ k -> goto f k
)
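-- Illustrative usage sketch (not part of the original module).  'JSNumber' is
-- assumed to be imported from Language.Sunroof.JS.Number; any other
-- SunroofArgument payload type would work the same way.
--
--   example :: JS 'B JSNumber
--   example = do
--     ch <- newChan
--     writeChan (42 :: JSNumber) ch
--     readChan ch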
| ku-fpg/sunroof-compiler | Language/Sunroof/JS/Chan.hs | bsd-3-clause | 3,405 | 0 | 19 | 744 | 943 | 507 | 436 | 64 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
module Sys.ExitCode(
ExitCode
, _ExitFailure
, _ExitSuccess
, exitFailure
, exitSuccess
, exitFailureP
, exitSuccessP
, exitCode
, unExitCode
) where
import Control.Lens(Prism', prism', (#))
import Data.Int(Int)
import Data.Maybe(Maybe(Nothing, Just))
import Data.NotZero(NotZero, notZero, notZeroElse, notZero1)
import Data.NotZeroOr(Number, NotZeroOr(IsNotZero, OrNotZero), _IsNotZero, _OrNotZero)
import qualified System.Exit as Exit
type ExitCode =
Number Int
_ExitFailure ::
Prism' ExitCode (NotZero Int)
_ExitFailure =
_IsNotZero
_ExitSuccess ::
Prism' ExitCode ()
_ExitSuccess =
_OrNotZero
exitFailure ::
NotZero Int
-> ExitCode
exitFailure =
(_ExitFailure #)
exitSuccess ::
ExitCode
exitSuccess =
_ExitSuccess # ()
exitFailureP ::
Prism' Exit.ExitCode Int
exitFailureP =
prism'
Exit.ExitFailure
(\x -> case x of
Exit.ExitFailure y ->
Just y
Exit.ExitSuccess ->
Nothing)
exitSuccessP ::
Prism' Exit.ExitCode ()
exitSuccessP =
prism'
(\() -> Exit.ExitSuccess)
(\x -> case x of
Exit.ExitFailure _ ->
Nothing
Exit.ExitSuccess ->
Just ())
exitCode ::
ExitCode
-> Exit.ExitCode
exitCode (IsNotZero a) =
Exit.ExitFailure (notZero # a)
exitCode (OrNotZero ()) =
Exit.ExitSuccess
unExitCode ::
Exit.ExitCode
-> ExitCode
unExitCode (Exit.ExitSuccess) =
exitSuccess
unExitCode (Exit.ExitFailure 0) =
exitSuccess
unExitCode (Exit.ExitFailure n) =
exitFailure (notZeroElse notZero1 n)
| NICTA/sys-process | src/Sys/ExitCode.hs | bsd-3-clause | 1,586 | 0 | 12 | 347 | 468 | 262 | 206 | 72 | 2 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-|
Module: Control.Remote.Monad.JSON.Trace
Copyright: (C) 2015, The University of Kansas
License: BSD-style (see the file LICENSE)
Maintainer: Justin Dawson
Stability: Alpha
Portability: GHC
-}
module Control.Remote.Monad.JSON.Trace where
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Natural
import Control.Remote.Monad.JSON.Router (Call (..))
import Control.Remote.Monad.JSON.Types
import Data.Aeson
import qualified Data.Text.Lazy as LT
import Data.Text.Lazy.Encoding (decodeUtf8)
-- | A tracing natural transformation morphism over the Send API.
traceSendAPI :: MonadIO m => String -> (SendAPI :~> m) -> (SendAPI :~> m)
traceSendAPI msg f = wrapNT $ \ case
(Sync v) -> do
liftIO $ putStrLn $ msg ++ "--> " ++ LT.unpack (decodeUtf8 (encode v))
r <- f # (Sync v)
liftIO $ putStrLn $ msg ++ "<-- " ++ LT.unpack (decodeUtf8 (encode r))
return r
(Async v) -> do
liftIO $ putStrLn $ msg ++ "--> " ++ LT.unpack (decodeUtf8 (encode v))
() <- f # (Async v)
liftIO $ putStrLn $ msg ++ "// No response"
return ()
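-- Illustrative sketch (not part of the original module): wrap an existing
-- transport with tracing.  'someTransport' is an assumed value of type
-- 'SendAPI :~> IO'.
--
--   traced :: SendAPI :~> IO
--   traced = traceSendAPI "session" someTransport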
-- | A tracing natural transformation morphism over the Receive API.
traceReceiveAPI :: MonadIO m => String -> (ReceiveAPI :~> m) -> (ReceiveAPI :~> m)
traceReceiveAPI msg f = wrapNT $ \ (Receive v) -> do
liftIO $ putStrLn $ msg ++ "--> " ++ LT.unpack (decodeUtf8 (encode v))
r <- f # (Receive v)
case r of
Nothing -> liftIO $ putStrLn $ msg ++ "// No response"
Just _ -> liftIO $ putStrLn $ msg ++ "<-- " ++ LT.unpack (decodeUtf8 (encode r))
return r
-- | A tracing natural transformation morphism over the Call API.
traceCallAPI :: MonadIO m => String -> (Call :~> m) -> (Call :~> m)
traceCallAPI msg f = wrapNT $ \ case
p@(CallMethod nm args) -> do
let method = Method nm args :: Prim Value
liftIO $ putStrLn $ msg ++ " method " ++ show method
r <- f # p
liftIO $ putStrLn $ msg ++ " return " ++ LT.unpack (decodeUtf8 (encode r))
return r
p@(CallNotification nm args) -> do
let n = Notification nm args
liftIO $ putStrLn $ msg ++ " notification " ++ show n
f # p
| ku-fpg/remote-json | Control/Remote/Monad/JSON/Trace.hs | bsd-3-clause | 2,702 | 0 | 17 | 798 | 747 | 383 | 364 | 49 | 2 |
-- |
-- The modified Shunting-Yard algorithm. The modifications allow function
-- application by juxtaposition (without any parentheses around the arguments)
-- and distfix operators. For normal usage, it should be enough
-- to import only 'Text.Syntactical', not this module directly.
-- Note: The parser allows applying a number to another,
-- e.g. 1 2. Maybe this could be turned into an option.
-- The proper way to forbid such 'number application' is
-- to use some type-checking. If 1 2 should be disallowed,
-- 1 (2 + 3) or 1 a should be disallowed too. The 'apply'
-- function seems a good place to implement such restriction.
module Text.Syntactical.Yard (
Shunt(..), Failure(..), Rule(..),
initial, isDone, shunt, step, steps, showFailure
) where
import Data.List (intersperse)
import Text.Syntactical.Data (
SExpr(..), Tree(..),
Hole(..), Part(..), Table, Priority(..),
begin, end, leftOpen, rightOpen, rightHole, discard,
applicator, applicator', continue, original, priority,
arity, symbol, next, current,
findBoth, findBegin, FindBegin(..), FindBoth(..), Ambiguity(..),
Token, toString, operator,
showPart, showSExpr, showTree
)
----------------------------------------------------------------------
-- Data structures to support the shunting-yard algorithm
----------------------------------------------------------------------
-- An applicator is a non-operator (i.e. a symbol or a list) applied
-- to some arguments. When such a symbol is read, it is placed on the
-- operator stack. If there is already such a symbol on the stack, it
-- goes straight to the output stack (this is the Argument case).
data Rule a = Initial
| Argument -- straight to the output stack
| Application -- apply an applicator
| ApplyOp -- apply an operator
| StackApp -- push an applicator to the stack
| StackL -- push the first part of a closed or prefix operator
| StackOp -- push a new operator part to the stack
| ContinueOp -- append an operator part to the operator
-- at the top of the stack
| MatchedR -- handle the last part of a closed operator
| SExpr -- build an s-expression
| Done (Result a)
deriving (Show, Eq)
isInitial :: Rule a -> Bool
isInitial Initial = True
isInitial _ = False
stackedOp :: Rule a -> Bool
stackedOp StackL = True
stackedOp StackOp = True
stackedOp ContinueOp = True
stackedOp _ = False
data Result a =
Success -- everything is successfuly parsed
| Failure (Failure a)
deriving (Eq, Show)
-- | The different failure cases the 'shunt' function can return.
-- The 'showFailure' function can be used to give them a textual
-- representation.
data Failure a =
MissingBefore [[a]] a -- ^ missing parts before part
| MissingAfter [a] [a] -- ^ missing parts after parts
| CantMix (Part a) (Part a) -- ^ can't mix two operators
| MissingSubBetween a a -- ^ missing sub-expression between parts
| MissingSubBefore a -- ^ missing sub-expression before string
| MissingSubAfter a -- ^ missing sub-expression after string
| Ambiguity Ambiguity -- ^ a part is used ambiguously in multiple operators
| Unexpected -- ^ this is a bug if it happens
deriving (Eq, Show)
failure :: Failure a -> Rule a
failure f = Done $ Failure f
-- The state of the shunting-yard. The input and output types are the same.
-- The operator stack can hold parts in addition to the atoms and lists.
-- This implies conversions (using s2t and t2s) that would be avoided by
-- using the Tree type for the input and the output. But ruling out the
-- invalid input and output (those containing parts) seems better.
data Shunt a = S
[SExpr a] -- list of tokens (Nodes can be pushed back.)
[Tree a] -- stack of operators and applicators
[[SExpr a]] -- stack of stacks
(Rule a)
isDone :: Shunt a -> Bool
isDone (S _ _ _ (Done _)) = True
isDone _ = False
-- Set the rule of a Shunt structure.
rule :: Shunt a -> Rule a -> Shunt a
rule (S tt st oo _) = S tt st oo
-- Construct the initial state of the shunting-yard from a given input list.
initial :: [SExpr a] -> Shunt a
initial ts = S ts [] [[]] Initial
----------------------------------------------------------------------
-- The modified shunting-yard algorithm
----------------------------------------------------------------------
-- | Parse a list of s-expressions according to an operator table.
-- Usually the s-expressions will be the result of applying 'Atom'
-- to each token.
shunt :: Token a => Table a -> [SExpr a] -> Either (Failure a) (SExpr a)
shunt table ts = case fix $ initial ts of
S [] [] [[o']] (Done Success) -> Right o'
S _ _ _ (Done (Failure f)) -> Left f
_ -> error "can't happen" -- the Success case has only the previous form.
where fix s = let s' = step table s in
if isDone s' then s' else fix s'
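-- Illustrative usage sketch (not part of this module): 'myTable' (an operator
-- table) and 'myAtoms' (a tokenised input, e.g. built by mapping 'Atom' over
-- tokens) are assumed to be constructed elsewhere, typically with the helpers
-- from Text.Syntactical.
--
--   parse = case shunt myTable myAtoms of
--             Right sexpr -> Right sexpr
--             Left err    -> Left (showFailure err)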
-- Perform one step of the shunting-yard, moving it from one state to the next.
step :: Token a => Table a -> Shunt a -> Shunt a
-- There is a complete Closed or Postfix operator on the top of the stack.
step _ (S tt (s@(Part y):ss) oo@(os:oss) _) | end y && not (rightOpen y)
= if discard y
then let (o:os') = os in S (o:tt) ss (os':oss) MatchedR
else let ((o:os'):oss') = apply s oo in S (o:tt) ss (os':oss') MatchedR
-- An applicator is on the input stack.
step table (S (t:ts) st@(s:_) oo@(os:oss) _)
| applicator table t = case s of
Part y
| rightHole y == Just SExpression ->
S ts st ((t:os):oss) SExpr
| otherwise ->
S ts (s2t t:st) ([]:oo) StackApp
Leaf _ -> S ts st ((t:os):oss) Argument
Branch _ -> S ts st ((t:os):oss) Argument
-- An operator part is on the input stack and an applicator is on
-- the stack.
step table (S tt@(Atom x:ts) st@(s:ss) oo _)
| applicator' table s =
case findBoth table x st of
BBegin pt1
| not (leftOpen pt1) && rightHole pt1 == Just SExpression ->
S ts (Part pt1:st) ([]:oo) StackL
| not (leftOpen pt1) ->
S ts (Part pt1:st) oo StackL
_ ->
S tt ss (apply s oo) Application
-- An operator part is on the input stack and on the stack.
step table sh@(S tt@(t@(Atom x):ts) st@(s@(Part y):ss) oo@(os:oss) ru) =
case findBoth table x st of
BContinue pt1 -> go pt1
BBegin pt1 -> go pt1
BMissingBegin ps -> rule sh (failure $ ps `MissingBefore` x)
BNothing -> error "can't happen" -- x is in the table for sure
BAmbiguous amb -> rule sh (failure $ Ambiguity amb)
where
go pt1
| rightHole y == Just SExpression && pt1 `continue` y && stackedOp ru =
let ([]:h:oss') = oo
in S ts (Part pt1:ss) ((List []:h):oss') ContinueOp
| rightHole y == Just SExpression && pt1 `continue` y =
let os':h:oss' = oo
ap = List (reverse os')
in S ts (Part pt1:ss) ((ap:h):oss') ContinueOp
| rightHole pt1 == Just Distfix && rightHole y == Just SExpression =
S ts (Part pt1:st) oo StackL
| rightHole pt1 == Just SExpression =
S ts (Part pt1:st) ([]:oo) StackL
| rightHole y == Just SExpression =
S ts st ((t:os):oss) SExpr
| rightOpen y && leftOpen pt1 && stackedOp ru =
rule sh (failure $ symbol y `MissingSubBetween` x)
| pt1 `continue` y = S ts (Part pt1:ss) oo ContinueOp
| not (leftOpen pt1) && begin pt1 = S ts (Part pt1:st) oo StackL
| otherwise = case pt1 `priority` y of
Lower -> S tt ss (apply s oo) ApplyOp
Higher -> S ts (Part pt1:st) oo StackOp
NoPriority -> rule sh (failure $ CantMix pt1 y)
-- No more tokens on the input stack, just have to flush
-- the remaining applicators and/or operators.
step _ sh@(S [] (s:ss) oo ru) = case s of
Leaf _ -> S [] ss (apply s oo) Application
Branch _ -> S [] ss (apply s oo) Application
Part y | end y && rightOpen y && stackedOp ru ->
rule sh (failure $ MissingSubAfter $ symbol y)
-- The infix or prefix operator has all its parts.
-- The postfix/closed is handled in the first equation.
| end y ->
S [] ss (apply s oo) ApplyOp
| otherwise ->
-- The operator is not complete.
rule sh (failure $
next y `MissingAfter` current y)
-- The applicator/operator stack is empty.
step table sh@(S (t:ts) [] oo ru) = case t of
List _ -> S ts [s2t t] ([]:oo) StackApp
Atom x -> case findBegin table x of
NoBegin -> S ts [s2t t] ([]:oo) StackApp
-- x is the first sub-op, and the stack is empty
Begin pt1 -> go pt1
MissingBegin xs -> rule sh (failure $ xs `MissingBefore` x)
AmbiguousBegin amb -> rule sh (failure $ Ambiguity amb)
where
go pt1
| leftOpen pt1 && isInitial ru =
rule sh (failure $ MissingSubBefore $ symbol pt1)
| leftOpen pt1 =
S ts [Part pt1] oo StackOp
| rightHole pt1 == Just SExpression =
S ts [Part pt1] ([]:oo) StackL
| otherwise =
S ts [Part pt1] oo StackL
-- Everything is done and fine.
step _ sh@(S [] [] [[_]] _) = rule sh $ Done Success
-- This equation should never be reached; otherwise it is a bug.
step _ sh = rule sh (failure Unexpected)
-- Construct a new output stack by applying an operator,
-- a symbol, or a list to the top of the output stack.
apply :: Token a => Tree a -> [[SExpr a]] -> [[SExpr a]]
apply (Part y) (os:oss) | end y =
if length l /= nargs
then error "can't happen" -- holes are always filled by one expression
else (operator (original y) (reverse l) : r) : oss
where nargs = arity y
(l,r) = splitAt nargs os
apply (Leaf x) (os:h:oss) = (ap:h):oss
where ap = if null os then Atom x else List (Atom x:reverse os)
apply (Branch xs) (os:h:oss) = (ap:h):oss
where ap = if null os then List (map t2s xs) else List (List (map t2s xs):reverse os)
apply _ _ = error "can't happen"
----------------------------------------------------------------------
-- Visualize the shunting-yard algorithm steps
----------------------------------------------------------------------
-- | Similar to the 'shunt' function but prints the steps
-- performed by the modified shunting-yard algorithm.
-- This function is useful to understand (and debug) the
-- modified shunting-yard algorithm.
steps :: Token a => Table a -> [SExpr a] -> IO ()
steps table ts = do
putStrLn " Input Stack Output Rule"
let sh = iterate (step table) $ initial ts
l = length $ takeWhile (not . isDone) sh
mapM_ (putStrLn . showShunt) (take (l + 1) sh)
----------------------------------------------------------------------
-- Convenience functions used in step and apply
----------------------------------------------------------------------
-- Convert a SExpr to a Tree
s2t :: SExpr a -> Tree a
s2t (Atom x) = Leaf x
s2t (List xs) = Branch $ map s2t xs
-- Convert a Tree to a SExpr (partial function)
t2s :: Tree a -> SExpr a
t2s (Leaf x) = Atom x
t2s (Branch xs) = List $ map t2s xs
-- The 'operator' function is used in this case
t2s (Part _) = error "can't convert a Tree Part to a SExpr"
----------------------------------------------------------------------
-- A few 'show' functions for Failure, Rule, and Shunt
----------------------------------------------------------------------
-- | Give a textual representation of a 'Failure'.
showFailure :: Token a => Failure a -> String
showFailure f = case f of
MissingBefore ps p ->
"Parse error: missing operator parts " ++
concatMap (unwords . map toString) ps ++
" before " ++ toString p
MissingAfter p ps ->
"Parse error: missing operator part " ++
concat (intersperse ", " $ map toString p) ++ " after " ++
unwords (map toString ps)
CantMix a b ->
"Parse error: cannot mix operators " ++ showPart a ++
" and " ++ showPart b
MissingSubBetween a b ->
"Parse error: no sub-expression between " ++ toString a ++
" and " ++ toString b
MissingSubBefore a ->
"Parse error: no sub-expression before " ++ toString a
MissingSubAfter a ->
"Parse error: no sub-expression after " ++ toString a
Ambiguity _ ->
"Parse error: the symbol is an ambiguous part"
Unexpected ->
"Parsing raised a bug"
showRule :: Token a => Rule a -> String
showRule ru = case ru of
Initial -> "Initial"
Argument -> "Argument"
Application -> "Application"
StackApp -> "StackApp"
ApplyOp -> "ApplyOp"
StackL -> "StackL"
StackOp -> "StackOp"
ContinueOp -> "ContinueOp"
MatchedR -> "MatchedR"
SExpr -> "SExpr"
Done result -> case result of
Success -> "Success"
Failure f -> "Failure:\n" ++ showFailure f
showShunt :: Token a => Shunt a -> String
showShunt (S ts ss os ru) =
pad 20 ts ++ pad' 20 ss ++ pads 20 os ++ " " ++ showRule ru
bracket :: [String] -> String
bracket s = "[" ++ (concat . intersperse ",") s ++ "]"
pad' :: Token a => Int -> [Tree a] -> String
pad' n s =
let s' = bracket . map showTree $ s
in replicate (n - length s') ' ' ++ s'
pad :: Token a => Int -> [SExpr a] -> String
pad n s =
let s' = bracket . map showSExpr $ s
in replicate (n - length s') ' ' ++ s'
pads :: Token a => Int -> [[SExpr a]] -> String
pads n s =
let s' = bracket .
map (bracket . map showSExpr) $ s
in replicate (n - length s') ' ' ++ s'
| noteed/syntactical | Text/Syntactical/Yard.hs | bsd-3-clause | 13,347 | 0 | 16 | 3,228 | 4,147 | 2,107 | 2,040 | 231 | 17 |
module Main where
import Test.Tasty ( TestTree
, defaultIngredients
, defaultMainWithIngredients
, testGroup
)
import Test.Tasty.Ingredients ( Ingredient)
import Test.Tasty.Runners.AntXML ( antXMLRunner )
ingredients :: [Ingredient]
ingredients = antXMLRunner : defaultIngredients
main :: IO ()
main = do
defaultMainWithIngredients ingredients tests
tests :: TestTree
tests = testGroup "Tests"
[
]
| creswick/minServant | tests/Main.hs | bsd-3-clause | 629 | 0 | 7 | 281 | 103 | 60 | 43 | 15 | 1 |
module Main (main) where
import D6Lib
import System.Environment (getArgs)
main :: IO ()
main = do
file <- head <$> getArgs
fileLines <- lines <$> readFile file
let msg = fixMessage fileLines
putStrLn $ "message: " ++ msg
let msg' = fixModMessage fileLines
putStrLn $ "message using modified rep code: " ++ msg'
| wfleming/advent-of-code-2016 | 2016/app/D6.hs | bsd-3-clause | 329 | 0 | 10 | 71 | 108 | 53 | 55 | 11 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
import Diagrams.Prelude
import Diagrams.Backend.Canvas.CmdLine
main = mainWith ((square 200 # fc blue <> circle 300)
# lw thick
# fc red
# frame 10 :: Diagram Canvas R2)
| ku-fpg/diagrams-canvas | examples/Circle.hs | bsd-3-clause | 258 | 0 | 13 | 77 | 74 | 37 | 37 | 7 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Web.Slack
import Web.Slack.Message
import System.Environment (lookupEnv)
import Data.Maybe (fromMaybe)
import Control.Applicative
import Data.Text (Text)
import qualified Data.Text as T
import System.Environment
import Data.Foldable
import Network.Wreq
import Control.Lens
import Data.Aeson.Lens
import Reply
myConfig :: String -> SlackConfig
myConfig apiToken = SlackConfig
{ _slackApiToken = apiToken -- Specify your API token here
}
main :: IO ()
main = do
apiToken <- fromMaybe (error "SLACK_API_TOKEN not set")
<$> lookupEnv "SLACK_API_TOKEN"
args <- getArgs
scripts <- mapM getLines args
let opts = defaults & param "token" .~ [T.pack apiToken]
r <- getWith opts "https://slack.com/api/channels.list"
let generalId' = r ^? responseBody . key "channels"
. values
. filtered (\c -> c ^. key "name" . _String == "general")
. key "id" . _String
generalId = maybe (error "#general not found") id generalId'
bot = filterBot (\i -> view getId i /= generalId) $ textBot (reply scripts)
runBot (myConfig apiToken) bot ()
reply :: [Script] -> Text -> Maybe Text
reply scripts query = asum . map (getReply query) $ scripts
textBot :: (Text -> Maybe Text) -> SlackBot ()
textBot f (Message cid _ msg _ _ _)
| (Just response) <- f msg = sendMessage cid response
textBot _ _ = return ()
-- | A bot middleware that filters channels based on the ChannelId
filterBot :: (ChannelId -> Bool) -> SlackBot a -> SlackBot a
filterBot f _ (Message cid _ _ _ _ _) | not (f cid) = return ()
filterBot _ b m = b m
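-- Illustrative sketch (not part of the original file): silence one channel by
-- wrapping any bot with 'filterBot'.  'blockedChannel' is an assumed ChannelId.
--
--   quietBot :: SlackBot ()
--   quietBot = filterBot (/= blockedChannel) (textBot (const Nothing))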
| madjar/jmt | Main.hs | bsd-3-clause | 1,737 | 0 | 19 | 439 | 575 | 290 | 285 | -1 | -1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Bandits.Backend.HRR where
import Bandits.Experiment.Instructions
import Bandits.Experiment.MultiarmedBandit
import Bandits.Experiment.Types
import Control.Monad.Free
import Data.Convertible
import Data.Functor.Sum
import Database.HDBC.Query.TH
import qualified Database.HDBC.Record.Insert as Insert
import qualified Database.HDBC.Record.Query as Query
import Database.HDBC.Record.Statement
import Database.HDBC.Record.TH
import qualified Database.HDBC.Record.Update as Update
import Database.HDBC.SqlValue
import Database.HDBC.Types (IConnection)
import Database.Record
import Database.Record.Persistable
import Database.Relational.Query
import GHC.Generics
import Language.Haskell.TH.Name.CamelCase
-- We need some specific instances for HDBC.
-- As we don't want to leak HDBC into our core logic,
-- we define orphan instances here. As we have
-- multi-parameter type classes here, we cannot use
-- standalone deriving.
instance Convertible SqlValue Variation where
safeConvert s = MkVariation <$> safeConvert s
instance Convertible Variation SqlValue where
safeConvert (MkVariation s) = safeConvert s
$(derivePersistableInstanceFromValue [t| Variation |])
deriving instance PersistableWidth Variation
deriving instance ShowConstantTermsSQL Variation
instance Convertible SqlValue ExperimentId where
safeConvert s = MkExperimentId <$> safeConvert s
instance Convertible ExperimentId SqlValue where
safeConvert (MkExperimentId s) = safeConvert s
$(derivePersistableInstanceFromValue [t| ExperimentId |])
deriving instance PersistableWidth ExperimentId
deriving instance ShowConstantTermsSQL ExperimentId
instance Convertible SqlValue UserId where
safeConvert s = MkUserId <$> safeConvert s
instance Convertible UserId SqlValue where
safeConvert (MkUserId s) = safeConvert s
$(derivePersistableInstanceFromValue [t| UserId |])
deriving instance PersistableWidth UserId
deriving instance ShowConstantTermsSQL UserId
instance Convertible SqlValue Reward where
safeConvert s = MkReward <$> safeConvert s
instance Convertible Reward SqlValue where
safeConvert (MkReward s) = safeConvert s
$(derivePersistableInstanceFromValue [t| Reward |])
deriving instance PersistableWidth Reward
deriving instance ShowConstantTermsSQL Reward
instance Convertible SqlValue BanditType where
safeConvert s = read <$> safeConvert s
instance Convertible BanditType SqlValue where
safeConvert = safeConvert . show
instance FromSql SqlValue BanditType where
recordFromSql = valueFromSql
instance ToSql SqlValue BanditType where
recordToSql = valueToSql
instance PersistableWidth BanditType where
persistableWidth = unsafeValueWidth
-- Table definition for relational record.
$(defineTableDefault
defaultConfig
-- schema name
"bandits"
-- table name
"assignment"
-- columns
[ ("as_experiment_id", [t| ExperimentId |])
, ("as_user_id", [t| UserId |])
, ("as_variation", [t| Variation |])
, ("as_reward", [t| Reward |])
]
-- derivings
[toConName "Eq", toConName "Show", toConName "Generic"]
-- primary key columns
[0, 1]
-- not null column?
Nothing
)
-- | Hides the presence of the HDBC connection.
type RunHRRBackend a = forall c. IConnection c => c -> IO a
runExperiment1 :: Free (Sum ExpInstr BanditInstr) a -> RunHRRBackend a
runExperiment1 m conn = iterM run m
where
run :: Sum ExpInstr BanditInstr (IO a) -> IO a
run (InL (LookupAssignment eid uid k)) = do
undefined
run (InL (NewAssignment eid uid k)) = do
arm <- runExperiment1 (mkBandit undefined) conn
k arm
run (InL (NewReward eid uid rew k)) = do
undefined
run (InR (RandomProbability k)) = do
undefined
run (InR (RandomArm k)) = do
undefined
run (InR (ChooseArm i k)) = do
undefined
run (InR (Scan f n k)) = do
undefined
run (InR (Collect f k)) = do
undefined
-- | Runs an experiment expressed in the 'Free' monad against the HDBC backend.
runExperiment :: Free ExpInstr a -> RunHRRBackend a
runExperiment m conn = iterM run m
where
run :: ExpInstr (IO a) -> IO a
run (LookupAssignment eid uid k) = do
a <- queryAssignment eid uid conn
k a
run (NewAssignment eid uid k) = do
insertAssignment' eid uid undefined conn
k undefined
run (NewReward eid uid rew k) = do
updateReward eid uid rew conn
k
-- | Queries the database for the variation assigned to a user, if any.
queryAssignment :: ExperimentId -> UserId -> RunHRRBackend (Maybe Variation)
queryAssignment eid uid conn = do
ps <- Query.prepare conn selectAssignment
es <- execute (bind ps (eid, uid))
ma <- Query.fetchUnique' es
return $ asVariation <$> ma
updateReward :: ExperimentId -> UserId -> Reward -> RunHRRBackend ()
updateReward eid uid rew conn = do
ps <- Update.prepareUpdate conn upd
_ <- Update.runPreparedUpdate ps ()
return ()
where
upd =
typedUpdate tableOfAssignment . updateTarget $ \proj -> do
asReward' <-# value rew
wheres $ proj ! asExperimentId' .=. value eid
wheres $ proj ! asUserId' .=. value uid
wheres $ proj ! asReward' .=. value (MkReward 0.0)
insertAssignment' :: ExperimentId -> UserId -> Variation -> RunHRRBackend ()
insertAssignment' eid uid var conn = do
ps <- Insert.prepareInsert conn insertAssignment
_ <- Insert.runPreparedInsert ps Assignment { asExperimentId = eid
, asUserId = uid
, asVariation = var
, asReward = MkReward 0.0
}
return ()
| alexbiehl/bandits | src/Bandits/Backend/HRR.hs | bsd-3-clause | 6,215 | 0 | 14 | 1,497 | 1,525 | 797 | 728 | 137 | 8 |
module HandleHelperMUnitTests (handleHelperMUnitTests) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import Test.Tasty
import Test.Tasty.HUnit
import Game.LambdaHack.Client.UI.HandleHelperM
import UnitTestHelpers
handleHelperMUnitTests :: TestTree
handleHelperMUnitTests = testGroup "handleHelperMUnitTests"
[ testCase "partyAfterLeader" $ do
-- You've got to fight for your right to party!
let testFunc = partyAfterLeader testActorId
partyInMonad <- executorCli testFunc testCliStateWithItem
let party = fst partyInMonad
party @?= []
]
| LambdaHack/LambdaHack | test/HandleHelperMUnitTests.hs | bsd-3-clause | 591 | 0 | 13 | 98 | 115 | 63 | 52 | 14 | 1 |
{-# LANGUAGE ViewPatterns, QuasiQuotes, FlexibleContexts, CPP #-}
-- | Macros allow users to access more advanced functionality from within Markdown syntax. There are two types
-- of macros, block and inline, which allow substitution of 'Block' and 'Inline' data, respectively. Macros
-- are called in a very similar fashion to shell programs: the argument string is split on whitespace. The
-- first word is the name of the macro, and the remaining words are the arguments. Option-parsing libraries
-- may be useful for interpreting the arguments.
--
-- Note: using 'blockMacros' and 'inlineMacros' at the same time with the same magic string can lead to behavior
-- that depends on the order in which they are called, if any of the macro names are the same for block and inline
-- macros. However, if none of the macro names are the same, unrecognized macro names will be ignored by the pass
-- that doesn't recognize them, leaving them available to be recognized by the other pass.
module Yesod.Markdown.Macros
where
import Text.Pandoc
import Safe
import Yesod
import Control.Applicative
import Data.Map ( Map )
import qualified Data.Map as Map
import qualified Data.ByteString.Lazy.UTF8 as U
-- | Convert block-level macros. Block-level macros are signalled by a first-level header containing a piece of
-- inline code starting with a client-specified magic string. For example, if the magic string is @??@, a macro
-- can be called by
--
-- > #`??MACRO_NAME MACRO_ARGS`
--
-- where @MACRO_NAME@ is the identifying name of the macro and @MACRO_ARGS@ is a space-separated list of arguments.
blockMacros
:: Yesod master
=> String -- ^ Magic string to introduce the macro
-> Map String ([String] -> GHandler sub master Block) -- ^ Lookup table from macro names to macro functions
-> Pandoc
-> GHandler sub master Pandoc
blockMacros magic table p = processWithM blockMacros' p where
blockMacros' (Header 1 [Code (splitAt (length magic) -> (magic',words -> ((flip Map.lookup table -> Just f):xs)))])
| magic == magic' = f xs
blockMacros' b = return b
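-- Illustrative sketch (not part of the original module): a macro table with a
-- single block macro named "hr" that ignores its arguments and emits a
-- horizontal rule; it would be called from Markdown as #`??hr`.
--
--   myMacros :: Yesod master => Map String ([String] -> GHandler sub master Block)
--   myMacros = Map.fromList [("hr", \_args -> return HorizontalRule)]
--
--   expand :: Yesod master => Pandoc -> GHandler sub master Pandoc
--   expand = blockMacros "??" myMacros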
-- | Convert inline-level macros. Inline-level macros are signalled by a piece of inline code starting with a
-- client-specified magic string. For example, if the magic string is @??@, a macro can be called by
--
-- > `??MACRO_NAME MACRO_ARGS`
--
-- where @MACRO_NAME@ is the identifying name of the macro and @MACRO_ARGS@ is a space-separated list of arguments.
inlineMacros
:: Yesod master
=> String -- ^ Magic string to introduce the macro
-> Map String ([String] -> GHandler sub master [Inline]) -- ^ Lookup table from macro names to macro functions
-> Pandoc
-> GHandler sub master Pandoc
inlineMacros magic table p = processWithM (fmap concat . mapM inlineMacros') p where
inlineMacros' (Code (splitAt (length magic) -> (magic',words -> ((flip Map.lookup table -> Just f):xs))))
| magic == magic' = f xs
inlineMacros' b = return [b]
-- | Convert a 'Hamlet' value to a 'Block'.
hamletToBlock :: Hamlet (Route master) -> GHandler sub master Block
hamletToBlock x = RawHtml . U.toString . renderHtml . x <$> getUrlRenderParams
-- | Convert a 'Hamlet' value to an 'Inline'.
hamletToInline :: Hamlet (Route master) -> GHandler sub master Inline
hamletToInline x = HtmlInline . U.toString . renderHtml . x <$> getUrlRenderParams
-- | Read a local route from the macro arguments and render its URL as inline HTML.
localRoute :: Read (Route master) => [String] -> GHandler sub master Inline
localRoute = maybe (return (Str "")) f . readMay . unwords where
f = hamletToInline . (\x ->
#if GHC7
[hamlet|
#else
[$hamlet|
#endif
@x@|])
| ajdunlap/yesod-markdown | Yesod/Markdown/Macros.hs | bsd-3-clause | 3,668 | 0 | 21 | 732 | 617 | 333 | 284 | 37 | 2 |
{-# LANGUAGE BangPatterns, GeneralizedNewtypeDeriving, PatternGuards,
DeriveFunctor, DeriveFoldable, DeriveTraversable #-}
module Data.VectorNode(
Elem(..), Breadth, Size, Sized(..),
Node, empty, singleton,
splitR, splitL, force, cons, snoc,
breadth, head, tail, init, last, append, null,
(!), foldl', reverse, fromList, toList, adjust,
take, drop, replicate
) where
import Prelude hiding (head, tail, init, last, take, drop, null, reverse, replicate)
import Data.Foldable(Foldable(..))
import Data.Traversable(Traversable(..))
import qualified Data.Vector as V
-- Breadth describes the width of a tree node.
type Breadth = Int
-- Size describes the count of items held in or beneath a node.
type Size = Int
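-- Illustrative example (not part of the original module): a node built from
-- three 'Elem' leaves has breadth 3, and since every 'Elem' has size 1 its
-- size is also 3; for a node of nodes the size would instead be the sum of
-- the children's sizes.
--
-- >>> breadth (fromList [Elem 'a', Elem 'b', Elem 'c'])
-- 3
-- >>> size (fromList [Elem 'a', Elem 'b', Elem 'c'])
-- 3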
data Node a = Node {-# UNPACK #-} !Size {-# UNPACK #-} !(V.Vector a)
deriving (Eq, Functor, Foldable, Traversable)
------------------------------------------------------------
-- Size class. Defined here because we need to specialize sizeN,
-- and use size while constructing nodes from nodes.
class Sized a where
size :: a -> Size
sizeN :: Node a -> Size
sizeN (Node _ n) = V.foldl' (\s a -> s + size a) 0 n
-- For a while we weren't using Sized here, but were passing a size
-- function instead. But local dictionary specialization ought to
-- apply if we use the type class, making that far more efficient
-- after inlining.
newtype Elem a = Elem { unElem :: a }
deriving (Eq, Ord, Functor, Foldable, Traversable)
instance Show a => Show (Elem a) where
showsPrec p (Elem e) = showsPrec p e -- Don't show the wrappers.
instance Sized (Elem a) where
size _ = 1
sizeN (Node _ n) = V.length n
instance Sized (Node a) where
size (Node s _) = s
------------------------------------------------------------
-- Node manipulation.
empty :: Node a
empty = Node 0 V.empty
singleton :: Elem a -> Node (Elem a)
singleton = Node 1 . V.singleton
-- Compute size of node the hard way, by summing element sizes.
{-# SPECIALIZE node :: V.Vector (Node a) -> Node (Node a) #-}
{-# SPECIALIZE node :: V.Vector (Elem a) -> Node (Elem a) #-}
node :: (Sized a) => V.Vector a -> Node a
node n = Node (sizeN r) n
where r = Node (-1) n
-- Symmetric 0-copy split
splitNode :: V.Vector a -> Breadth -> (V.Vector a, V.Vector a)
splitNode n b = (V.take b n, V.drop b n)
-- Split for R, counting from left, copying left
{-# SPECIALIZE splitR :: Node (Node a) -> Breadth -> (Node (Node a), Node (Node a)) #-}
{-# SPECIALIZE splitR :: Node (Elem a) -> Breadth -> (Node (Elem a), Node (Elem a)) #-}
splitR :: (Sized a) => Node a -> Breadth -> (Node a, Node a)
splitR (Node s n) b =
case splitNode n b of
(d, r) -> (node (V.force d), node r)
-- Split for L, counting from right, copying right
{-# SPECIALIZE splitL :: Node (Elem a) -> Breadth -> (Node (Elem a), Node (Elem a)) #-}
{-# SPECIALIZE splitL :: Node (Node a) -> Breadth -> (Node (Node a), Node (Node a)) #-}
splitL :: (Sized a) => Node a -> Breadth -> (Node a, Node a)
splitL (Node s n) b =
case splitNode n (V.length n - b) of
(l, d) -> (node l, node (V.force d))
force :: Node a -> Node a
force (Node s n) = Node s (V.force n)
{-# SPECIALIZE cons :: (Elem a) -> Node (Elem a) -> Node (Elem a) #-}
{-# SPECIALIZE cons :: (Node a) -> Node (Node a) -> Node (Node a) #-}
cons :: (Sized a) => a -> Node a -> Node a
cons a (Node s n) = Node (size a + s) (V.cons a n)
{-# SPECIALIZE snoc :: Node (Elem a) -> (Elem a) -> Node (Elem a) #-}
{-# SPECIALIZE snoc :: Node (Node a) -> (Node a) -> Node (Node a) #-}
snoc :: (Sized a) => Node a -> a -> Node a
snoc (Node s n) a = Node (size a + s) (V.snoc n a)
breadth :: Node a -> Breadth
breadth (Node _ n) = V.length n
head :: Node a -> a
head (Node _ n) = V.head n
{-# SPECIALIZE tail :: Node (Elem a) -> Node (Elem a) #-}
{-# SPECIALIZE tail :: Node (Node a) -> Node (Node a) #-}
tail :: (Sized a) => Node a -> Node a
tail (Node s n) = Node (s - size (V.head n)) (V.force (V.tail n))
last :: Node a -> a
last (Node _ n) = V.last n
{-# SPECIALIZE init :: Node (Elem a) -> Node (Elem a) #-}
{-# SPECIALIZE init :: Node (Node a) -> Node (Node a) #-}
init :: (Sized a) => Node a -> Node a
init (Node s n) = Node (s - size (V.last n)) (V.force (V.init n))
append :: Node a -> Node a -> Node a
append (Node s1 n1) (Node s2 n2) = Node (s1+s2) (n1 V.++ n2)
null :: Node a -> Bool
null (Node s _) = s == 0
foldl' :: (r -> a -> r) -> r -> Node a -> r
foldl' f z (Node _ n) = V.foldl' f z n
reverse :: Node a -> Node a
reverse (Node s n) = Node s (V.reverse n)
{-# SPECIALIZE fromList :: [Elem a] -> Node (Elem a) #-}
{-# SPECIALIZE fromList :: [Node a] -> Node (Node a) #-}
fromList :: (Sized a) => [a] -> Node a
fromList xs = node (V.fromList xs)
toList :: Node a -> [a]
toList (Node _ v) = V.toList v
{-# INLINE adjust #-}
-- Note that lack of Sized constraint forces this to be a
-- size-preserving adjustment.
adjust :: (a -> a) -> Breadth -> Node a -> Node a
adjust f i (Node s v) = Node s (v V.// [(i, f (v V.! i))])
{-# INLINE (!) #-}
(!) :: Node a -> Breadth -> a
(!) (Node s n) i = n V.! i
-- take without a Sized constraint (caller adjusts the size)
take :: Size -> Breadth -> Node a -> Node a
take s i (Node _ v) = Node s (V.take i v)
-- drop without a Sized constraint (caller adjusts the size)
drop :: Size -> Breadth -> Node a -> Node a
drop s i (Node _ v) = Node s (V.drop i v)
-- replication
replicate :: Size -> Breadth -> a -> Node a
replicate s i a = Node s (V.replicate i a)
| jmaessen/Data.FastSequence | src/Data/VectorNode.hs | bsd-3-clause | 5,484 | 0 | 12 | 1,201 | 1,894 | 999 | 895 | 103 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE UnicodeSyntax #-}
{- | These functions take only the NAME, not the whole path.
-}
module Node
( nodeNamed
, fileNodeNamed
, tagNodeNamed
) where
import Data.Maybe (isJust)
import System.Fuse (getFuseContext)
import DB.Base
import DB.Read (fileEntityNamed, tagEntityNamed)
import Stat.Base (dirStat, contentsFileStat)
import Types
nodeNamed ∷ DB → String → IO (Maybe Node)
nodeNamed db name = do
maybeFile ← fileNodeNamed db name
if isJust maybeFile
then return maybeFile
else tagNodeNamed db name
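-- Illustrative usage sketch (not part of the original module): look a node up
-- by bare name (never a full path); a file with that name wins over a tag.
--
--   example :: DB -> IO ()
--   example db = do
--     mNode <- nodeNamed db "notes.txt"
--     case mNode of
--       Just (FileNode _ contents) -> print contents
--       Just (DirNode _)           -> putStrLn "tag directory"
--       Nothing                    -> putStrLn "no such node"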
fileNodeNamed ∷ DB → String → IO (Maybe Node)
fileNodeNamed db name =
fileEntityNamed db name >>= entityToNode
tagNodeNamed ∷ DB → String → IO (Maybe Node)
tagNodeNamed db name =
tagEntityNamed db name >>= entityToNode
----------------
entityToNode ∷ Maybe Entity → IO (Maybe Node)
entityToNode maybeEntity = do
ctx ← getFuseContext
case maybeEntity of
Just (FileEntity _ (File _ contents)) →
return $ Just $ FileNode (contentsFileStat ctx contents) contents
Just (TagEntity _ (Tag _)) →
return $ Just $ DirNode (dirStat ctx)
Nothing →
return Nothing
| marklar/TagFS | src/Node.hs | bsd-3-clause | 1,316 | 0 | 14 | 354 | 359 | 183 | 176 | 34 | 3 |
{-# LANGUAGE RankNTypes, FlexibleInstances, TypeSynonymInstances, QuasiQuotes, MultiParamTypeClasses, TypeFamilies, OverloadedStrings #-}
module MediaSub.Sections.BackendGitAnnex
--( AnnexSec, mkAnnexSec )
where
import Prelude hiding (mapM_)
import qualified Prelude as P
import Utils
import Control.Arrow (second)
import qualified Data.ByteString.Char8 as BC
import Data.Maybe (isNothing)
import Data.Monoid
import Data.Ord (comparing)
import Data.Conduit
import Data.Conduit.Binary
import qualified Data.Conduit.List as CL
import Data.List hiding (insert, delete)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8)
import qualified System.FilePath as FP
import System.Process
import Database.Persist.Sql
import MediaSub.Import hiding (mapM_)
import MediaSub.Sections.Types
import MediaSub.Browser
browsableServerRender fps s = renderDefault (sArea s) fps
browsableFetchElems = fetchElements
browsableFetchPlain fps s = do
(_fps, GAElem _ fp _) <- fetchFile (sArea s) (FP.joinPath fps)
return $ sPath s FP.</> fp
browsableFetchPlainR = fetchFiles
melemToContent (GAElem isdir _fp _mdesc) = (if' isdir "directory" "file", [])
searchableSearchT q s = return . flip ListMany (ListFlat (500, 1) Nothing) $ searchFor s q
updateMedia sec = differenceSortedE snd
(lift (dbSource sec $$ CL.consume) >>= CL.sourceList . sort)
(sourceGitFiles $ sPath sec)
$$ handler [] [] -- elements to delete, new elements
where
-- handler :: [FilePath] -> [(FPS, Html)] -> Sink UpdateTarget (HandlerT master IO) [(FPS, Html)]
handler todel ra = await >>= maybe (lift (delete' todel) >> return ra) handleElement
where
handleElement (Left fp) = handler (todel ++ [fp]) ra
handleElement (Right e) = do
new_elem <- lift $ runDB $ case e of
(False, path) -> do
parent <- findParent path
-- FIXME this check shouldn't be necessary!
exists <- getBy $ UniqueFNode (sArea sec) path
case exists of
Nothing -> void $ insert $ FNode (sArea sec) (entityKey <$> parent) path Nothing
Just _ -> return ()
pathToRecent path
(True, path) -> do
parent <- findParent path
exists <- getBy $ UniqueDNode (sArea sec) path
case exists of
Nothing -> void $ insert $ DNode (sArea sec) (entityKey <$> parent) path
Just _ -> return ()
pathToRecent path
-- reached a directory => delete queue (does this work always?)
lift $ delete' todel
handler [] (new_elem : ra)
findParent path = getBy $ UniqueDNode (sArea sec) (FP.takeDirectory path)
delete' [] = return ()
delete' paths = runDB $ P.mapM_ delAll
=<< selectList [DNodeArea ==. sArea sec, DNodePath <-. paths] [Desc DNodePath]
delAll (Entity k _) = do deleteWhere [FNodeArea ==. sArea sec, FNodeParent ==. Just k]
deleteWhere [DNodeArea ==. sArea sec, DNodeParent ==. Just k]
delete k
-- * Query
fetchElements :: FPS -> AnnexSec -> ListViewConf -> MediaView MediaSub AnnexSec
fetchElements fps s (ListFlat mpg _) = case fps of
[] -> do
(source, n) <- fetchRoot
return $ ListMany source (ListFlat mpg $ Just n)
_ -> runDB (getBy $ UniqueDNode (sArea s) path) >>= \md -> case md of
Just dir -> do
(source, n) <- fetchDirectory dir
return $ ListMany source (ListFlat mpg $ Just n)
Nothing -> liftM (ListSingle . snd) $ fetchFile (sArea s) path
where
path = FP.joinPath fps
fetchRoot = pagingQuery (sArea s) mpg Nothing
fetchDirectory = pagingQuery (sArea s) mpg . Just . entityKey
fetchFile :: SectionId -> FilePath -> HandlerT MediaSub IO (FPS, MElem MediaSub AnnexSec)
fetchFile area = liftM (((,) <$> FP.splitDirectories . fNodePath <*> toGAElem) . entityVal)
. runDB . getBy404 . UniqueFNode area
where toGAElem = GAElem <$> const False
<*> fNodePath
<*> fNodeDetails
pagingQuery :: SectionId -> Paging -> Maybe (Key DNode)
-> HandlerT MediaSub IO (Source (HandlerT MediaSub IO) (FPS, MElem MediaSub AnnexSec), Int)
pagingQuery secid (limit, offset) mp = liftM ((,) getSource) countQuery
where
getSource = runDBSource . mapOutput toElem $ myquery
myquery = rawQuery qstring $ toPersistValue secid : maybe [] (\x -> [toPersistValue x]) mp
++ [ toPersistValue limit, toPersistValue $ limit * offset ]
qstring = T.unlines
[ "SELECT isfile, path, details FROM (SELECT FALSE as isfile, area, path, parent, NULL as details FROM d_node"
, "UNION SELECT TRUE as isfile, area, path, parent, details as details FROM f_node)"
, "_ WHERE area = ? AND parent " <> if' (isNothing mp) "IS NULL" "= ?"
, "ORDER BY isfile, path LIMIT ? OFFSET ?"
]
countQuery = runDB $ liftM2 (+) ( count [FNodeArea ==. secid, FNodeParent ==. mp] )
( count [DNodeArea ==. secid, DNodeParent ==. mp] )
fetchFiles :: FPS -> AnnexSec -> MediaSource MediaSub FilePath
fetchFiles fps s = runDBSource . mapOutput toFilePath $ rawQuery qstring
[ toPersistValue secid, toPersistValue secid, toPersistValue $ FP.joinPath fps ]
where
secid = sArea s
qstring =
"WITH RECURSIVE tr_nodes(id, parent, path) AS ( "
<> "WITH nodes AS ( SELECT d.id, d.parent, d.path FROM d_node d WHERE area = ? "
<> "UNION SELECT NULL, f.parent, f.path FROM f_node f WHERE area = ? "
<> ") SELECT * FROM nodes WHERE path = ? "
<> "UNION ALL SELECT n.* FROM tr_nodes nr, nodes n WHERE n.parent = nr.id "
<> ") SELECT path FROM tr_nodes WHERE id IS NULL ORDER BY path"
searchFor :: AnnexSec -> Text -> Source (HandlerT MediaSub IO) (FPS, MElem MediaSub AnnexSec)
searchFor sec qtext = runDBSource . mapOutput toElem $ rawQuery (query "") -- TODO limits
[ toPersistValue (sArea sec)
, toPersistValue $ ".*" <> qtext <> ".*"
] where
query limits = T.unlines
[ "SELECT isfile, path, details FROM (SELECT FALSE as isfile, path, area, NULL as details FROM d_node"
, "UNION SELECT TRUE as isfile, path, area, details as details FROM f_node)"
, "_ WHERE area = ? AND path ~* ? ORDER BY isfile, path" <> limits ]
-- * Update
type FWrap = (Bool, FilePath) -- ^ (Is dir?, relative path)
type UpdateTarget = Either FilePath FWrap -- ^ Left delete_this, Right add_this
pathToRecent :: Monad m => FilePath -> m (FPS, Html)
pathToRecent path = return
( FP.splitDirectories path
, toHtml . f $ FP.splitDirectories path
) where f [] = "(empty? this shouldn't be possible)" ++ path
f xs = last xs
| SimSaladin/rnfssp | rnfssp-media/MediaSub/Sections/BackendGitAnnex.hs | bsd-3-clause | 7,514 | 1 | 25 | 2,449 | 1,912 | 977 | 935 | -1 | -1 |
module CRF.Gradient
( computeGradient
, applyGradient
, Gradient
) where
import CRF.Base
import CRF.LogMath (logAdd)
import CRF.Feature (featuresIn)
import CRF.Model (Model, expectedFeaturesIn)
import qualified CRF.Model.Internal as MI
import qualified CRF.Data.MarkedArray as MA
type Gradient = MA.MarkedArray
computeGradient :: Model -> Double -> Gradient -> [Sent Int Int] -> IO Gradient
computeGradient crf scale buffer part =
let ns = concat $ map featuresIn part
ens = concat $ map (expectedFeaturesIn crf) part
followPtrs = map (\(feat, val) -> (MI.featToIx feat crf, val))
in do
gradient <- MA.consumeWith logAdd (followPtrs ens) buffer
>>= MA.mapArray (\v -> - exp v)
>>= MA.consumeWith (+) (followPtrs ns)
>>= MA.mapArray (* scale)
return gradient
applyGradient :: Gradient -> Model -> IO Model
applyGradient grad crf =
MA.elems grad >>= \xs -> MI.consumeWith (+) xs crf
| kawu/tagger | src/CRF/Gradient.hs | bsd-3-clause | 975 | 4 | 16 | 224 | 294 | 166 | 128 | 25 | 1 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module App where
import Yesod
import Yesod.Paginator
import Network.Wai.Handler.Warp (run)
data App = App
mkYesod "App" [parseRoutes|
/ RootR GET
|]
instance Yesod App where
approot = ApprootRelative
defaultLayout widget = do
pc <- widgetToPageContent widget
giveUrlRenderer [hamlet|$newline never
$doctype 5
<html lang="en">
<head>
<meta charset="utf-8">
<title>#{pageTitle pc}
<!-- steal boostrap -->
<link rel="stylesheet" href="http://pbrisbin.com/static/css/bootstrap.min.css">
^{pageHead pc}
<body>
^{pageBody pc}
|]
getRootR :: Handler Html
getRootR = do
-- unneeded return here to match README
things' <- return [1..1142]
(things, widget) <- paginate 3 things'
defaultLayout $ do
setTitle "My title"
[whamlet|$newline never
<h1>Pagination
<p>The things:
<ul>
$forall thing <- things
<li>Thing #{show thing}
<div .pagination>
^{widget}
|]
main :: IO ()
main = run 3000 =<< toWaiApp App
| jamesdabbs/yesod-paginator | Test.hs | bsd-3-clause | 1,462 | 0 | 10 | 542 | 181 | 99 | 82 | 25 | 1 |
{-# LANGUAGE TypeFamilies,
MultiParamTypeClasses,
RankNTypes,
DeriveFunctor,
GeneralizedNewtypeDeriving
#-}
{-|
Module : Control.Monad.Free.NonPure
Copyright : (c) 2015 Maciej Piróg
License : MIT
Maintainer : [email protected]
Stability : experimental
The @'NonPure'@ datatype behaves like a free monad with at least
one layer of structure. It is not a @'Monad'@ (there is no way to
define @'return'@) but it is a @'Bind'@.
-}
module Control.Monad.Free.NonPure
(
NonPure(..),
toNonPure,
toFree,
hoistNonPure,
unfoldNonPure
)
where
import Prelude hiding (foldr)
import Control.Monad.Free (Free(..), hoistFree)
import qualified Control.Monad.Free as Free (unfold)
import Data.Foldable (Foldable(..))
import Data.Traversable (Traversable(..))
import Data.Functor.Apply (Apply(..))
import Data.Functor.Bind (Bind(..))
-- | Type of \"free monads\" with at least one level of structure.
newtype NonPure f a = NonPure { unNonPure :: f (Free f a) }
deriving(Functor)
instance (Functor f, Foldable f) => Foldable (NonPure f) where
foldMap g f = foldMap g (toFree f)
foldr g u f = foldr g u (toFree f)
instance (Functor f, Traversable f) => Traversable (NonPure f) where
sequenceA (NonPure f) = fmap NonPure $ traverse sequenceA f
instance (Functor f) => Apply (NonPure f) where
NonPure f <.> b = NonPure $ fmap (<.> toFree b) f
instance (Functor f) => Bind (NonPure f) where
NonPure f >>- h = NonPure $ fmap (>>= toFree . h) f
-- | Transform @'Free'@ to @'NonPure'@. Succeeds only if the
-- argument is indeed non-pure.
toNonPure :: Free f a -> Maybe (NonPure f a)
toNonPure (Pure a) = Nothing
toNonPure (Free f) = Just $ NonPure f
-- | Embed @'NonPure'@ into @'Free'@.
toFree :: NonPure f a -> Free f a
toFree (NonPure f) = Free f
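-- A small illustration (added sketch, using 'Maybe' as the functor; not part of
-- the original module):
--
-- > toNonPure (Pure 'x')               -- Nothing: a pure value has no outer layer
-- > toNonPure (Free (Just (Pure 'x'))) -- Just (NonPure (Just (Pure 'x')))
-- > toFree (NonPure (Just (Pure 'x'))) -- Free (Just (Pure 'x'))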
-- | Lift a natural transformation to \"rename\" the nodes in the
-- structure.
hoistNonPure :: (Functor g) => (forall a. f a -> g a) -> NonPure f a -> NonPure g a
hoistNonPure h (NonPure f) = NonPure $ fmap (hoistFree h) $ h f
-- | Unfold a @'NonPure'@ from a seed @s@.
unfoldNonPure :: (Functor f) => (s -> f (Either a s)) -> s -> NonPure f a
unfoldNonPure h s = NonPure $ fmap (Free.unfold $ fmap h) $ h s
| maciejpirog/modules-over-monads | src/Control/Monad/Free/NonPure.hs | mit | 2,260 | 9 | 12 | 475 | 670 | 362 | 308 | 39 | 1 |
{-# LANGUAGE OverloadedStrings, CPP #-}
{- hpodder component
Copyright (C) 2006-2007 John Goerzen <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Types
Copyright : Copyright (C) 2006-2007 John Goerzen
License : GNU GPL, version 2 or above
Maintainer : John Goerzen <[email protected]>
Stability : provisional
Portability: portable
Written by John Goerzen, jgoerzen\@complete.org
-}
module Types where
import Control.Applicative
import Control.Monad (mzero)
import Data.ConfigFile
import Data.Aeson
import Data.ByteString.Lazy (ByteString)
import Data.List (isPrefixOf)
import Data.String.Utils (strip)
import Data.Text (unpack)
decode_json :: FromJSON a => ByteString -> a
decode_json = either error id . eitherDecode
{- | Removes potentially problematic or malicious stuff -}
sanitize :: String -> String
sanitize = strip . map sanitizer
where sanitizer c
| c `elem` "\n\r\0\t" = ' '
| otherwise = c
{- | Twitter has an additional level of escaping for < and > only.
Sigh. -}
unEsc :: String -> String
unEsc [] = []
unEsc x
| "<" `isPrefixOf` x = '<' : unEsc (drop 4 x)
| ">" `isPrefixOf` x = '>' : unEsc (drop 4 x)
| otherwise = head x : unEsc (tail x)
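-- Added illustrative examples (informal sketch, not in the original file):
--
-- > unEsc "a &lt; b"            == "a < b"
-- > sanitize " spaced\tout\n"  == "spaced out"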
data Command =
Command {cmdname :: String,
cmddescrip :: String,
execcmd :: [String] -> Maybe FilePath -> ConfigParser -> IO ()}
data Message = Message {
sId :: String,
sSender :: String,
sRecipient :: String,
sText :: String,
sDate :: String
} deriving (Eq, Read, Show, Ord)
newtype TimelineMessage = TimelineMessage { fromTimeline :: Message }
instance FromJSON TimelineMessage where
parseJSON j = TimelineMessage <$> parseTimelineMessage j
parseTimelineMessage (Object v) = Message <$>
s v "id_str" <*>
(v .: "user" >>= extractScreenName) <*>
pure "" <*>
retweetOrText v <*>
s v "created_at"
parseTimelineMessage _ = mzero
newtype DirectMessage = DirectMessage { fromDM :: Message }
instance FromJSON DirectMessage where
parseJSON j = DirectMessage <$> parseDirectMessage j
parseDirectMessage (Object v) = Message <$>
s v "id_str" <*>
s v "sender_screen_name" <*>
s v "recipient_screen_name" <*>
(unEsc <$> s v "text") <*>
s v "created_at"
parseDirectMessage _ = mzero
extractScreenName (Object v) = s v "screen_name"
extractScreenName _ = mzero
retweetOrText v = unEsc <$> ((retweet v) <|> (unpack <$> v .: "text")) where
retweet v = do
rt <- v .: "retweeted_status"
user <- rt .: "user" >>= extractScreenName
text <- rt .: "text"
return $ "RT @" ++ user ++ ": " ++ text
s v name = sanitize <$> v .: name
data UserList = UserList [ListedUser] (Maybe String)
newtype ListedUser = ListedUser { fromListedUser :: (String, String) }
instance FromJSON UserList where
parseJSON (Object v) = UserList <$> v .: "users" <*> v .:? "next_cursor_str"
parseJSON _ = mzero
instance FromJSON ListedUser where
parseJSON (Object v) = (ListedUser .) . (,) <$>
v .: "screen_name" <*>
v .: "id_str"
parseJSON _ = mzero
| jgoerzen/twidge | Types.hs | gpl-2.0 | 3,774 | 0 | 13 | 806 | 881 | 464 | 417 | 73 | 1 |
{- |
Module : $Header$
Copyright : (c) Felix Gabriel Mance
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
Printer for N-triples
-}
module RDF.Print where
import Common.AS_Annotation
import Common.Doc hiding (sepBySemis, sepByCommas)
import Common.DocUtils hiding (ppWithCommas)
import OWL2.AS
import OWL2.Print ()
import RDF.AS
import RDF.Symbols
import RDF.Sign
import qualified Data.Set as Set
import Data.Maybe (isNothing)
sepBySemis :: [Doc] -> Doc
sepBySemis = vcat . punctuate (text " ;")
ppWithSemis :: Pretty a => [a] -> Doc
ppWithSemis = sepBySemis . map pretty
sepByCommas :: [Doc] -> Doc
sepByCommas = vcat . punctuate (text " ,")
ppWithCommas :: Pretty a => [a] -> Doc
ppWithCommas = sepByCommas . map pretty
instance Pretty Predicate where
pretty = printPredicate
printPredicate :: Predicate -> Doc
printPredicate (Predicate iri) = pretty iri
instance Pretty RDFLiteral where
pretty lit = case lit of
RDFLiteral b lexi ty -> text (if not b
then '"' : lexi ++ "\""
else "\"\"\"" ++ lexi ++ "\"\"\"") <> case ty of
Typed u -> keyword cTypeS <> pretty u
Untyped tag -> if isNothing tag then empty
else let Just tag2 = tag in text "@" <> text tag2
RDFNumberLit f -> text (show f)
instance Pretty PredicateObjectList where
pretty = printPredObjList
printPredObjList :: PredicateObjectList -> Doc
printPredObjList (PredicateObjectList p ol) = pretty p <+> ppWithCommas ol
instance Pretty Subject where
pretty = printSubject
printSubject :: Subject -> Doc
printSubject subj = case subj of
Subject iri -> pretty iri
SubjectList ls -> brackets $ ppWithSemis ls
SubjectCollection c -> parens $ (hsep . map pretty) c
instance Pretty Object where
pretty = printObject
printObject :: Object -> Doc
printObject obj = case obj of
Object s -> pretty s
ObjectLiteral l -> pretty l
instance Pretty Triples where
pretty = printTriples
printTriples :: Triples -> Doc
printTriples (Triples s ls) = pretty s <+> ppWithSemis ls <+> dot
instance Pretty Statement where
pretty = printStatement
printStatement :: Statement -> Doc
printStatement s = case s of
Statement t -> pretty t
PrefixStatement (PrefixR p iri)
-> text "@prefix" <+> pretty p <> colon <+> pretty iri <+> dot
BaseStatement (Base iri) -> text "@base" <+> pretty iri <+> dot
instance Pretty TurtleDocument where
pretty = printDocument
printDocument :: TurtleDocument -> Doc
printDocument doc = (vcat . map pretty) (statements doc)
printExpandedIRI :: IRI -> Doc
printExpandedIRI iri = if iriType iri == NodeID then text $ showQU iri
else text "<" <> text (expandedIRI iri) <> text ">"
instance Pretty Term where
pretty = printTerm
printTerm :: Term -> Doc
printTerm t = case t of
SubjectTerm iri -> printExpandedIRI iri
PredicateTerm iri -> printExpandedIRI iri
ObjectTerm obj -> case obj of
Right lit -> pretty lit
Left iri -> printExpandedIRI iri
instance Pretty Axiom where
pretty = printAxiom
printAxiom :: Axiom -> Doc
printAxiom (Axiom sub pre obj) = pretty sub <+> pretty pre <+> pretty obj
<+> text "."
printAxioms :: [Axiom] -> Doc
printAxioms = vcat . map pretty
-- | RDF signature printing
printRDFBasicTheory :: (Sign, [Named Axiom]) -> Doc
printRDFBasicTheory (_, l) = vsep (map (pretty . sentence) l)
instance Pretty Sign where
pretty = printSign
printNodes :: String -> Set.Set Term -> Doc
printNodes s terms = text "#" <+> text s $+$
vcat (map ((text "#\t\t" <+>) . pretty) (Set.toList terms))
printSign :: Sign -> Doc
printSign s = printNodes "subjects:" (subjects s)
$+$ printNodes "predicates:" (predicates s)
$+$ printNodes "objects:" (objects s)
-- | Symbols printing
instance Pretty RDFEntityType where
pretty ety = text $ show ety
instance Pretty RDFEntity where
pretty (RDFEntity ty ent) = pretty ty <+> pretty ent
instance Pretty SymbItems where
pretty (SymbItems m us) = pretty m <+> ppWithCommas us
instance Pretty SymbMapItems where
pretty (SymbMapItems m us) = pretty m
<+> sepByCommas
(map (\ (s, ms) -> sep
[ pretty s
, case ms of
Nothing -> empty
Just t -> mapsto <+> pretty t]) us)
instance Pretty RawSymb where
pretty rs = case rs of
ASymbol e -> pretty e
AnUri u -> pretty u
| mariefarrell/Hets | RDF/Print.hs | gpl-2.0 | 4,628 | 0 | 18 | 1,160 | 1,496 | 741 | 755 | 113 | 4 |
left `after` right = right ++ left
| evolutics/haskell-formatter | testsuite/resources/source/handles_infix_binding/Output.hs | gpl-3.0 | 35 | 0 | 5 | 7 | 18 | 9 | 9 | 1 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.IAM.DeleteAccountPasswordPolicy
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the password policy for the AWS account.
--
-- /See:/ <http://docs.aws.amazon.com/IAM/latest/APIReference/API_DeleteAccountPasswordPolicy.html AWS API Reference> for DeleteAccountPasswordPolicy.
module Network.AWS.IAM.DeleteAccountPasswordPolicy
(
-- * Creating a Request
deleteAccountPasswordPolicy
, DeleteAccountPasswordPolicy
-- * Destructuring the Response
, deleteAccountPasswordPolicyResponse
, DeleteAccountPasswordPolicyResponse
) where
import Network.AWS.IAM.Types
import Network.AWS.IAM.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'deleteAccountPasswordPolicy' smart constructor.
data DeleteAccountPasswordPolicy =
DeleteAccountPasswordPolicy'
deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteAccountPasswordPolicy' with the minimum fields required to make a request.
--
deleteAccountPasswordPolicy
:: DeleteAccountPasswordPolicy
deleteAccountPasswordPolicy = DeleteAccountPasswordPolicy'
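-- Rough call-site sketch (added; 'runResourceT', 'runAWS' and 'send' are assumed
-- to come from the amazonka core package, and construction of the 'env' value is
-- elided):
--
-- > runResourceT . runAWS env $ send deleteAccountPasswordPolicy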
instance AWSRequest DeleteAccountPasswordPolicy where
type Rs DeleteAccountPasswordPolicy =
DeleteAccountPasswordPolicyResponse
request = postQuery iAM
response
= receiveNull DeleteAccountPasswordPolicyResponse'
instance ToHeaders DeleteAccountPasswordPolicy where
toHeaders = const mempty
instance ToPath DeleteAccountPasswordPolicy where
toPath = const "/"
instance ToQuery DeleteAccountPasswordPolicy where
toQuery
= const
(mconcat
["Action" =:
("DeleteAccountPasswordPolicy" :: ByteString),
"Version" =: ("2010-05-08" :: ByteString)])
-- | /See:/ 'deleteAccountPasswordPolicyResponse' smart constructor.
data DeleteAccountPasswordPolicyResponse =
DeleteAccountPasswordPolicyResponse'
deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DeleteAccountPasswordPolicyResponse' with the minimum fields required to make a request.
--
deleteAccountPasswordPolicyResponse
:: DeleteAccountPasswordPolicyResponse
deleteAccountPasswordPolicyResponse = DeleteAccountPasswordPolicyResponse'
| fmapfmapfmap/amazonka | amazonka-iam/gen/Network/AWS/IAM/DeleteAccountPasswordPolicy.hs | mpl-2.0 | 2,954 | 0 | 11 | 553 | 284 | 174 | 110 | 48 | 1 |
{-# LANGUAGE FlexibleInstances, TypeFamilies #-}
{- |
Defines the prisoner's dilemma, stag hunt, and a suite of strategies.
From GHCi, try some of the following.
>>> nash pd
>>> pareto pd
>>> paretoNash pd
>>> paretoNash stag
>>> execGame pd [tft, pavlov] (times 10 >> printTranscripts >> printScore)
>>> axelrod [fink, tft, grim', pavlov, preserver]
-}
module Hagl.Examples.Prisoner where
import Control.Monad.State
import Prelude hiding (last, print)
import Hagl
--
-- * Game representations
--
-- | A move indicating whether to cooperate or not.
data Cooperation = C -- ^ Cooperate
| D -- ^ Defect
deriving (Eq, Read, Show)
-- | A dilemma game is a normal form game about cooperating or defecting.
type Dilemma = Normal Cooperation
-- | The classic prisoner's dilemma.
pd :: Dilemma
pd = symmetric [C,D] [2,0,3,1]
-- | The stag hunt. Similar to the prisoner's dilemma except mutual
-- cooperation is both a Nash equilibrium and Pareto optimal, and is
-- therefore stable.
stag :: Dilemma
stag = symmetric [C,D] [3,0,2,1]
--
-- * Players
--
--
-- ** Simple strategies
-- | Always defects.
fink :: Player Dilemma
fink = "Fink" ::: pure D
-- | Always cooperates.
mum :: Player Dilemma
mum = "Mum" ::: pure C
-- | Alternates between cooperation and defection.
alt :: Player Dilemma
alt = "Alternator" ::: periodic [C,D]
-- | Alternates between defection and cooperation.
dc :: Player Dilemma
dc = "(DC)*" ::: periodic [D,C]
-- | Defects every third round.
ccd :: Player Dilemma
ccd = "(CCD)*" ::: periodic [C,C,D]
-- | Defects randomly with a probability of 1/6.
rr :: Player Dilemma
rr = "Russian Roulette" ::: mixed [(5,C), (1,D)]
-- | Plays randomly, cooperates or defects with equal probability.
randy :: DiscreteGame g => Player g
randy = "Randy" ::: randomly
--
-- ** More sophisticated/complex strategies
-- | The famous Tit-for-Tat. Cooperates initially, then plays the last
-- move played by its opponent.
tft :: Player Dilemma
tft = "Tit for Tat" ::: play C `atFirstThen` his (lastGame's onlyMove)
-- | The same strategy as 'alt', implemented using state.
alt' :: Player Dilemma
alt' = Player "Stately Alternator" C $
do m <- get
put $ if m == C then D else C
return m
-- | Cooperates every third round, implemented using state.
mod3 :: Player Dilemma
mod3 = Player "Mod3 Cooperator" 0 $
do i <- get
put (i+1)
return $ if i `mod` 3 == 0 then C else D
-- | Suspicious Tit-for-Tat. Like Tit-for-Tat but defect on first move.
suspicious :: Player Dilemma
suspicious = "Suspicious Tit-for-Tat" ::: play D `atFirstThen` his (lastGame's onlyMove)
-- | A variant of Tit-for-Tat that only defects after two defects in a row.
titForTwoTats :: Player Dilemma
titForTwoTats = "Tit-for-Two-Tats" ::: [play C, play C] `thereafter`
do ms <- his `each` lastNGames' 2 onlyMove
return $ if ms == [D, D] then D else C
-- | The Grim Trigger. Cooperates until the opponent defects, then defects
-- forever.
grim :: Player Dilemma
grim = "Grim Trigger" :::
do ms <- his `each` completedGames' onlyMove
play (if D `elem` ms then D else C)
-- | The Grim Trigger, implemented using state. Much faster than 'grim', since
-- it doesn't examine every previous game iteration.
grim' :: Player Dilemma
grim' = Player "Stately Grim" False $
play C `atFirstThen`
do m <- her (lastGame's onlyMove)
triggered <- update (|| m == D)
play (if triggered then D else C)
-- | If last move resulted in a "big" payoff, do it again, otherwise switch.
pavlov :: Player Dilemma
pavlov = "Pavlov" :::
randomly `atFirstThen`
do p <- my (lastGame's payoffs)
m <- my (lastGame's onlyMove)
return $ if p > 1 then m else
if m == C then D else C
-- | Picks randomly until it has a lead, then preserves it by repeatedly
-- defecting.
preserver :: Player Dilemma
preserver = "Preserver" :::
randomly `atFirstThen`
do me <- my score
he <- his score
if me > he then return D else randomly
--
-- * Experiments
--
-- | Run a tournament similar to Robert Axelrod's famous study.
axelrod :: [Player Dilemma] -> IO ()
axelrod ps = roundRobin pd 2 ps (times 200) >>= printResults
| pparkkin/Hagl | Hagl/Examples/Prisoner.hs | bsd-3-clause | 4,225 | 0 | 11 | 908 | 956 | 526 | 430 | 70 | 3 |
module All where
import FP ()
import MAAM ()
import Lang.LamIf ()
import Lang.Hask ()
| FranklinChen/maam | src/All.hs | bsd-3-clause | 87 | 0 | 4 | 16 | 32 | 21 | 11 | 5 | 0 |
{-# LANGUAGE PartialTypeSignatures, NamedWildcards #-}
module GenNamed where
bar :: _a -> _a
bar x = not x
| bitemyapp/ghc | testsuite/tests/partial-sigs/should_compile/GenNamed.hs | bsd-3-clause | 108 | 0 | 5 | 19 | 25 | 14 | 11 | 4 | 1 |
import Data.List
import Data.Function
main :: IO ()
main = do
let input = replicate 1000000 'c' ++ "defghi"
print $ trampoline $ snd $ break (== 'd') input
| rahulmutt/ghcvm | tests/suite/trampoline/run/TrampolineBreak.hs | bsd-3-clause | 161 | 0 | 11 | 34 | 68 | 34 | 34 | 6 | 1 |
-- |
-- Module : Crypto.MAC.HMAC
-- License : BSD-style
-- Maintainer : Vincent Hanquez <[email protected]>
-- Stability : experimental
-- Portability : unknown
--
-- Provide the HMAC (Hash based Message Authentification Code) base algorithm.
-- <http://en.wikipedia.org/wiki/HMAC>
--
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Crypto.MAC.HMAC
( hmac
, hmacLazy
, HMAC(..)
-- * Incremental
, Context(..)
, initialize
, update
, updates
, finalize
) where
import Crypto.Hash hiding (Context)
import qualified Crypto.Hash as Hash (Context)
import Crypto.Hash.IO
import Crypto.Internal.ByteArray (ScrubbedBytes, ByteArrayAccess)
import qualified Crypto.Internal.ByteArray as B
import Data.Memory.PtrMethods
import Crypto.Internal.Compat
import qualified Data.ByteString.Lazy as L
-- | Represent an HMAC that is a phantom type with the hash used to produce the mac.
--
-- The Eq instance is constant time. No Show instance is provided, to avoid
-- printing by mistake.
newtype HMAC a = HMAC { hmacGetDigest :: Digest a }
deriving (ByteArrayAccess)
instance Eq (HMAC a) where
(HMAC b1) == (HMAC b2) = B.constEq b1 b2
-- | Compute a MAC using the supplied hashing function
hmac :: (ByteArrayAccess key, ByteArrayAccess message, HashAlgorithm a)
=> key -- ^ Secret key
-> message -- ^ Message to MAC
-> HMAC a
hmac secret msg = finalize $ updates (initialize secret) [msg]
-- | Compute a MAC using the supplied hashing function, for a lazy input
hmacLazy :: (ByteArrayAccess key, HashAlgorithm a)
=> key -- ^ Secret key
-> L.ByteString -- ^ Message to MAC
-> HMAC a
hmacLazy secret msg = finalize $ updates (initialize secret) (L.toChunks msg)
-- | Represent an ongoing HMAC state, that can be appended with 'update'
-- and finalize to an HMAC with 'hmacFinalize'
data Context hashalg = Context !(Hash.Context hashalg) !(Hash.Context hashalg)
-- | Initialize a new incremental HMAC context
initialize :: (ByteArrayAccess key, HashAlgorithm a)
=> key -- ^ Secret key
-> Context a
initialize secret = unsafeDoIO (doHashAlg undefined)
where
doHashAlg :: HashAlgorithm a => a -> IO (Context a)
doHashAlg alg = do
!withKey <- case B.length secret `compare` blockSize of
EQ -> return $ B.withByteArray secret
LT -> do key <- B.alloc blockSize $ \k -> do
memSet k 0 blockSize
B.withByteArray secret $ \s -> memCopy k s (B.length secret)
return $ B.withByteArray (key :: ScrubbedBytes)
GT -> do
-- hash the secret key
ctx <- hashMutableInitWith alg
hashMutableUpdate ctx secret
digest <- hashMutableFinalize ctx
hashMutableReset ctx
-- pad it if necessary
if digestSize < blockSize
then do
key <- B.alloc blockSize $ \k -> do
memSet k 0 blockSize
B.withByteArray digest $ \s -> memCopy k s (B.length digest)
return $ B.withByteArray (key :: ScrubbedBytes)
else
return $ B.withByteArray digest
(inner, outer) <- withKey $ \keyPtr ->
(,) <$> B.alloc blockSize (\p -> memXorWith p 0x36 keyPtr blockSize)
<*> B.alloc blockSize (\p -> memXorWith p 0x5c keyPtr blockSize)
return $ Context (hashUpdates initCtx [outer :: ScrubbedBytes])
(hashUpdates initCtx [inner :: ScrubbedBytes])
where
blockSize = hashBlockSize alg
digestSize = hashDigestSize alg
initCtx = hashInitWith alg
{-# NOINLINE initialize #-}
-- | Incrementally update a HMAC context
update :: (ByteArrayAccess message, HashAlgorithm a)
=> Context a -- ^ Current HMAC context
-> message -- ^ Message to append to the MAC
-> Context a -- ^ Updated HMAC context
update (Context octx ictx) msg =
Context octx (hashUpdate ictx msg)
-- | Incrementally update a HMAC context with multiple inputs
updates :: (ByteArrayAccess message, HashAlgorithm a)
=> Context a -- ^ Current HMAC context
-> [message] -- ^ Messages to append to the MAC
-> Context a -- ^ Updated HMAC context
updates (Context octx ictx) msgs =
Context octx (hashUpdates ictx msgs)
-- | Finalize a HMAC context and return the HMAC.
finalize :: HashAlgorithm a
=> Context a
-> HMAC a
finalize (Context octx ictx) =
HMAC $ hashFinalize $ hashUpdates octx [hashFinalize ictx]
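-- Added usage sketch (illustrative, not part of the original module): the
-- one-shot and the incremental interfaces are expected to agree.
--
-- > import Crypto.Hash.Algorithms (SHA256)
-- > import qualified Data.ByteString.Char8 as B8
-- >
-- > oneShot, incremental :: HMAC SHA256
-- > oneShot     = hmac (B8.pack "secret") (B8.pack "message")
-- > incremental = finalize (update (initialize (B8.pack "secret")) (B8.pack "message"))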
| vincenthz/cryptonite | Crypto/MAC/HMAC.hs | bsd-3-clause | 5,167 | 0 | 28 | 1,786 | 1,069 | 568 | 501 | 88 | 4 |
{-# LANGUAGE GADTs, FlexibleInstances, TypeOperators, ScopedTypeVariables, RankNTypes #-}
-- |
-- Module : Data.Array.Accelerate.Pretty.Print
-- Copyright : [2008..2011] Manuel M T Chakravarty, Gabriele Keller, Sean Lee
-- License : BSD3
--
-- Maintainer : Manuel M T Chakravarty <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
module Data.Array.Accelerate.Pretty.Print (
-- * Pretty printing functions
PrettyAcc,
prettyPreAcc, prettyAcc,
prettyPreExp, prettyExp,
prettyPreAfun, prettyAfun,
prettyPreFun, prettyFun,
noParens
) where
-- standard libraries
import Text.PrettyPrint
import Prelude hiding (exp)
-- friends
import Data.Array.Accelerate.Array.Sugar
import Data.Array.Accelerate.Tuple
import Data.Array.Accelerate.AST
import Data.Array.Accelerate.Type
-- Pretty printing
-- ---------------
-- The type of pretty printing functions for array computations.
--
type PrettyAcc acc = forall aenv t. Int -> (Doc -> Doc) -> acc aenv t -> Doc
-- Pretty print an array expression
--
prettyAcc :: PrettyAcc OpenAcc
prettyAcc alvl wrap (OpenAcc acc) = prettyPreAcc prettyAcc alvl wrap acc
prettyPreAcc :: PrettyAcc acc -> Int -> (Doc -> Doc) -> PreOpenAcc acc aenv a -> Doc
prettyPreAcc pp alvl wrap (Let acc1 acc2)
= wrap
$ sep [ hang (text "let a" <> int alvl <+> char '=') 2 $
pp alvl noParens acc1
, text "in" <+> pp (alvl + 1) noParens acc2
]
prettyPreAcc pp alvl wrap (Let2 acc1 acc2)
= wrap
$ sep [ hang (text "let (a" <> int alvl <> text ", a" <> int (alvl + 1) <> char ')' <+>
char '=') 2 $
pp alvl noParens acc1
, text "in" <+> pp (alvl + 2) noParens acc2
]
prettyPreAcc pp alvl wrap (PairArrays acc1 acc2)
= wrap $ sep [pp alvl parens acc1, pp alvl parens acc2]
prettyPreAcc _ alvl _ (Avar idx)
= text $ 'a' : show (alvl - idxToInt idx - 1)
prettyPreAcc pp alvl wrap (Apply afun acc)
= wrap $ sep [parens (prettyPreAfun pp alvl afun), pp alvl parens acc]
prettyPreAcc pp alvl wrap (Acond e acc1 acc2)
= wrap $ prettyArrOp "cond" [prettyPreExp pp 0 alvl parens e, pp alvl parens acc1, pp alvl parens acc2]
prettyPreAcc _ _ wrap (Use arr)
= wrap $ prettyArrOp "use" [prettyArray arr]
prettyPreAcc pp alvl wrap (Unit e)
= wrap $ prettyArrOp "unit" [prettyPreExp pp 0 alvl parens e]
prettyPreAcc pp alvl wrap (Generate sh f)
= wrap
$ prettyArrOp "generate" [prettyPreExp pp 0 alvl parens sh, parens (prettyPreFun pp alvl f)]
prettyPreAcc pp alvl wrap (Reshape sh acc)
= wrap $ prettyArrOp "reshape" [prettyPreExp pp 0 alvl parens sh, pp alvl parens acc]
prettyPreAcc pp alvl wrap (Replicate _ty ix acc)
= wrap $ prettyArrOp "replicate" [prettyPreExp pp 0 alvl id ix, pp alvl parens acc]
prettyPreAcc pp alvl wrap (Index _ty acc ix)
= wrap $ sep [pp alvl parens acc, char '!', prettyPreExp pp 0 alvl id ix]
prettyPreAcc pp alvl wrap (Map f acc)
= wrap $ prettyArrOp "map" [parens (prettyPreFun pp alvl f), pp alvl parens acc]
prettyPreAcc pp alvl wrap (ZipWith f acc1 acc2)
= wrap
$ prettyArrOp "zipWith"
[parens (prettyPreFun pp alvl f), pp alvl parens acc1, pp alvl parens acc2]
prettyPreAcc pp alvl wrap (Fold f e acc)
= wrap
$ prettyArrOp "fold" [parens (prettyPreFun pp alvl f), prettyPreExp pp 0 alvl parens e,
pp alvl parens acc]
prettyPreAcc pp alvl wrap (Fold1 f acc)
= wrap $ prettyArrOp "fold1" [parens (prettyPreFun pp alvl f), pp alvl parens acc]
prettyPreAcc pp alvl wrap (FoldSeg f e acc1 acc2)
= wrap
$ prettyArrOp "foldSeg" [parens (prettyPreFun pp alvl f), prettyPreExp pp 0 alvl parens e,
pp alvl parens acc1, pp alvl parens acc2]
prettyPreAcc pp alvl wrap (Fold1Seg f acc1 acc2)
= wrap
$ prettyArrOp "fold1Seg" [parens (prettyPreFun pp alvl f), pp alvl parens acc1,
pp alvl parens acc2]
prettyPreAcc pp alvl wrap (Scanl f e acc)
= wrap
$ prettyArrOp "scanl" [parens (prettyPreFun pp alvl f), prettyPreExp pp 0 alvl parens e,
pp alvl parens acc]
prettyPreAcc pp alvl wrap (Scanl' f e acc)
= wrap
$ prettyArrOp "scanl'" [parens (prettyPreFun pp alvl f), prettyPreExp pp 0 alvl parens e,
pp alvl parens acc]
prettyPreAcc pp alvl wrap (Scanl1 f acc)
= wrap
$ prettyArrOp "scanl1" [parens (prettyPreFun pp alvl f), pp alvl parens acc]
prettyPreAcc pp alvl wrap (Scanr f e acc)
= wrap
$ prettyArrOp "scanr" [parens (prettyPreFun pp alvl f), prettyPreExp pp 0 alvl parens e,
pp alvl parens acc]
prettyPreAcc pp alvl wrap (Scanr' f e acc)
= wrap
$ prettyArrOp "scanr'" [parens (prettyPreFun pp alvl f), prettyPreExp pp 0 alvl parens e,
pp alvl parens acc]
prettyPreAcc pp alvl wrap (Scanr1 f acc)
= wrap
$ prettyArrOp "scanr1" [parens (prettyPreFun pp alvl f), pp alvl parens acc]
prettyPreAcc pp alvl wrap (Permute f dfts p acc)
= wrap
$ prettyArrOp "permute" [parens (prettyPreFun pp alvl f), pp alvl parens dfts,
parens (prettyPreFun pp alvl p), pp alvl parens acc]
prettyPreAcc pp alvl wrap (Backpermute sh p acc)
= wrap
$ prettyArrOp "backpermute" [prettyPreExp pp 0 alvl parens sh,
parens (prettyPreFun pp alvl p),
pp alvl parens acc]
prettyPreAcc pp alvl wrap (Stencil sten bndy acc)
= wrap
$ prettyArrOp "stencil" [parens (prettyPreFun pp alvl sten),
prettyBoundary acc bndy,
pp alvl parens acc]
prettyPreAcc pp alvl wrap (Stencil2 sten bndy1 acc1 bndy2 acc2)
= wrap
$ prettyArrOp "stencil2" [parens (prettyPreFun pp alvl sten),
prettyBoundary acc1 bndy1,
pp alvl parens acc1,
prettyBoundary acc2 bndy2,
pp alvl parens acc2]
prettyBoundary :: forall acc aenv dim e. Elt e
=> {-dummy-}acc aenv (Array dim e) -> Boundary (EltRepr e) -> Doc
prettyBoundary _ Clamp = text "Clamp"
prettyBoundary _ Mirror = text "Mirror"
prettyBoundary _ Wrap = text "Wrap"
prettyBoundary _ (Constant e) = parens $ text "Constant" <+> text (show (toElt e :: e))
prettyArrOp :: String -> [Doc] -> Doc
prettyArrOp name docs = hang (text name) 2 $ sep docs
-- Pretty print a function over array computations.
--
-- At the moment restricted to /closed/ functions.
--
prettyAfun :: Int -> Afun fun -> Doc
prettyAfun = prettyPreAfun prettyAcc
prettyPreAfun :: forall acc fun. PrettyAcc acc -> Int -> PreAfun acc fun -> Doc
prettyPreAfun pp _alvl fun =
let (n, bodyDoc) = count n fun
in
char '\\' <> hsep [text $ 'a' : show idx | idx <- [0..n]] <+>
text "->" <+> bodyDoc
where
count :: Int -> PreOpenAfun acc aenv' fun' -> (Int, Doc)
count lvl (Abody body) = (-1, pp (lvl + 1) noParens body) -- 'lvl+1' ok as functions is closed!
count lvl (Alam fun') = let (n, body) = count lvl fun' in (1 + n, body)
-- Pretty print a function over scalar expressions.
--
prettyFun :: Int -> OpenFun env aenv fun -> Doc
prettyFun = prettyPreFun prettyAcc
prettyPreFun :: forall acc env aenv fun. PrettyAcc acc -> Int -> PreOpenFun acc env aenv fun -> Doc
prettyPreFun pp alvl fun =
let (n, bodyDoc) = count n fun
in
char '\\' <> hsep [text $ 'x' : show idx | idx <- [0..n]] <+>
text "->" <+> bodyDoc
where
count :: Int -> PreOpenFun acc env' aenv' fun' -> (Int, Doc)
count lvl (Body body) = (-1, prettyPreExp pp lvl alvl noParens body)
count lvl (Lam fun') = let (n, body) = count lvl fun' in (1 + n, body)
-- Pretty print an expression.
--
-- * Apply the wrapping combinator (3rd argument) to any compound expressions.
--
prettyExp :: Int -> Int -> (Doc -> Doc) -> OpenExp env aenv t -> Doc
prettyExp = prettyPreExp prettyAcc
prettyPreExp :: forall acc t env aenv.
PrettyAcc acc -> Int -> Int -> (Doc -> Doc) -> PreOpenExp acc env aenv t -> Doc
prettyPreExp _pp lvl _ _ (Var idx)
= text $ 'x' : show (lvl - idxToInt idx)
prettyPreExp _pp _ _ _ (Const v)
= text $ show (toElt v :: t)
prettyPreExp pp lvl alvl _ (Tuple tup)
= prettyTuple pp lvl alvl tup
prettyPreExp pp lvl alvl wrap (Prj idx e)
= wrap $ prettyTupleIdx idx <+> prettyPreExp pp lvl alvl parens e
prettyPreExp _pp _lvl _alvl wrap IndexNil
= wrap $ text "index Z"
prettyPreExp pp lvl alvl wrap (IndexCons t h)
= wrap $
text "index" <+>
parens (prettyPreExp pp lvl alvl parens t <+> text ":." <+> prettyPreExp pp lvl alvl parens h)
prettyPreExp pp lvl alvl wrap (IndexHead ix)
= wrap $ text "indexHead" <+> prettyPreExp pp lvl alvl parens ix
prettyPreExp pp lvl alvl wrap (IndexTail ix)
= wrap $ text "indexTail" <+> prettyPreExp pp lvl alvl parens ix
prettyPreExp _ _ _ wrap (IndexAny)
= wrap $ text "indexAny"
prettyPreExp pp lvl alvl wrap (Cond c t e)
= wrap $ sep [prettyPreExp pp lvl alvl parens c <+> char '?',
parens (prettyPreExp pp lvl alvl noParens t <> comma <+>
prettyPreExp pp lvl alvl noParens e)]
prettyPreExp _pp _ _ _ (PrimConst a)
= prettyConst a
prettyPreExp pp lvl alvl wrap (PrimApp p a)
= wrap $ prettyPrim p <+> prettyPreExp pp lvl alvl parens a
prettyPreExp pp lvl alvl wrap (IndexScalar idx i)
= wrap $ cat [pp alvl parens idx, char '!', prettyPreExp pp lvl alvl parens i]
prettyPreExp pp _lvl alvl wrap (Shape idx)
= wrap $ text "shape" <+> pp alvl parens idx
prettyPreExp pp _lvl alvl wrap (Size idx)
= wrap $ text "size" <+> pp alvl parens idx
-- Pretty print nested pairs as a proper tuple.
--
prettyTuple :: forall acc env aenv t.
PrettyAcc acc -> Int -> Int -> Tuple (PreOpenExp acc env aenv) t -> Doc
prettyTuple pp lvl alvl exp = parens $ sep (map (<> comma) (init es) ++ [last es])
where
es = collect exp
--
collect :: Tuple (PreOpenExp acc env aenv) t' -> [Doc]
collect NilTup = []
collect (SnocTup tup e) = collect tup ++ [prettyPreExp pp lvl alvl noParens e]
-- Pretty print an index for a tuple projection
--
prettyTupleIdx :: TupleIdx t e -> Doc
prettyTupleIdx = int . toInt
where
toInt :: TupleIdx t e -> Int
toInt ZeroTupIdx = 0
toInt (SuccTupIdx tup) = toInt tup + 1
-- Pretty print a primitive constant
--
prettyConst :: PrimConst a -> Doc
prettyConst (PrimMinBound _) = text "minBound"
prettyConst (PrimMaxBound _) = text "maxBound"
prettyConst (PrimPi _) = text "pi"
-- Pretty print a primitive operation
--
prettyPrim :: PrimFun a -> Doc
prettyPrim (PrimAdd _) = text "(+)"
prettyPrim (PrimSub _) = text "(-)"
prettyPrim (PrimMul _) = text "(*)"
prettyPrim (PrimNeg _) = text "negate"
prettyPrim (PrimAbs _) = text "abs"
prettyPrim (PrimSig _) = text "signum"
prettyPrim (PrimQuot _) = text "quot"
prettyPrim (PrimRem _) = text "rem"
prettyPrim (PrimIDiv _) = text "div"
prettyPrim (PrimMod _) = text "mod"
prettyPrim (PrimBAnd _) = text "(.&.)"
prettyPrim (PrimBOr _) = text "(.|.)"
prettyPrim (PrimBXor _) = text "xor"
prettyPrim (PrimBNot _) = text "complement"
prettyPrim (PrimBShiftL _) = text "shiftL"
prettyPrim (PrimBShiftR _) = text "shiftR"
prettyPrim (PrimBRotateL _) = text "rotateL"
prettyPrim (PrimBRotateR _) = text "rotateR"
prettyPrim (PrimFDiv _) = text "(/)"
prettyPrim (PrimRecip _) = text "recip"
prettyPrim (PrimSin _) = text "sin"
prettyPrim (PrimCos _) = text "cos"
prettyPrim (PrimTan _) = text "tan"
prettyPrim (PrimAsin _) = text "asin"
prettyPrim (PrimAcos _) = text "acos"
prettyPrim (PrimAtan _) = text "atan"
prettyPrim (PrimAsinh _) = text "asinh"
prettyPrim (PrimAcosh _) = text "acosh"
prettyPrim (PrimAtanh _) = text "atanh"
prettyPrim (PrimExpFloating _) = text "exp"
prettyPrim (PrimSqrt _) = text "sqrt"
prettyPrim (PrimLog _) = text "log"
prettyPrim (PrimFPow _) = text "(**)"
prettyPrim (PrimLogBase _) = text "logBase"
prettyPrim (PrimTruncate _ _) = text "truncate"
prettyPrim (PrimRound _ _) = text "round"
prettyPrim (PrimFloor _ _) = text "floor"
prettyPrim (PrimCeiling _ _) = text "ceiling"
prettyPrim (PrimAtan2 _) = text "atan2"
prettyPrim (PrimLt _) = text "(<*)"
prettyPrim (PrimGt _) = text "(>*)"
prettyPrim (PrimLtEq _) = text "(<=*)"
prettyPrim (PrimGtEq _) = text "(>=*)"
prettyPrim (PrimEq _) = text "(==*)"
prettyPrim (PrimNEq _) = text "(/=*)"
prettyPrim (PrimMax _) = text "max"
prettyPrim (PrimMin _) = text "min"
prettyPrim PrimLAnd = text "&&*"
prettyPrim PrimLOr = text "||*"
prettyPrim PrimLNot = text "not"
prettyPrim PrimOrd = text "ord"
prettyPrim PrimChr = text "chr"
prettyPrim PrimBoolToInt = text "boolToInt"
prettyPrim (PrimFromIntegral _ _) = text "fromIntegral"
{-
-- Pretty print type
--
prettyAnyType :: ScalarType a -> Doc
prettyAnyType ty = text $ show ty
-}
prettyArray :: forall dim e. Array dim e -> Doc
prettyArray arr@(Array sh _)
= parens $
hang (text "Array") 2 $
sep [showDoc (toElt sh :: dim), dataDoc]
where
showDoc :: forall a. Show a => a -> Doc
showDoc = text . show
l = toList arr
dataDoc | length l <= 1000 = showDoc l
| otherwise = showDoc (take 1000 l) <+>
text "{truncated at 1000 elements}"
-- Auxiliary pretty printing combinators
--
noParens :: Doc -> Doc
noParens = id
-- Auxiliary ops
--
-- Auxiliary dictionary operations
--
{-
-- Show scalar values
--
runScalarShow :: ScalarType a -> (a -> String)
runScalarShow (NumScalarType (IntegralNumType ty))
| IntegralDict <- integralDict ty = show
runScalarShow (NumScalarType (FloatingNumType ty))
| FloatingDict <- floatingDict ty = show
runScalarShow (NonNumScalarType ty)
| NonNumDict <- nonNumDict ty = show
-}
| wilbowma/accelerate | Data/Array/Accelerate/Pretty/Print.hs | bsd-3-clause | 14,281 | 0 | 16 | 3,777 | 4,972 | 2,477 | 2,495 | 263 | 2 |
{-# LANGUAGE DeriveDataTypeable, GeneralizedNewtypeDeriving, TemplateHaskell #-}
module Distribution.Server.Framework.AuthTypes where
import Distribution.Server.Framework.MemSize
import Data.SafeCopy (base, deriveSafeCopy)
import Data.Typeable (Typeable)
-- | A plain, unhashed password. Careful what you do with them.
--
newtype PasswdPlain = PasswdPlain String
deriving Eq
-- | A password hash. It actually contains the hash of the username, password
-- and realm.
--
-- Hashed passwords are stored in the format
-- @md5 (username ++ ":" ++ realm ++ ":" ++ password)@. This format enables
-- us to use either the basic or digest HTTP authentication methods.
--
newtype PasswdHash = PasswdHash String
deriving (Eq, Ord, Show, Typeable, MemSize)
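-- A sketch of how such a hash could be produced (added; 'md5Hex' is a
-- hypothetical helper standing in for whatever MD5 routine is actually used):
--
-- > mkPasswdHash :: String -> RealmName -> PasswdPlain -> PasswdHash
-- > mkPasswdHash user (RealmName realm) (PasswdPlain pass) =
-- >   PasswdHash (md5Hex (user ++ ":" ++ realm ++ ":" ++ pass))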
newtype RealmName = RealmName String
deriving (Show, Eq)
$(deriveSafeCopy 0 'base ''PasswdPlain)
$(deriveSafeCopy 0 'base ''PasswdHash)
| mpickering/hackage-server | Distribution/Server/Framework/AuthTypes.hs | bsd-3-clause | 895 | 0 | 8 | 131 | 143 | 86 | 57 | 13 | 0 |
module Nesting where
{-|
* We can
* easily go back
1. some indentation
* levels
1. @back at the top@
-}
d :: t
d = undefined
{-|
* Beginning of list
* second list
* Some indented list but
the presence of this text pushes it out of nesting back to the top.
-}
e :: t
e = undefined
{-|
* Beginning of list
@
nested code
we preserve the space correctly
@
-}
f :: t
f = undefined
{-|
* Beginning of list
* Nested list
-}
g :: t
g = undefined
{-|
* Beginning of list
> nested
> bird
> tracks
-}
h :: t
h = undefined
{-|
* Beginning of list
This belongs to the list above!
> nested
> bird
> tracks
>
> another line
> with indentation
>nested bird tracks
> without leading space
* Next list
More of the indented list.
* Deeper
* Deeper
* Even deeper!
* No newline separation even in indented lists.
-}
i :: t
i = undefined
{-|
[All this] Works for
definition lists too.
> nested
> bird
> tracks
* Next list
with more of the indented list content.
Even more content on a new line.
1. Different type of list
(2) Deeper
>>> Here's an example in a list
example result
[b] Even deeper!
[c] No newline separation even in indented lists.
We can have any paragraph level element that we normally
can, like headers
=== Level 3 header
with some content…
* and even more lists inside
-}
j :: t
j = undefined
{-|
- list may start at arbitrary depth
- and consecutive items at that depth
belong to the same list
- of course we can still
* nest items like we are used to
- and then get back to initial list
-}
k :: t
k = undefined
| Acidburn0zzz/haddock | html-test/src/Nesting.hs | bsd-2-clause | 1,973 | 0 | 4 | 778 | 92 | 59 | 33 | 17 | 1 |
-- !!! Pattern binding must bind (not an error in standard Haskell)
module M where
x = let ['a'] = "a" in 'a'
| urbanslug/ghc | testsuite/tests/module/mod64.hs | bsd-3-clause | 110 | 0 | 9 | 23 | 25 | 14 | 11 | 2 | 1 |
module Parser where
import Expense
import StringUtils
import Data.Char
import Data.List
import Text.ParserCombinators.ReadP
data ParseError = ParseError { line :: Int
, col :: Int
, snip :: String
}
instance Show ParseError where
show (ParseError l c s) = format "Error at line {0}, column {1}: {2}"
[show l, show c, s]
parseSpaces :: ReadP ()
parseSpaces = do _ <- munch isSpace
return ()
parseInt :: ReadP Double
parseInt = do a <- munch1 isDigit
return ((read a) :: Double)
parseDouble :: ReadP Double
parseDouble = do a <- munch1 isDigit
b <- satisfy (== '.')
c <- munch1 isDigit
return $ read (a ++ [b] ++ c)
getAmount :: ReadP Double
getAmount = parseDouble <++ parseInt
getDate :: ReadP (Integer, Int, Int)
getDate = do y <- munch1 isDigit
_ <- satisfy (== '-')
m <- munch1 isDigit
_ <- satisfy (== '-')
d <- munch1 isDigit
return (read y, read m, read d)
parseTag :: Bool -> ReadP String
parseTag True = munch1 (\c -> not $ (isSpace c || c == ','))
parseTag False = do _ <- satisfy (== ',')
parseTag True
getTags :: ReadP [String]
getTags = do h <- parseTag True
t <- many $ parseTag False
return (h:t)
getNote :: ReadP String
getNote = do _ <- satisfy (== '"')
n <- munch (/= '"')
_ <- satisfy (== '"')
return n
getExpense :: ReadP Expense
getExpense = do _ <- parseSpaces
a <- getAmount
_ <- parseSpaces
d <- getDate
_ <- parseSpaces
t <- getTags
_ <- parseSpaces
n <- getNote
_ <- parseSpaces
return Expense { amountOf = a
, dateOf = d
, tagsOf = t
, noteOf = n
}
parseExpenses :: ReadP [Expense]
parseExpenses = many getExpense
-- FIXME Extremely inefficient and messy, but good enough for now
getExpenses :: String -> Either ParseError [Expense]
getExpenses xs
| null unparsed = Right res
| otherwise = Left $ ParseError { line = l
, col = c
, snip = s
}
where (res, unparsed) = last $ readP_to_S parseExpenses $ xs
parsed = take (length xs - length unparsed) xs
parsedL = lines parsed
lastParsedL = last parsedL
l = length parsedL
-- +2: 1 for starting line index at 1, 1 for next (unparsed) char
c = length lastParsedL + 2
s = (take 10 lastParsedL) ++ "..." ++ (take n unparsed) ++ "..."
where n = min 10 (case elemIndex '\n' unparsed of
Nothing -> 10
Just x -> x)
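-- Added note (illustrative): a line this grammar accepts looks like
--
-- > 12.50 2015-03-01 food,lunch "sandwich for lunch"
--
-- i.e. an amount, a YYYY-MM-DD date, comma-separated tags and a quoted note.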
| fredmorcos/attic | projects/pet/archive/pet_haskell_pet2/Parser.hs | isc | 3,032 | 0 | 14 | 1,270 | 911 | 457 | 454 | 77 | 2 |
import Control.Monad (liftM2)
import Notes
import Test.QuickCheck
instance Arbitrary PitchClass where
arbitrary = elements [C .. B]
instance Arbitrary Note where
arbitrary = liftM2 Note arbitrary arbitrary
testInverse :: Note -> Semitone -> Bool
testInverse (Note p o) x = transpose
(transpose (Note p o) x) (-x) == (Note p o)
main = quickCheck testInverse
| Lokilow/Functional-Music | test-suite/Algebraic/NoteTest.hs | mit | 419 | 0 | 10 | 117 | 138 | 72 | 66 | 11 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Chattp.Webapp.IPC where
import Chattp.Webapp.Conf
import Chattp.Webapp.Protocol
import Chattp.Webapp.InternalCommunication
import Text.ProtocolBuffers.Header
import Text.ProtocolBuffers.WireMessage
import Chattp.WebappRequestMessage as Rq
import Chattp.WebappResponseMessage as Rp
import Control.Concurrent
import qualified Data.ByteString.Lazy.Char8 as BS
import System.Directory
import System.IO.Error
import Network.Socket
import qualified Network.Socket.ByteString as NBS
-- Thread code handling incoming messages
-- This thread parses the messages before sending them to "center"; since several of these
-- threads can run concurrently, parsing here is not a performance bottleneck.
socketIncoming :: Socket -> Chan CenterRequestOrResponse -> IO ()
socketIncoming sock chanToCenter = do
(contents,_addr) <- NBS.recvFrom sock 16384
case parseAnswer (BS.fromStrict contents) of
Right msg -> writeChan chanToCenter (BrokerCenterResponse msg) >> socketIncoming sock chanToCenter
Left err -> putStrLn ("ERR : Discarded message because of: " ++ err) >> socketIncoming sock chanToCenter -- discard
-- Thread code handling outgoing messages
socketOutgoing :: WebappConfiguration -> Socket -> ChanInfo -> IO ()
socketOutgoing conf sock chans = do
msg <- readChan (brokerRequestChan chans)
let rawMessage = BS.toStrict . messagePut $ msg
catchIOError (NBS.sendTo sock rawMessage (brokerSockAddr conf)) (errHandler (requestsAndResponsesToCenterChan chans) msg)
socketOutgoing conf sock chans
where errHandler :: Chan CenterRequestOrResponse -> WebappRequestMessage -> IOError -> IO Int -- if there is an error, an error message is sent back.
errHandler bc msg _ = writeChan bc
(BrokerCenterResponse $ defaultValue { Rp.type' = rqToRpType (Rq.type' msg),
Rp.sequence_number = Rq.sequence_number msg,
Rp.status = Just False,
Rp.error_message = Just $ uFromString "Couldn't reach broker",
Rp.error_code = Just $ fromIntegral 16 } ) >> return 0
-- Socket setup
createWebappSocket :: WebappConfiguration -> IO Socket
createWebappSocket conf | bindFamily conf == WAFamilyUnix = createUnixSocket conf
| bindFamily conf == WAFamilyInet = createInetSocket conf
createUnixSocket, createInetSocket :: WebappConfiguration -> IO Socket
createUnixSocket conf = do
sock <- socket AF_UNIX Datagram defaultProtocol
catchIOError (removeFile (bindAddress conf)) (const $ return ())
bind sock (SockAddrUnix (bindAddress conf))
return sock
createInetSocket conf = do
addrinfos <- getAddrInfo (Just (defaultHints {addrFlags = [],
addrFamily = AF_UNSPEC,
addrSocketType = Datagram }) )
(Just (bindAddress conf))
(Just (show $ bindPort conf))
if null addrinfos
then fail "Couldn't obtain address information (getaddrinfo failed)"
else do
let ai = head addrinfos
sock <- socket (addrFamily ai) Datagram defaultProtocol
bind sock (addrAddress ai)
return sock
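-- Added wiring sketch ('startIPC' is a hypothetical helper; 'forkIO' comes from
-- Control.Concurrent and the channel accessors from InternalCommunication):
--
-- > startIPC :: WebappConfiguration -> ChanInfo -> IO ()
-- > startIPC conf chans = do
-- >     sock <- createWebappSocket conf
-- >     _ <- forkIO (socketIncoming sock (requestsAndResponsesToCenterChan chans))
-- >     _ <- forkIO (socketOutgoing conf sock chans)
-- >     return ()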
| Spheniscida/cHaTTP | webapp/Chattp/Webapp/IPC.hs | mit | 3,337 | 0 | 16 | 862 | 774 | 394 | 380 | 56 | 2 |
{-# LANGUAGE OverloadedStrings, QuasiQuotes #-}
module Y2018.M04.D11.Exercise where
{--
Like we've done before, we need to download packets of information (articles)
and (eventually) store those articles, but also store the packet information
that we used to download the articles
see also: Y2017.M12.D20.Exercise.
But this is different: in last year's exercise, the packet contained
information about itself. This REST endpoint has no such packet
information, so we'll just store what we know about this packet: the count,
the start and end article IDs, and the time downloaded.
... but we don't have to store the article ids, as those are all derived
from the associated join table.
We'll look at downloading a packet of articles in another exercise (spoiler:
Y2018.M04.D12.Exercise); for today, given the below structures, upload the
packet information to the PostgreSQL database.
--}
import Data.Aeson (Value)
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToRow
-- below imports available via 1HaskellADay git repository
import Data.Time.Stamped
import Store.SQL.Connection
import Store.SQL.Util.Indexed
type PageNumber = Int
type Count = Int
data Protec = Pro { page :: PageNumber, count :: Count, arts :: [Value] }
deriving Show
-- (I call it Protec for 'reasons' ... yes, I'm weird)
instance ToRow Protec where
toRow p = undefined
protecStmt :: Query
protecStmt = [sql|INSERT INTO package (time, page, count)
VALUES (?, ?, ?) returning id|]
insertProtec :: Connection -> Protec -> IO Index
insertProtec conn prot = undefined
-- Note, we want to insert a Stamped Protec at the time of insert, hint-hint
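-- One possible shape of the answer (added sketch, not the official solution;
-- 'stampIt' is a hypothetical helper from Data.Time.Stamped that attaches the
-- current time, and a ToRow instance for the stamped value is assumed so that
-- the (time, page, count) placeholders of protecStmt are filled):
--
-- > insertProtec conn prot = do
-- >    stamped <- stampIt prot
-- >    [ix] <- query conn protecStmt stamped
-- >    return ix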
protec :: Protec
protec = Pro 1 100 []
-- insert the above value. What value do you get in return?
-- we'll do article insertion from Protec values and article-packet join later
| geophf/1HaskellADay | exercises/HAD/Y2018/M04/D11/Exercise.hs | mit | 1,911 | 0 | 9 | 329 | 184 | 116 | 68 | 21 | 1 |
module Language.Plover.CLI
(CompilerOpts(..), TargetFlag(..), hasOptFlag, compilerOpts, splitColon)
where
import System.Console.GetOpt
import System.Exit
import qualified Data.Set as S
import Data.Maybe
import Data.List
data CompilerOpts
= CompilerOpts
{ inputFiles :: [String]
, unitName :: Maybe String
, hFilePrefix :: Maybe String
, cFilePrefix :: Maybe String
, libPrefix :: Maybe String
, includePaths :: [String]
, target :: TargetFlag
, debugMode :: Bool
, helpMode :: Bool
, optMode :: OptFlags
} deriving (Show)
data TargetFlag = TargetParse
| TargetConvert
| TargetTypeCheck
| TargetCodeGen
| TargetDefault
deriving (Show, Eq)
type OptFlags = S.Set String
defaultOptions :: CompilerOpts
defaultOptions = CompilerOpts
{ inputFiles = []
, unitName = Nothing
, hFilePrefix = Nothing
, cFilePrefix = Nothing
, libPrefix = Nothing
, includePaths = ["."]
, target = TargetDefault
, debugMode = False
, helpMode = False
, optMode = defaultOptFlags
}
defaultOptFlags :: OptFlags
defaultOptFlags = S.empty
hasOptFlag :: String -> OptFlags -> Bool
hasOptFlag name opts = name `S.member` opts
optimizations :: [(String, String)]
optimizations =
[
("test", "A placeholder optimization")
]
optimizationClasses =
[ ("all", map fst optimizations) ]
showOptimizations :: String
showOptimizations = unlines $ map showOpt optimizations
where maxnamelength = maximum $ map (length . fst) optimizations
showOpt (name, desc) = replicate (maxnamelength - length name) ' '
++ name ++ " : " ++ desc
showOptClasses :: String
showOptClasses = "\n Optimization classes which can be passed to --opt:\n"
++ (unlines $ map showOptClass optimizationClasses)
where maxnamelength = max 10 (maximum $ map (length . fst) optimizationClasses)
showOptClass (name, opts) = replicate (maxnamelength - length name) ' '
++ name ++ " : " ++ optlist opts
optlist opts = prelines $ map (intercalate " ") $ intoFive opts
prelines = intercalate ("\n " ++ replicate maxnamelength ' ')
intoFive :: [a] -> [[a]]
intoFive list | null list = []
| length list < 5 = [list]
| otherwise = let (xs, ys) = splitAt 5 list
in xs:(intoFive ys)
options :: [OptDescr (CompilerOpts -> CompilerOpts)]
options =
[ Option ['o'] ["out"] (ReqArg unitName' "NAME") "Output unit NAME"
, Option [] ["cdir"] (ReqArg cFilePrefix' "DIR")
"Directory to place generated .c files"
, Option [] ["hdir"] (ReqArg hFilePrefix' "DIR")
"Directory to place generated .h files"
, Option [] ["libprefix"] (ReqArg libPrefix' "STRING")
"Library prefix to prefix includes with"
, Option "I" [] (ReqArg includePaths' "DIRS")
"List of module search paths"
, Option ['t'] ["target"] (ReqArg target' "TARGET")
("Set target type:\n" ++
"\t parse : Parses the input file\n" ++
"\t convert : Converts the input file to core\n" ++
"\t typecheck : Typechecks the file\n" ++
"\t codegen : Outputs the generated C" )
, Option ['d'] ["debug"] (NoArg debug') "Enables debug mode"
, Option ['h'] ["help"] (NoArg help') "Prints this usage information"
, Option ['O'] ["opt"] (ReqArg optimize' "OPTIMIZATION")
("Enables optimizations:\n"
++ showOptimizations
++ "\nPrefixing an optimization with '-' disables it."
++ "\nall/none enables/disables ALL optimizations.")
]
where unitName' s opts = opts { unitName = Just s }
cFilePrefix' s opts = opts { cFilePrefix = Just s }
hFilePrefix' s opts = opts { hFilePrefix = Just s }
libPrefix' s opts = opts { libPrefix = Just s }
includePaths' s opts = opts { includePaths = splitColon s ++ includePaths opts }
target' t opts = opts { target = targetOpt t }
debug' opts = opts { debugMode = True }
help' opts = opts { helpMode = True }
optimize' t opts
= opts { optMode = foldl (flip id) (optMode opts)
[optOpt p | p <- splitOn ',' t] }
splitOn :: Char -> String -> [String]
splitOn delim = foldr split' [""]
where split' c l@(x:xs) | c == delim = []:l
| otherwise = (c:x):xs
targetOpt :: String -> TargetFlag
targetOpt s = case s of
"parse" -> TargetParse
"convert" -> TargetConvert
"typecheck" -> TargetTypeCheck
"codegen" -> TargetCodeGen
_ -> TargetDefault
optOpt :: String -> OptFlags -> OptFlags
optOpt s opts = case s of
"none" -> S.empty
'-':name -> opts S.\\ optLookup name
name -> opts `S.union` optLookup name
where optLookup name = case lookup name optimizationClasses of
Just s -> S.fromList s
Nothing -> case lookup name optimizations of
Just _ -> S.singleton name
Nothing -> S.empty
-- | Takes an argument list and gives a 'CompilerOpts'. If there is a
-- parse error or help request, this function uses 'System.Exit' to
-- halt the entire program.
compilerOpts :: [String] -> IO CompilerOpts
compilerOpts argv = case getOpt argorder options argv of
(o,_,[]) -> let opts = foldl (flip id) defaultOptions o
in do
mapM_ (doHandler opts) optionHandlers
return opts
(_,_,errs) -> do putStr (concat errs ++ usageInfo usageHeader options)
exitWith $ ExitFailure 1
where
argorder = ReturnInOrder (\s opts -> opts { inputFiles = inputFiles opts ++ [s] })
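-- Added example of how arguments map onto 'CompilerOpts' (illustrative):
--
-- > opts <- compilerOpts ["-I", "lib:src", "--target", "codegen", "Main.plv"]
-- > -- target opts       == TargetCodeGen
-- > -- inputFiles opts   == ["Main.plv"]
-- > -- includePaths opts == ["lib", "src", "."]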
-- Option handling stuff
type OptionPred = CompilerOpts -> Bool
type OptionChecker = (OptionPred, IO ())
usageHeader :: String
usageHeader = "Usage: plover [OPTIONS...] sources"
doHandler :: CompilerOpts -> OptionChecker -> IO ()
doHandler opts (p, m) = if p opts then return () else m
implies :: Bool -> Bool -> Bool
implies a b = not a || b
equiv :: Bool -> Bool -> Bool
equiv a b = implies a b && implies b a
okayDir :: OptionPred
okayDir opts = isJust (cFilePrefix opts) == isJust (hFilePrefix opts)
okayO :: OptionPred
okayO opts = isJust (unitName opts) `implies` (length (inputFiles opts) <= 1)
optionHandlers :: [OptionChecker]
optionHandlers =
[ (not . helpMode, printHelpMode)
, (okayDir, printError "c_output and h_output options must accompany each other.")
, (okayO, printError "Must have at most one compilation unit with --out option.")
]
printError :: String -> IO ()
printError str = do
putStrLn $ "Error: " ++ str
exitWith $ ExitFailure 1
printHelpMode :: IO ()
printHelpMode = do
putStr $ usageInfo usageHeader options
putStr $ showOptClasses
exitSuccess
splitColon = splitColon' ""
splitColon' acc [] = [reverse acc]
splitColon' acc (':' : cs) = reverse acc : splitColon' "" cs
splitColon' acc (c : cs) = splitColon' (c : acc) cs
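-- Added examples (illustrative):
--
-- > splitColon "a:b:c" == ["a", "b", "c"]
-- > splitColon ""      == [""]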
| swift-nav/plover | src/Language/Plover/CLI.hs | mit | 7,435 | 0 | 14 | 2,279 | 2,108 | 1,119 | 989 | 164 | 5 |
module KAT.Utils
( module Crypto.Cipher.Tests
, concatKATs
) where
import Crypto.Cipher.Tests
concatKATs :: [KATs] -> KATs
concatKATs l = KATs (m kat_ECB) (m kat_CBC) (m kat_CFB) (m kat_CTR) (m kat_XTS) (m kat_AEAD)
where
m :: (KATs -> [x]) -> [x]
m sel = concat $ map sel l
| stbuehler/haskell-nettle | src/Tests/KAT/Utils.hs | mit | 284 | 4 | 10 | 57 | 138 | 75 | 63 | 8 | 1 |
module Test.DistMatrix where
import Data.Aeson
import Data.Either
import qualified Data.ByteString.Char8 as C8
import Test.Hspec
import Web.Google.Maps.Services.DistanceMatrix
import Test.DistMatrixData
tests :: Spec
tests =
describe "Response tests" $ do
let result :: Either String DMResponse
result = eitherDecodeStrict . C8.pack $ okResponse
it "can be parsed" $
result `shouldSatisfy` isRight
it "has status ok" $
result `shouldSatisfy` statusOk
statusOk :: Either String DMResponse -> Bool
statusOk (Left _) = False
statusOk (Right r) = dmrStatus r == Ok
| jhedev/google-maps | test/Test/DistMatrix.hs | mit | 626 | 0 | 13 | 142 | 170 | 92 | 78 | 19 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE LambdaCase #-}
module Main where
import Prelude hiding (FilePath)
import Control.Monad
import Data.Maybe
import qualified Data.Text as T
import System.Environment
import Shelly
psql_path :: FilePath
psql_path = "psql"
schema_init_path :: FilePath
schema_init_path = "schema_init.sql"
preflight :: FilePath -> Sh [T.Text]
preflight script = liftM catMaybes checks
where checks = sequence [
test_px psql_path >>= (\e -> return $ if e then Nothing else Just $ "psql not found in $PATH.")
,test_f script >>= (\e -> return $ if e then Nothing else Just $ (toTextIgnore script) `T.append` " not found.")
]
main :: IO()
main = getArgs >>= \case [] -> shelly $ defaultScript
[s] -> shelly $ (thisScript . fromText . T.pack) s
where defaultScript = do
preflight schema_init_path >>= \case [] -> return ()
es -> echo (T.unlines es) >> quietExit 1
run_ psql_path ["-U", "broscore", "-f", toTextIgnore schema_init_path]
thisScript s = do
preflight s >>= \case [] -> return ()
es -> echo (T.unlines es) >> quietExit 1
run_ psql_path ["-U", "broscore", "-f", toTextIgnore s]
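-- Added usage note (derived from 'main' above): with no arguments the script
-- runs schema_init.sql, with one argument it runs that file instead, e.g.
--
-- > ./SchemaInit             # psql -U broscore -f schema_init.sql
-- > ./SchemaInit extra.sql   # psql -U broscore -f extra.sql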
| TravisWhitaker/BroScore-backend | sql/SchemaInit.hs | mit | 1,391 | 0 | 17 | 422 | 405 | 215 | 190 | 30 | 4 |
{------------------------------------------------------------------------------
uPuppet: Evaluation
------------------------------------------------------------------------------}
module UPuppet.Eval ( evalPuppet ) where
import Data.List
import Debug.Trace
import UPuppet.CState
import UPuppet.AST
import UPuppet.Catalog
import UPuppet.Options
{------------------------------------------------------------------------------
Types private to the evaluation
------------------------------------------------------------------------------}
-- define the type of the environment
type Env = [(Scope, Name, Value)]
-- look up a variable under some scope in the environment
lookupEnv :: Env -> Scope -> Name -> Maybe Value
lookupEnv [] _ _ = Nothing
lookupEnv ((s, n, v):es) sco x | (x == n && sco == s) = (Just v)
| otherwise = (lookupEnv es sco x)
-- clear the elements in the environment associated with some specific scope
clearScope :: Scope -> Env -> Env
clearScope sco [] = []
clearScope sco ((s,n,v):es) | s == sco = clearScope sco es
| otherwise = (s,n,v):clearScope sco es
-- define the datatype for definitions (classes and resource types)
data Def = ClassDef (Maybe Name) OptParameterList Statements
| DeclaredClass (Scope)
| ResTypeDef Name OptParameterList Statements
deriving (Show)
-- define the type for the definition environment
type DefEnv = [(Name, Def)]
-- the parent scope of a current scope for dereferencing
parentof :: DefEnv -> Scope -> Scope
parentof defEnv sco = case sco of
SClass b -> (lookupDefEnv defEnv b)
SNode -> STop
STop -> error "Top scope: No higher scope"
SDef b -> baseof defEnv b
-- the base scope (toplevel or node) in effect in a given scope
baseof :: DefEnv -> Scope -> Scope
baseof defEnv STop = STop
baseof defEnv SNode = SNode
baseof defEnv (SDef sco) = baseof defEnv sco
baseof defEnv (SClass a) = baseof defEnv (lookupDefEnv defEnv a)
-- look up the variables in the environment with respect to the parent scope relation
lookforVar :: Env -> DefEnv -> Scope -> Variable -> Value
-- when the variable is a local variable
lookforVar es defEnv sco (LocalVar x) = case (lookupEnv es sco x) of
(Just b) -> b
Nothing -> case sco of STop -> (error ("unqualified variable not found in any scope: " ++ show x))
sco -> (lookforVar es defEnv (parentof defEnv sco) (LocalVar x))
-- when the variable is a variable with a scope
lookforVar es defEnv sco (ScopeVar sco' x) = case (lookupEnv es sco' x) of
(Just b) -> b
Nothing -> error ("lookForVar: " ++ (show sco') ++ " :: " ++ (show x))
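-- A small worked example of the scope walk (added; the values are made up): with
-- env = [(STop, "y", ValueInt 7)] and defEnv = [("a", DeclaredClass STop)],
-- looking up $y inside class "a" misses in SClass "a" and then succeeds in its
-- parent scope STop:
--
-- > lookforVar [(STop, "y", ValueInt 7)] [("a", DeclaredClass STop)]
-- >            (SClass "a") (LocalVar "y")   -- ==> ValueInt 7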
-- create an environment by adding a scope
extendEnv :: Scope -> [(String, ValueExp)] -> Env
extendEnv _ [] = []
extendEnv sco ((x, (DeRef (Values y))):ys) = (sco, x, y):(extendEnv sco ys)
-- change the status of a class in the definition environment to "Declared"
changeDef :: DefEnv -> String -> Scope -> DefEnv
changeDef ((n, def):ds) a sco | n /= a = (n, def):(changeDef ds a sco)
| n == a = (n, (DeclaredClass sco)):ds
-- look up the definition environment for the parent class of a class
lookupDefEnv :: DefEnv -> Name -> Scope
lookupDefEnv [] b = STop
lookupDefEnv ((a, def):ds) b | a == b = case def of
DeclaredClass sco -> sco
| a /= b = (lookupDefEnv ds b)
-- look up the status of a class in the definition environment
lookupDef :: (Eq a, Show a) => a -> [(a, b)] -> b
lookupDef a [] = error ("lookupDef: cannot find " ++ show a)
lookupDef a ((name, v):ds) | (a == name) = v
| a /= name = (lookupDef a ds)
-- check whether a class is in the definition environment
isDef :: DefEnv -> String -> Bool
isDef [] _ = False
isDef ((x, def):ds) n = if x == n then True else isDef ds n
-- define the type of states of a program in the process of evaluation
type States a = (Env, DefEnv, Catalog, a)
{------------------------------------------------------------------------------
Evaluation of expressions of muPuppet
------------------------------------------------------------------------------}
evalExp :: States ValueExp -> Scope -> ValueExp
-- evaluate the variables
-- it corresponds to the rules LVar, PVar, TVar and Qvar.
-- Function lookforVar looks up the variables in the environment under the scope with respect to the parent scope relation
evalExp (env, defEnv, cv, (DeRef (Var x))) sco = (DeRef (Values (lookforVar env defEnv sco x)))
-- evaluate the sum of two integer numbers
-- it corresponds to the rule ARITHValue
evalExp (env, defEnv, cv, (BinOps AddOp (DeRef (Values (ValueInt x))) (DeRef (Values (ValueInt y))))) sco = (DeRef (Values (ValueInt (x + y))))
-- evaluation of the sum of two float numbers
evalExp (env, defEnv, cv, (BinOps AddOp (DeRef (Values (ValueFloat x))) (DeRef (Values (ValueFloat y))))) sco = (DeRef (Values (ValueFloat (x + y))))
-- evaluate the minus of two integer numbers
evalExp (env, defEnv, cv, (BinOps MinOp (DeRef (Values (ValueInt x))) (DeRef (Values (ValueInt y))))) sco = (DeRef (Values (ValueInt (x - y))))
-- evaluate the minus of two float numbers
evalExp (env, defEnv, cv, (BinOps MinOp (DeRef (Values (ValueFloat x))) (DeRef (Values (ValueFloat y))))) sco = (DeRef (Values (ValueFloat (x - y))))
-- evaluate the multiplication of two integer numbers
evalExp (env, defEnv, cv, (BinOps TimOp (DeRef (Values (ValueInt x))) (DeRef (Values (ValueInt y))))) sco = (DeRef (Values (ValueInt (x * y))))
-- evaluate the multiplication of two float numbers
evalExp (env, defEnv, cv, (BinOps TimOp (DeRef (Values (ValueFloat x))) (DeRef (Values (ValueFloat y))))) sco = (DeRef (Values (ValueFloat (x * y))))
-- evaluate the division of two integer numbers
evalExp (env, defEnv, cv, (BinOps DivOp (DeRef (Values (ValueInt x))) (DeRef (Values (ValueInt y))))) sco = (DeRef (Values (ValueInt (x `div` y))))
-- evaluate the division of two float numbers
evalExp (env, defEnv, cv, (BinOps DivOp (DeRef (Values (ValueFloat x))) (DeRef (Values (ValueFloat y))))) sco = (DeRef (Values (ValueFloat (x / y))))
-- evaluate the "and" operation of two boolean values
-- it corresponds to the rule ANDValue
evalExp (env, defEnv, cv, (BinOps AndOp (DeRef (Values (ValueBool x))) (DeRef (Values (ValueBool y))))) sco = (DeRef (Values (ValueBool (x && y))))
-- evaluate the "or" operation of two boolean values
evalExp (env, defEnv, cv, (BinOps OrOp (DeRef (Values (ValueBool x))) (DeRef (Values (ValueBool y))))) sco = (DeRef (Values (ValueBool (x || y))))
-- evaluate the "not" operation on the value "Ture"
-- it corresponds to the rule NOTValueI
evalExp (env, defEnv, cv, (Not (DeRef (Values (ValueBool True))))) sco = (DeRef (Values (ValueBool False)))
-- evaluate the "not" operation on the value "False"
-- it corresponds to the rule NOTValueII
evalExp (env, defEnv, cv, (Not (DeRef (Values (ValueBool False))))) sco = (DeRef (Values (ValueBool True)))
-- evaluate the ">" operation on two integer numbers
-- it corresponds to the rules COMPValueI and COMPValueII
evalExp (env, defEnv, cv, (BinOps GrtOp (DeRef (Values (ValueInt x))) (DeRef (Values (ValueInt y))))) sco = (DeRef (Values (ValueBool (x > y))))
-- evaluate the ">" operation on two float numbers
evalExp (env, defEnv, cv, (BinOps GrtOp (DeRef (Values (ValueFloat x))) (DeRef (Values (ValueFloat y))))) sco = (DeRef (Values (ValueBool (x > y))))
-- evaluate the "<" operation on two integer numbers
evalExp (env, defEnv, cv, (BinOps LessOp (DeRef (Values (ValueInt x))) (DeRef (Values (ValueInt y))))) sco = (DeRef (Values (ValueBool (x < y))))
-- evaluate the "<" operation on two float numbers
evalExp (env, defEnv, cv, (BinOps LessOp (DeRef (Values (ValueFloat x))) (DeRef (Values (ValueFloat y))))) sco = (DeRef (Values (ValueBool (x < y))))
-- evaluate the ">=" operation on two integer numbers
evalExp (env, defEnv, cv, (BinOps GeqOp (DeRef (Values (ValueInt x))) (DeRef (Values (ValueInt y))))) sco = (DeRef (Values (ValueBool (x >= y))))
-- evaluate the ">=" operation on two float numbers
evalExp (env, defEnv, cv, (BinOps GeqOp (DeRef (Values (ValueFloat x))) (DeRef (Values (ValueFloat y))))) sco = (DeRef (Values (ValueBool (x >= y))))
-- evaluate the "<=" operation on two integer numbers
evalExp (env, defEnv, cv, (BinOps LeqOp (DeRef (Values (ValueInt x))) (DeRef (Values (ValueInt y))))) sco = (DeRef (Values (ValueBool (x <= y))))
-- evaluate the "<=" operation on two float numbers
evalExp (env, defEnv, cv, (BinOps LeqOp (DeRef (Values (ValueFloat x))) (DeRef (Values (ValueFloat y))))) sco = (DeRef (Values (ValueBool (x <= y))))
-- evaluate the "==" operation on two integer numbers
evalExp (env, defEnv, cv, (BinOps EqOp (DeRef (Values (ValueInt x))) (DeRef (Values (ValueInt y))))) sco = (DeRef (Values (ValueBool (x == y))))
-- evaluate the "==" operation on two float numbers
evalExp (env, defEnv, cv, (BinOps EqOp (DeRef (Values (ValueFloat x))) (DeRef (Values (ValueFloat y))))) sco = (DeRef (Values (ValueBool (x == y))))
-- evaluate the "==" operation on two string values
evalExp (env, defEnv, cv, (BinOps EqOp (DeRef (Values (ValueString x))) (DeRef (Values (ValueString y))))) sco = (DeRef (Values (ValueBool (x == y))))
-- evaluate the "==" operation on two boolean values
evalExp (env, defEnv, cv, (BinOps EqOp (DeRef (Values (ValueBool x))) (DeRef (Values (ValueBool y))))) sco = (DeRef (Values (ValueBool (x == y))))
-- evaluate the "!=" operation on two integer values
evalExp (env, defEnv, cv, (BinOps UneqOp (DeRef (Values (ValueInt x))) (DeRef (Values (ValueInt y))))) sco = (DeRef (Values (ValueBool (x /= y))))
-- evaluate the "!=" operation on two float values
evalExp (env, defEnv, cv, (BinOps UneqOp (DeRef (Values (ValueFloat x))) (DeRef (Values (ValueFloat y))))) sco = (DeRef (Values (ValueBool (x /= y))))
-- evaluate the "!=" operation on two string values
evalExp (env, defEnv, cv, (BinOps UneqOp (DeRef (Values (ValueString x))) (DeRef (Values (ValueString y))))) sco = (DeRef (Values (ValueBool (x /= y))))
-- evaluate the "!=" operation on two boolean values
evalExp (env, defEnv, cv, (BinOps UneqOp (DeRef (Values (ValueBool x))) (DeRef (Values (ValueBool y))))) sco = (DeRef (Values (ValueBool (x /= y))))
-- evaluate the "Not" operation on an expression
-- corresponds to the rule NOTStep
evalExp (env, defEnv, cv, (Not exp)) sco = (Not (evalExp (env, defEnv, cv, exp) sco))
-- evaluate the second argument of any binary operation by the operator belonging to BinOps
-- it corresponds to the rules ARITHRight, COMRight, ANDRightI, ANDRightII
evalExp (env, defEnv, cv, (BinOps op (DeRef (Values v)) exp')) sco = BinOps op (DeRef (Values v)) (evalExp (env, defEnv, cv, exp') sco)
-- evaluate the first argument of any binary operation by the operator belonging to BinOps
-- it corresponds to the rules ARITHLeft, COMLeft, ANDLeft
evalExp (env, defEnv, cv, (BinOps op exp exp')) sco = BinOps op (evalExp (env, defEnv, cv, exp) sco) exp'
-- evaluate the control expression in a selector if it is not a value
-- it corresponds to the rule SControl
evalExp (env, defEnv, cv, (Selector s sbody)) sco | not(isVal s) = Selector (evalExp (env,defEnv, cv, s) sco) sbody
evalExp (env, defEnv, cv, (Selector _ [])) sco = error "No value returned by selector"
-- compare the control value to the cases; if the cases are not values, evaluate them.
-- it corresponds to the rule SChooseI, SChooseII and SCase
evalExp (env, defEnv, cv, (Selector s@(DeRef (Values v)) ((x,z):xs))) sco =
case x of (DeRef (Values (ValueString "default"))) -> z
(DeRef (Values w)) -> if v == w then z else (Selector s xs)
_ -> Selector s ((e,z):xs)
where e = evalExp (env, defEnv, cv, x) sco
-- error message when a selector has no default case and no matching case
evalExp (env, defEnv, cv, (Selector _ _)) sco = error "Selector"
-- error message for an empty array
evalExp (env, defEnv, cv, (Array [])) sco = error "empty array"
-- check whether an array literal is already fully evaluated; if so, convert it with "toValueArray", otherwise evaluate it with "evalArray"
-- it corresponds to the rule ARRExp, ARREleI, ARREleII
evalExp (env, defEnv, cv, (Array (as))) sco = if (valueArray as) then (DeRef (Values (ValueArray (toValueArray as))))
else (Array (evalArray (env, defEnv, cv, as) sco))
-- error message for an empty hash
evalExp (env, defEnv, cv, (Hash [])) sco = error "empty hash"
-- check whether a hash literal is already fully evaluated; if so, convert it with "toValueHash", otherwise evaluate it with "evalHash"
-- it corresponds to the rule HAExp, HAEleI, HAEleII
evalExp (env, defEnv, cv, (Hash hs)) sco = if (valueHash hs) then (DeRef (Values (ValueHash (toValueHash hs))))
else (Hash (evalHash (env, defEnv, cv, hs) sco))
-- evaluate the array and hash dereferences
-- it corresponds to the rules DEREFExp, DEREFIndex, DEREFArray, DEREFHash
evalExp (env, defEnv, cv, (DeRef (DeRefItem x r))) sco = case x of
(Var var) -> DeRef (DeRefItem (Values (lookforVar env defEnv sco var)) r)
(Values (ValueArray s)) -> case r of
(DeRef (Values (ValueInt x))) -> (DeRef (deRefArray s x))
DeRef (Values x) -> error "evalDeRefArray"
_ -> DeRef (DeRefItem x (evalExp (env, defEnv, cv, r) sco))
(Values (ValueHash s)) -> case r of
DeRef (Values x) -> DeRef (deRefHash s x)
_ -> DeRef (DeRefItem x (evalExp (env, defEnv, cv, r) sco))
(Values (ValueRef a b)) -> case r of
(DeRef (Values (ValueString x))) -> (lookupCat cv a b x)
_ -> error "evalValueRef"
(ResRef a b) -> case b of
(DeRef (Values (ValueString x))) -> (DeRef (DeRefItem (Values (ValueRef a x)) r))
(DeRef (Values _ )) -> error "evalResRef"
_ -> (DeRef (DeRefItem (ResRef a (evalExp (env, defEnv, cv, b) sco)) r))
(DeRefItem a b) -> case (evalExp (env, defEnv, cv, (DeRef x)) sco) of
(DeRef y) -> (DeRef (DeRefItem y r))
_ -> error "evalExp1"
-- evaluate the resource dereference
-- it corresponds to the rules REFRes and DEREFRes
evalExp (env, defEnv, cv, (DeRef (ResRef r n))) sco = case n of
(DeRef (Values (ValueString x))) -> (DeRef (Values (ValueRef r x)))
(DeRef (Values _ )) -> error "ResRef1"
_ -> (DeRef (ResRef r (evalExp (env, defEnv, cv, n) sco)))
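-- Illustrative sketch (added note, not part of the original module): a single
-- evaluation step on a fully-evaluated arithmetic expression applies the
-- ARITHValue rule directly, e.g.
--   evalExp ([], [], [], BinOps AddOp (DeRef (Values (ValueInt 1)))
--                                     (DeRef (Values (ValueInt 2)))) STop
-- yields DeRef (Values (ValueInt 3)).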
-- check whether an array is an array value
valueArray :: [ValueExp] -> Bool
valueArray [] = True
valueArray ((DeRef (Values a)):as) = (valueArray as)
valueArray _ = False
-- change a list of value expressions to a list of values
toValueArray :: [ValueExp] -> [Value]
toValueArray [] = []
toValueArray ((DeRef (Values a)):as) = (a:(toValueArray as))
-- check whether a hash is a hash value
valueHash :: [(Value, ValueExp)] -> Bool
valueHash [] = True
valueHash ((a, (DeRef (Values b))):hs) = valueHash hs
valueHash _ = False
-- change a list of value and value-expression pairs to a list of value and value pairs
toValueHash :: [(Value, ValueExp)] -> [(Value, Value)]
toValueHash [] = []
toValueHash ((a, (DeRef (Values b))) : hs) = ((a, b):(toValueHash hs))
-- evaluate a list of expressions to a list of values
evalArray :: States [ValueExp] -> Scope -> [ValueExp]
evalArray (_, _, _, []) sco = []
evalArray (env, defEnv, cv, ((DeRef (Values a)):as)) sco = ((DeRef (Values a)):(evalArray (env, defEnv, cv, as) sco))
evalArray (env, defEnv, cv, (a:as)) sco = (evalExp (env, defEnv, cv, a) sco):as
-- evaluate a list of value and expression pairs to a list of value and value pairs
evalHash :: States [(Value, ValueExp)] -> Scope -> [(Value, ValueExp)]
evalHash (_, _, _, []) sco = []
evalHash (env, defEnv, cv, ((x, (DeRef (Values h))):hs)) sco = ((x, (DeRef (Values h))):(evalHash (env, defEnv, cv, hs) sco))
evalHash (env, defEnv, cv, ((x,h):hs)) sco = (x, (evalExp (env, defEnv, cv, h) sco)):hs
{------------------------------------------------------------------------------
Evaluation of the statements of muPuppet
------------------------------------------------------------------------------}
evalStat :: States Statements -> Scope -> States Statements
-- show an error message when evaluating "Skip"
evalStat (env, defEnv, cv, Skip) sco = error "evalStat1"
-- evaluate an assignment statement
-- it corresponds to the rules ASSIGN and ASSIGNStep
evalStat (env, defEnv, cv, (Assignment x y)) sco = case y of
(DeRef (Values v)) -> if lookupEnv env sco x /= Nothing then error ("Variable " ++ show x ++ " already defined in scope " ++ show sco)
else ((env ++ [(sco, x, v)]), defEnv, cv, Skip)
_ -> (env, defEnv, cv, (Assignment x (evalExp (env, defEnv, cv, y) sco)))
-- evalute "if" statement when the control expression is equal to "True"
-- it corresponds to the rule IFT
evalStat (env, defEnv, cv, (If (DeRef (Values (ValueBool True))) y k)) sco = (env, defEnv, cv, y)
-- evalute "if" statement when the control expression is equal to "False"
-- it corresponds to the rule IFF
evalStat (env, defEnv, cv, (If (DeRef (Values (ValueBool False))) y k)) sco =
case k of
Nothing -> (env, defEnv, cv, Skip)
Just (Elseif e s k) -> (env, defEnv, cv, If e s k)
Just (Else s) -> (env, defEnv, cv, s)
-- evalute "if" statement when the control expression is an expression
-- it corresponds to the rule IFStep
evalStat (env, defEnv, cv, (If x y k)) sco =
let e = evalExp (env, defEnv, cv, x) sco in (env, defEnv, cv, (If e y k))
-- evalute "unless" statement when the control expression is equal to "True"
-- it corresponds to the rule UNLESST
evalStat (env, defEnv, cv, (Unless (DeRef (Values (ValueBool True))) s k)) sco =
case k of
Nothing -> (env, defEnv, cv, Skip)
Just (Else s) -> (env, defEnv, cv, s)
Just (Elseif _ _ _) -> error "evalStat: 'elsif' not allowed with 'unless'"
-- evalute "unless" statement when the control expression is equal to "False"
-- it corresponds to the rule UNLESSF
evalStat (env, defEnv, cv, (Unless (DeRef (Values (ValueBool False))) s k)) sco = (env, defEnv, cv, s)
-- if the control value is not a boolean, show error message, corresponding to the error in the real Puppet
evalStat (env, defEnv, cv, (Unless (DeRef (Values v)) s k)) sco = error "evalStat: Test component of 'unless' is not a Boolean value!"
-- evalute "unless" statement when the control expression is an expression
-- it corresponds to the rule UNLESSStep
evalStat (env, defEnv, cv, (Unless e s k)) sco = (env, defEnv, cv, (Unless e2 s k))
where e2 = evalExp (env, defEnv, cv, e) sco
-- evalute "case" statement if there is no cases
-- it corresponds to the rule CASEDone
evalStat (env, defEnv, cv, (Case x [])) sco = (env, defEnv, cv, Skip)
-- evalute "case" statement if there are cases
-- the branches correspond to the rule CASEMatch, CASENoMatch, CASEStep2 and CASEStep1 respectively
evalStat (env, defEnv, cv, (Case x ((z, s):xs))) sco = case x of
(DeRef (Values y)) -> case z of
(DeRef (Values (ValueString "default"))) -> (env, defEnv, cv, s)
(DeRef (Values n)) -> if (y==n) then (env, defEnv, cv, s) else (env, defEnv, cv, (Case x xs))
_ -> (env, defEnv, cv, (Case x ((e, s):xs)))
where e = evalExp (env, defEnv, cv, z) sco
_ -> (env, defEnv, cv, (Case e ((z, s):xs)))
where e = evalExp (env, defEnv, cv, x) sco
-- evalute "resource"
-- the branches correspond to RESDecl, RESStep, RESStepI, RESStepII, RESTitle,
evalStat (env, defEnv, cv, (Resource x y rs)) sco = case y of
(DeRef (Values (ValueString n))) -> if (valueRes rs) then (env, defEnv, (extendCat cv (x,n,toValueRes rs) ), Skip)
else (env, defEnv, cv, (Resource x y (evaltoListValue env defEnv cv sco rs)))
(DeRef (Values _ )) -> error "wrong type of resource name"
_ -> (env, defEnv, cv, (Resource x e rs))
where e = evalExp (env, defEnv, cv, y) sco
-- evaluate "include" statement
-- the branches correspond to the rules for the different cases of class "a", which are INCD, INCU, INCPD and INCPU
evalStat (env, defEnv, cv, (Include a)) sco =
case (lookupDef a defEnv) of
(DeclaredClass _ ) -> (env, defEnv, cv, Skip)
(ClassDef Nothing p s) -> (env, (changeDef defEnv a (baseof defEnv sco)), cv, (ScopeStat (SClass a) (ClassCont (mergeParams [] p) s)))
(ClassDef (Just b) p s) -> case (lookupDef b defEnv) of
(DeclaredClass _ ) -> (env, (changeDef defEnv a (SClass b)), cv, (ScopeStat (SClass a) (ClassCont (mergeParams [] p) s)))
(ClassDef _ p s) -> (env, defEnv, cv, (StatementsList [(Include b), (Include a)]))
-- evaluate the helping statement "classcont" in scope statement in muPuppet
evalStat (env, defEnv, cv, (ClassCont p s)) sco =
((env ++ (extendEnv sco (evaltoListValue env defEnv cv sco p))), defEnv, cv, s)
-- evaluate the resource-like class declarations
-- the branches correspond to the rules CDecU, CDecPU and CDecPD
evalStat (env, defEnv, cv, (ClassDecl a as)) sco =
case (lookupDef a defEnv) of
(DeclaredClass _ ) -> error ("Duplicate declaration of class '" ++ a ++ "'")
(ClassDef Nothing ps s) ->
(env, (changeDef defEnv a (baseof defEnv sco)), cv,
(ScopeStat (SClass a) (ClassCont (mergeParams as ps) s)))
(ClassDef (Just b) ps s) ->
case (lookupDef b defEnv) of
(DeclaredClass _) -> (env, (changeDef defEnv a (SClass b)), cv, (ScopeStat (SClass a) (ClassCont (mergeParams as ps) s)))
(ClassDef _ _ _ ) -> (env, defEnv, cv, (StatementsList [(Include b), (ClassDecl a as)]))
-- evaluate the declaration of defined resource types
-- it corresponds to the rules DEF and DETStep
evalStat (env, defEnv, cv, (ResTypeDecl t title as)) sco
| isVal title =
case (lookupDef t defEnv) of
(ResTypeDef t p s) -> if (valueRes as)
then (env, defEnv, cv, (ScopeStat (SDef sco) (ResTypeCont t (("title",title):("name",title):mergeParams as p) s)))
else (env, defEnv, cv, (ResTypeDecl t title (evaltoListValue env defEnv cv sco as)))
_ -> error "resource is not defined"
| otherwise = (env, defEnv, cv, (ResTypeDecl t (evalExp (env, defEnv, cv, title) sco) as))
-- evaluate the helping statement "ResTypeCont" in the scope statement for defined resource types
evalStat (env, defEnv, cv, (ResTypeCont t p s)) sco =
if (valueRes p)
then ((env ++ (extendEnv sco p)), defEnv, cv, s)
else (env, defEnv, cv, (ResTypeCont t (evaltoListValue env defEnv cv sco p) s))
-- evaluate scope statement in muPuppet where the scope is a defined resource type and that reaches "Skip" statement
-- it corresponds to the rule DEFScopeDone
evalStat (env, defEnv, cv, (ScopeStat (SDef a) Skip)) sco = (clearScope (SDef a) env, defEnv, cv, Skip)
-- evaluate scope statement in muPuppet where the scope is "::", "::a" or "::nd" and that reaches "Skip" statement
-- it corresponds to the rule ScopeDone
evalStat (env, defEnv, cv, (ScopeStat a Skip)) sco = (env, defEnv, cv, Skip)
-- evaluate scope statement in muPuppet
-- it corresponds to the rule ScopeStep and DEFScopeStep
evalStat (env, defEnv, cv, (ScopeStat sco' s)) sco =
let (env', defEnv', cv', s') = (evalStat (env, defEnv, cv, s) sco') in (env', defEnv', cv', (ScopeStat sco' s'))
-- change the end of a list of statements in muPuppet to "Skip" statement
evalStat (env, defEnv, cv, (StatementsList [])) sco = (env, defEnv, cv, Skip)
-- evaluate a list of statements in muPuppet
-- it corresponds to the rule SEQSkip
evalStat (env, defEnv, cv, (StatementsList (Skip:xs))) sco = (env, defEnv, cv, (StatementsList xs))
-- evaluate a list of statements in muPuppet
-- it corresponds to the rule SEQStep
evalStat (env, defEnv, cv, (StatementsList (s:xs))) sco =
let (env', defEnv', cv', s') = (evalStat (env, defEnv, cv, s) sco) in
(env', defEnv', cv', (StatementsList (s':xs)))
-- evaluate a list of string and expression pairs
evaltoListValue :: Env -> DefEnv -> Catalog -> Scope -> [(String, ValueExp)] -> [(String, ValueExp)]
evaltoListValue _ _ _ _ [] = []
evaltoListValue env defEnv cv sco ((x,(DeRef (Values y))):ys) = ((x, (DeRef (Values y))):(evaltoListValue env defEnv cv sco ys))
evaltoListValue env defEnv cv sco ((x,y):ys) = ((x, (evalExp (env, defEnv, cv, y) sco)):ys)
-- check whether an expression is a value
isVal :: ValueExp -> Bool
isVal (DeRef (Values v)) = True
isVal _ = False
-- check whether a list of string and expression pairs is a list of string and value pairs
valueRes :: [(String, ValueExp)] -> Bool
valueRes [] = True
valueRes ((x, (DeRef (Values v))):as) = valueRes as
valueRes _ = False
-- convert a list of string and value expression pairs to a list of string and value pairs
toValueRes :: [(String, ValueExp)] -> [(String, Value)]
toValueRes [] = []
toValueRes ((x, (DeRef (Values v))):as) = ((x, v):(toValueRes as))
toValueRes _ = error "not all elements are values"
{------------------------------------------------------------------------------
Evaluation of an elements of a program in muPuppet
------------------------------------------------------------------------------}
evalProgEle :: States ProgramEle -> Name -> States ProgramEle
-- evaluate the definition of a node
-- it corresponds to the rules NODEMatch and NODEnoMatch
evalProgEle (env, defEnv, cv, (Node n s)) name | name == n = (env, defEnv, cv, ProStatement (ScopeStat SNode s))
| otherwise = (env, defEnv, cv, ProSkip)
-- evaluate the definition of a class
-- it covers the rules CDEF, CDEFI, CDEFP and CDEFPI
evalProgEle (env, defEnv, cv, (Class a p b s)) name =
case (isDef defEnv a) of
False -> (env, (defEnv ++ [(a, (ClassDef b p s))]), cv, ProSkip)
True -> error "Class is defined"
-- evaluate the definition of a defined resource type
-- it covers the rules RDEF
evalProgEle (env, defEnv, cv, (DefResType t p s)) name =
case (isDef defEnv t) of
False -> (env, (defEnv ++ [(t, (ResTypeDef t p s))]), cv, ProSkip)
True -> error "Resource type is defined"
-- evaluate the end of a program to ProSkip statement
evalProgEle (env, defEnv, cv, (ProStatement Skip)) name =
(env, defEnv, cv, ProSkip)
-- use the statement evaluator to evaluate a statement at the program level
-- it corresponds to the rule TopScope
evalProgEle (env, defEnv, cv, (ProStatement s)) name =
let (env', defEnv', cv', s') = (evalStat (env, defEnv, cv, s) STop)
in (env', defEnv', cv', (ProStatement s'))
{------------------------------------------------------------------------------
Evaluation of a program in muPuppet (a manifest in Puppet)
------------------------------------------------------------------------------}
evalProg :: States Program -> Name -> States Program
-- evaluate an empty program to an empty program
evalProg (env, defEnv, cv, []) n = (env, defEnv, cv, [])
-- evaluate a list of elements of a program
-- it corresponds to the rule MSEQSkip
evalProg (env, defEnv, cv, (ProSkip:ps)) n = (env, defEnv, cv, ps)
-- evaluate a list of elements of a program
-- it corresponds to the rule MSEQStep
evalProg (env, defEnv, cv, (p:ps)) n = let (env', defEnv', cv', p') = (evalProgEle (env, defEnv, cv, p) n) in (env', defEnv', cv', (p':ps))
{------------------------------------------------------------------------------
Evaluate AST for Puppet program & return the catalog
------------------------------------------------------------------------------}
evalPuppet :: CState -> AST -> IO (Either [String] Catalog)
evalPuppet st raw_ast = do { return (Right catalog') } where
-- evaluate in steps
(env', defEnv', catalog', ast') =
evalNSteps steps (env, defEnv, catalog, ast)
-- initial values
env = []
defEnv = []
catalog = []
name = nodeName $ sOpts st
steps = stepLimit $ sOpts st
showTrace = (verbosity $ sOpts st) /= Normal
ast = case mainClass $ sOpts st of
Nothing -> raw_ast
Just main -> raw_ast ++ [ProStatement (Include main)]
-- evaluate steps with a trace of each step (if showTrace true)
evalNSteps limit states@(_,_,_,ast) =
if showTrace
then trace (show ast) states'
else states'
where states' = evalNSteps' limit states
-- evaluate steps
-- stop when the program is reduced to a skip
-- or stop when steplimit is reached
evalNSteps' limit states@(_,_,_,ast) =
case ast of
-- reduced to a skip
[] -> states
-- check the limit
otherwise -> case limit of
Just 0 -> error ("Evaluation incomplete with result " ++ show states)
Just n -> evalNSteps (Just (n-1)) states'
Nothing -> evalNSteps Nothing states'
where
states' = evalProg states name
| dcspaul/uPuppet | Src/UPuppet/Eval.hs | mit | 31,319 | 11 | 19 | 8,372 | 10,317 | 5,651 | 4,666 | 290 | 23 |
module Antiqua.Geometry.Circle where
import Data.STRef
import Control.Applicative
import Control.Monad.ST
import Antiqua.Common
bresenham :: XY -> Int -> [XY]
bresenham (x0, y0) r = runST $ do
x <- newSTRef r
y <- newSTRef 0
rError <- newSTRef (1 - r)
list <- newSTRef []
let (+$=) ls (i, j) = modifySTRef' ls ((i + x0, j + y0):)
let go = do xx <- readSTRef x
yy <- readSTRef y
if (xx < yy)
then return list
else do let pts = concat [[ (i, j) | i <- [-xx..xx], j <- [-yy,yy]]
,[ (j, i) | i <- [-xx,xx], j <- [-yy..yy]]
]
sequence $ ((+$=) list) <$> pts
modifySTRef' y (+1)
re <- readSTRef rError
if (re < 0)
then do modifySTRef' rError ((+) (2*yy + 1))
else do modifySTRef' x (subtract 1)
modifySTRef' rError ((+) (2*(yy - xx + 1)))
go
g <- go
readSTRef g
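-- Usage sketch (added note, not part of the original module):
-- 'bresenham (10, 10) 3' enumerates the lattice points of a rasterised
-- (filled) circle of radius 3 centred on (10, 10); some points may appear
-- more than once in the result.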
| olive/antiqua-prime | src/Antiqua/Geometry/Circle.hs | mit | 1,139 | 0 | 26 | 532 | 456 | 236 | 220 | 28 | 3 |
module GHCJS.DOM.SVGPathSegMovetoAbs (
) where
| manyoo/ghcjs-dom | ghcjs-dom-webkit/src/GHCJS/DOM/SVGPathSegMovetoAbs.hs | mit | 49 | 0 | 3 | 7 | 10 | 7 | 3 | 1 | 0 |
module Rx.Observable.MergeTest (tests) where
import Test.HUnit
import Test.Hspec
import Control.Concurrent.Async (async, wait)
import Control.Monad (forM_, replicateM, replicateM_)
import qualified Rx.Observable as Rx
import qualified Rx.Subject as Rx
tests :: Spec
tests =
describe "Rx.Observable.Merge" $
describe "merge" $
it "completes after all inner Observables are completed" $ do
let innerSubjectCount = 10
subjects@(firstSubject:subjects1) <-
replicateM innerSubjectCount Rx.newPublishSubject
sourceSubject <- Rx.newPublishSubject
let source = Rx.foldLeft (+) 0
$ Rx.merge
$ Rx.toAsyncObservable sourceSubject
aResult <- async $ Rx.toMaybe source
forM_ subjects $ \subject -> do
Rx.onNext sourceSubject $ Rx.toAsyncObservable subject
replicateM_ 100 $ Rx.onNext subject (1 :: Int)
Rx.onCompleted sourceSubject
-- If merge doesn't wait for inner observables
      -- these numbers should not be in the total count
mapM_ Rx.onCompleted subjects1
replicateM_ 50 $ Rx.onNext firstSubject 1
mapM_ Rx.onCompleted subjects
mResult <- wait aResult
case mResult of
Just result ->
assertEqual "should be the same as folding"
(innerSubjectCount * 100 + 50)
result
Nothing ->
assertFailure "Rx failed when it shouldn't have"
| roman/Haskell-Reactive-Extensions | rx-core/test/Rx/Observable/MergeTest.hs | mit | 1,525 | 0 | 14 | 470 | 345 | 172 | 173 | 35 | 2 |
module Text.Documentalist.Types.DocBlock ( DocBlock(..)
, Paragraph(..)
, Span(..)
, Code(..)
, DocParam(..)
, Result(..)
) where
import Control.Monad.Writer.Strict
import Data.List
import Data.Maybe
import Text.Documentalist.Types.Package
import Text.Documentalist.PrettyPrint
-- | A documentation block for a declaration.
data DocBlock = DocBlock
{ summary :: Paragraph
, description :: [Paragraph]
, parameters :: [DocParam]
, example :: Maybe Code
, result :: Maybe Result
} deriving (Show, Eq)
instance PrettyPrint DocBlock where
pprint doc =
let desc = description doc
params = parameters doc
ex = example doc
res = result doc
show' :: Writer [String] ()
show' = do
tell ["# Summary: " ++ show (summary doc)]
unless (null desc) $ tell ["# Description:\n" ++ show desc ++ "\n"]
unless (null params) $ tell ["# Parameters:\n" ++ show params ++ "\n"]
when (isJust ex) $ tell ["# Example:\n" ++ show (fromJust ex) ++ "\n"]
when (isJust res) $ tell ["# Result: " ++ show (fromJust res)]
in '\n' : intercalate "\n" (execWriter show')
-- | A newline-delimited section of text.
data Paragraph = TextParagraph [Span]
| CodeBlock Code
| QuotedText Paragraph
deriving (Show, Eq)
instance PrettyPrint Paragraph where
pprint (TextParagraph spans) = show spans
pprint (CodeBlock (Code str)) = "```\n" ++ str ++ "\n```"
pprint (QuotedText para) = init $ unlines $ map ("> " ++) $ lines $ show para
pprintList paras = (++) $ intercalate "\n\n" $ map show paras
-- | Represents a portion of text in a documentation string.
data Span = PlainText String
| Reference (Declaration (Maybe DocBlock))
| WebLink String
| InlineImage String
| InlineCode Code
| EmphasizedText Span
| StrongText Span
| UnderlinedText Span
deriving (Show, Eq)
instance PrettyPrint Span where
pprint (PlainText str) = str
pprint (Reference decl) = "ref:" ++ show decl
pprint (WebLink url) = "link:" ++ url
pprint (InlineImage url) = "img:" ++ url
pprint (InlineCode (Code str)) = "`" ++ str ++ "`"
pprint (EmphasizedText span) = "_" ++ show span ++ "_"
pprint (StrongText span) = "*" ++ show span ++ "*"
pprint (UnderlinedText span) = "_" ++ show span ++ "_"
pprintList spans = (++) $ unwords $ map show spans
-- | A block or span of code in the source language.
newtype Code = Code String
deriving (Show, Eq)
-- | One of the parameters to a Function, or one of the values in an Enumeration.
data DocParam = DocParam (Declaration (Maybe DocBlock)) [Span]
deriving (Show, Eq)
instance PrettyPrint DocParam where
pprint (DocParam _ spans) = show spans
pprintList docs = (++) $ intercalate "\n" $ map show docs
-- | Describes the value that a Function returns to its caller.
newtype Result = Result [Span]
deriving (Show, Eq)
instance PrettyPrint Result where
pprint (Result spans) = show spans
| jspahrsummers/documentalist | Text/Documentalist/Types/DocBlock.hs | mit | 3,405 | 0 | 19 | 1,104 | 981 | 517 | 464 | 71 | 0 |
{
module Lexer (Alex, runAlex, lexToken) where
import Data.Map (Map)
import Alex
}
%wrapper "basic"
$numbers = 0-9 -- digits
$letters = [a-zA-Z] -- alphabetic characters
$eol      = [\n]           -- newline
tokens :-
"if" { \_ -> ReservedIdent "if" }
"and" { \_ -> ReservedIdent "and" }
"break" { \_ -> ReservedIdent "break" }
"def" { \_ -> ReservedIdent "def" }
"else" { \_ -> ReservedIdent "else" }
"for" { \_ -> ReservedIdent "for" }
"import" { \_ -> ReservedIdent "import" }
"in" { \_ -> ReservedIdent "in" }
"lambda" { \_ -> ReservedIdent "lambda" }
"not" { \_ -> ReservedIdent "not" }
"or" { \_ -> ReservedIdent "or" }
"return" { \_ -> ReservedIdent "return" }
"while" { \_ -> ReservedIdent "while" }
"False" { \_ -> ReservedIdent "False" }
"None" { \_ -> ReservedIdent "None" }
"True" { \_ -> ReservedIdent "True" }
{
-- Each action has type :: String -> Token
-- The token type:
data Token =
    ReservedIdent String |   -- reserved words produced by the rules above
    Let           |
    In            |
    Sym Char      |
    Var String    |
    Int Int
    deriving (Eq,Show)
main = do
s <- getContents
print (alexScanTokens s)
}
| joshuapassos/GCET525-Compiladores | vodka.hs | mit | 1,122 | 36 | 10 | 308 | 440 | 212 | 228 | -1 | -1 |
main = do
src <- readFile "quux.txt"
putStr $ wordCount src
where wordCount input = (show . length . lines $ input) ++ "\n"
| adz/real_world_haskell | ch01/read_from_file_explicitly.hs | mit | 131 | 0 | 11 | 31 | 55 | 26 | 29 | 4 | 1 |
module FeatureModel.Logic
( alternativeChildren
, dimacsFormat
, essentialFeatures
, eval
, featureToPropositionalLogic
, fmToCNFExpression
, fmToPropositionalLogic
, fmToTseitinEncode
, isMandatory
, isAlternative
, isOptional
, optionalFeatures
, orChildren
, ref
) where
import Data.Generics
import FeatureModel.NewTypes.Types
import qualified Data.List as L
import qualified Data.Set as S
import qualified Data.Tree as T (Tree(Node))
import FeatureModel.Types (FeatureExpression(..))
import Funsat.Types
import Prelude hiding (or)
-- The constant expressions for representing True and False.
expTrue = ConstantExpression True
expFalse = ConstantExpression False
-- Check if an expression is an implication.
isImpliesExpression :: FeatureExpression -> Bool
isImpliesExpression (Or (Not e1) (e2)) = True
isImpliesExpression otherwise = False
-- Syntactic sugar for building expressions.
(|=>) :: FeatureExpression -> FeatureExpression -> FeatureExpression
e1 |=> e2 = Or (Not e1) e2
(<=>) :: FeatureExpression -> FeatureExpression -> FeatureExpression
e1 <=> e2 = And (Or (Not e1) e2) (Or (Not e2) e1)
(/\) :: FeatureExpression -> FeatureExpression -> FeatureExpression
e1 /\ e2 = And e1 e2
(\/) :: FeatureExpression -> FeatureExpression -> FeatureExpression
e1 \/ e2 = Or e1 e2
foldAnd xs = simplifyExpression (foldr And (expTrue) xs)
foldOr xs = simplifyExpression (foldr Or (expFalse) xs)
ref :: Feature a -> FeatureExpression
ref f = FeatureRef (fId f)
--
-- Expression simplifications
simplifyExpression :: FeatureExpression -> FeatureExpression
simplifyExpression (And e1 e2) = simplifyAnd e1 e2
simplifyExpression (Or e1 e2) = simplifyOr e1 e2
simplifyExpression (Not e) = simplifyNot e
simplifyExpression (FeatureRef f) = FeatureRef f
simplifyExpression (ConstantExpression b) = ConstantExpression b
simplifyAnd :: FeatureExpression -> FeatureExpression -> FeatureExpression
simplifyAnd e1 e2
| (e1 == expFalse) || (e2 == expFalse) = expFalse
| e1 == expTrue = simplifyExpression e2
| e2 == expTrue = simplifyExpression e1
| otherwise = And (simplifyExpression e1) (simplifyExpression e2)
simplifyOr :: FeatureExpression -> FeatureExpression -> FeatureExpression
simplifyOr e1 e2
| (e1 == expTrue) || (e2 == expTrue) = expTrue
| e1 == expFalse = simplifyExpression e2
| e2 == expFalse = simplifyExpression e1
| otherwise = Or (simplifyExpression e1) (simplifyExpression e2)
simplifyNot :: FeatureExpression -> FeatureExpression
simplifyNot e
| e == expTrue = expFalse
| e == expFalse = expTrue
| otherwise = Not (simplifyExpression e)
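-- Illustrative examples (added note, not part of the original module;
-- feature identifiers are assumed to be plain strings):
--   simplifyExpression (And expTrue (FeatureRef "f"))  ==  FeatureRef "f"
--   simplifyExpression (Or expFalse (Not expTrue))     ==  expFalse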
essentialFeatures :: T.Tree (Feature a) -> [Feature a]
essentialFeatures ftree =
foldFTree (++) (filterMandatory) (filterMandatory) [] ftree
where
filterMandatory ftree = if isMandatory (fnode ftree)
then [fnode ftree]
else []
alternativeChildren :: T.Tree (Feature a) -> [Feature a]
alternativeChildren ftree = concat [children f | f <- subtrees ftree, isAlternative (fnode f)]
orChildren :: T.Tree (Feature a) -> [Feature a]
orChildren ftree = concat [children f | f <- subtrees ftree, isOrFeature (fnode f)]
optionalFeatures :: T.Tree (Feature a) -> [Feature a]
optionalFeatures ftree = [fnode f | f <- subtrees ftree, isOptional (fnode f)]
isMandatory :: Feature a -> Bool
isMandatory f = (fCardinality f) == mandatory
isOptional :: Feature a -> Bool
isOptional f = (fCardinality f) == optional
isAlternative :: Feature a -> Bool
isAlternative f = (gCardinality f) == alternative
isOrFeature :: Feature a -> Bool
isOrFeature f = case gCardinality f of
(Cardinality 1 _) -> True
_ -> False
fmToPropositionalLogic :: FeatureModel a -> [FeatureExpression]
fmToPropositionalLogic fm = rootProposition ++ ftPropositions ++ csPropositions
where
(T.Node f fs) = fmTree fm
ftPropositions = foldFTree (++) (\(T.Node _ []) -> []) (featureToPropositionalLogic) [] (T.Node f fs)
csPropositions = fmConstraints fm
rootProposition = [ref f]
featureToPropositionalLogic :: T.Tree (Feature a) -> [FeatureExpression]
featureToPropositionalLogic ftree =
let f = fnode ftree
cs = children ftree
in (
case gCardinality f of
(Cardinality 0 0) -> [(ref f) |=> (ref c) | c <- cs, fCardinality c == mandatory]
(Cardinality 1 1) -> [(ref f) |=> (foldOr [xor x (L.delete x cs) | x <- cs])]
(Cardinality 1 _) -> [(ref f) |=> (foldOr [ref x | x <- cs])]
) ++ [(ref c) |=> (ref f) | c <- cs]
xor f [] = ref f
xor f xs = And (ref f) (foldAnd [Not (ref x) | x <- xs])
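-- Worked sketch (added note, not part of the original module): for a feature f
-- whose group is an alternative (1..1) with children c1 and c2, the clauses
-- built above amount to
--   ref f  |=>  (ref c1 /\ Not (ref c2)) \/ (ref c2 /\ Not (ref c1))
--   ref c1 |=>  ref f
--   ref c2 |=>  ref f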
fmToCNFExpression :: FeatureModel a -> FeatureExpression
fmToCNFExpression fm =
let fmExpressions = fmToPropositionalLogic fm
in toCNFExpression (foldAnd fmExpressions)
fmToTseitinEncode :: FeatureModel a -> FeatureExpression
fmToTseitinEncode fm =
let fmExpressions = fmToPropositionalLogic fm
in toTseitinEncode (foldAnd fmExpressions)
toTseitinEncode :: FeatureExpression -> FeatureExpression
toTseitinEncode (Or e1 e2) =
let a1 = newRef [1]
a2 = newRef [2]
in foldAnd ([Or a1 a2] ++ (toTseitinEncode' [1] e1) ++ (toTseitinEncode' [2] e2))
toTseitinEncode (And e1 e2) =
let a1 = newRef [1]
a2 = newRef [2]
in foldAnd ([And a1 a2] ++ (toTseitinEncode' [1] e1) ++ (toTseitinEncode' [2] e2))
toTseitinEncode (Not e1) =
let a1 = newRef [1]
in foldAnd( [Not a1] ++ (toTseitinEncode' [1] e1))
toTseitinEncode e = e
-- toTseitinEncode' _ (FeatureRef e) = []
toTseitinEncode' gs (Or e1 e2) =
let gl = gs ++ [1]
gr = gs ++ [2]
w = newRef gs
w1 = newRef gl
w2 = newRef gr
in [And (Or (Not w) (Or w1 w2) ) (And (Or w (Not w1)) (Or w (Not w2)))] ++
(toTseitinEncode' gl e1) ++
(toTseitinEncode' gr e2)
toTseitinEncode' gs (And e1 e2) =
let gl = gs ++ [1]
gr = gs ++ [2]
w = newRef gs
w1 = newRef gl
w2 = newRef gr
in [And (Or (Not w) w1) (And (Or (Not w) w2) (Or w (Or (Not w1) (Not w2))))] ++
(toTseitinEncode' gl e1) ++
(toTseitinEncode' gr e2)
toTseitinEncode' gs (Not e1) =
let gl = gs ++ [1]
w = newRef gs
w1 = newRef gl
in [And (Or (Not w) (Not w1)) (Or w w1) ] ++ (toTseitinEncode' gl e1)
toTseitinEncode' gs otherwise = []
toCNFExpression :: FeatureExpression -> FeatureExpression
toCNFExpression (And e1 e2) = And (toCNFExpression e1) (toCNFExpression e2)
toCNFExpression (Or e1 e2) = distributeAndOverOr e1 e2
toCNFExpression (Not e1) = moveNotInwards e1
toCNFExpression (FeatureRef f) = (FeatureRef f)
distributeAndOverOr :: FeatureExpression -> FeatureExpression -> FeatureExpression
distributeAndOverOr (And x y) e2 = And (toCNFExpression (Or x e2)) (toCNFExpression(Or y e2))
distributeAndOverOr e1 (And x y) = And (toCNFExpression(Or e1 x)) (toCNFExpression(Or e1 y))
distributeAndOverOr e1 e2 = distributeAndOverOr' a b
where
distributeAndOverOr' (And x y) e = toCNFExpression (Or (And x y) e)
distributeAndOverOr' e (And x y) = toCNFExpression (Or e (And x y))
distributeAndOverOr' x y = Or (toCNFExpression x) (toCNFExpression y)
a = toCNFExpression e1
b = toCNFExpression e2
moveNotInwards :: FeatureExpression -> FeatureExpression
moveNotInwards (And x y) = Or (toCNFExpression (Not x)) (toCNFExpression (Not y))
moveNotInwards (Or x y) = And (toCNFExpression (Not x)) (toCNFExpression (Not y))
moveNotInwards (Not x) = toCNFExpression x
moveNotInwards e = Not e
type Gate = Integer
newRef :: [Gate] -> FeatureExpression
newRef gs = FeatureRef (foldl (++) "g" [show g | g <- gs])
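-- Added note (not in the original source): gates are named after their path in
-- the expression tree, e.g. newRef [1,2] == FeatureRef "g12".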
dimacsFormat :: FeatureExpression -> CNF
dimacsFormat exp =
let vars = getVars exp
cs = map (expToLiterals vars) (getClauses exp)
in CNF {
numVars = length vars,
numClauses = length cs,
clauses = S.fromList cs
}
eval :: FeatureConfiguration a -> FeatureExpression -> Bool
eval config (FeatureRef f) = elem f [fId x | x <- flatten (fcTree config)]
eval config (Not e) = not (eval config e)
eval config (And e1 e2) = (eval config e1) && (eval config e2)
eval config (Or e1 e2) = (eval config e1) || (eval config e2)
eval _ (ConstantExpression e) = e
| hephaestus-pl/hephaestus | alexandre/feature-modeling/src/FeatureModel/Logic.hs | mit | 8,451 | 0 | 20 | 1,927 | 3,227 | 1,630 | 1,597 | 184 | 3 |
import Peptide
import Data.ByteString.Char8
main = print $ results
where sequence = pack "V(3D)NK(3F)NKEXCNZRAIEUALDPNLNDQQFHUKIWZIIXDC"
weight = 2194.9
results = calculationResults weight sequence
| stuartnelson3/molecule-weight | haskell/src/test.hs | mit | 220 | 0 | 7 | 44 | 44 | 24 | 20 | 6 | 1 |
module Unification(
Equation,
Equations,
Substitution,
TypeExpr(..),
unify,
applySubstitution) where
import Data.Set as S
import Control.Monad
data TypeExpr = TEVar Int
| TEConst Int
| TEArrow TypeExpr TypeExpr
deriving(Eq, Show,Ord)
-- equations between type terms
type Equation = (TypeExpr, TypeExpr)
-- system of equations
type Equations = Set Equation
-- A substitution is a set of equations of the form 'variable' = 'type'
type Substitution = Set (Int, TypeExpr)
filterElement :: (a -> Bool) -> Set a -> Maybe a
filterElement f set = let filtered = S.filter f set
in if size filtered > 0
then Just $ (head . toList) filtered
else Nothing
-- Find an equation which has the arrow term constructor on both sides
findArrowEquation :: Equations -> Maybe Equation
findArrowEquation = filterElement arrow
where arrow (TEArrow _ _, TEArrow _ _) = True
arrow _ = False
-- STEP 1: Type Reduction
termReduction :: Equations -> Equations
termReduction equations =
case findArrowEquation equations of
Just arrow@(TEArrow t1 t2, TEArrow t1' t2') ->
let newSet = S.union (fromList [(t1, t1'), (t2, t2')])
(S.delete arrow equations)
in termReduction newSet
Nothing -> equations
_ -> error "unexpected value from findArrowEquation"
-- STEP 2: Check for offending equations
hasOffendingEquations :: Equations -> Bool
hasOffendingEquations = any isOffending
where isOffending (TEConst _, TEArrow _ _) = True
isOffending (TEArrow _ _, TEConst _) = True
isOffending (TEConst c1, TEConst c2) = c1 /= c2
isOffending _ = False
-- STEP 3: Filter out identities
isIdentity :: Equation -> Bool
isIdentity (TEVar v, TEVar v') = v == v'
isIdentity _ = False
filterOut :: (a -> Bool) -> Set a -> Set a
filterOut filtr = S.filter (not . filtr)
filterOutIdentities :: Equations -> Equations
filterOutIdentities = filterOut isIdentity
-- STEP 4: Flip equations with variables on the right and not a variable on the
-- left.
flipEquations :: Equations -> Equations
flipEquations = S.map flipEquation
where flipEquation (TEConst c, TEVar v) = (TEVar v, TEConst c)
flipEquation (TEArrow t t', TEVar v) = (TEVar v, TEArrow t t')
flipEquation e = e
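-- Illustrative example (added note, not part of the original module):
--   flipEquations (fromList [(TEConst 0, TEVar 1), (TEVar 2, TEVar 3)])
--     == fromList [(TEVar 1, TEConst 0), (TEVar 2, TEVar 3)]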
-- STEP 5: Substitute
findSubstitutionCandidate :: Equations -> Maybe Equation
findSubstitutionCandidate equations = filterElement filterer equations
where filterer equ@(TEVar v, t) = setContainsVar v (S.delete equ equations)
filterer _ = False
setContainsVar :: Int -> Equations -> Bool
setContainsVar v = any (equationContainsVar v)
where equationContainsVar :: Int -> Equation -> Bool
equationContainsVar v (t,t') = (containsVar v t) || (containsVar v t')
containsVar :: Int -> TypeExpr -> Bool
containsVar _ (TEConst _) = False
containsVar v' (TEVar v) = v == v'
containsVar v (TEArrow t t') = (containsVar v t) || (containsVar v t')
substitute :: Int -> TypeExpr -> TypeExpr -> TypeExpr
substitute _ _ (TEConst c) = TEConst c
substitute v' t (TEVar v) | v == v' = t
| otherwise = TEVar v
substitute v t'' (TEArrow t t') = TEArrow (substitute v t'' t)
(substitute v t'' t')
substituteSet :: Equations -> Maybe Equations
substituteSet equations =
case findSubstitutionCandidate equations of
Just equ@(TEVar v, t) -> do guard ((not . isCircular) equ)
return $ S.insert equ (S.map (substituteEquation v t)
(S.delete equ equations))
Nothing -> Just equations
_ -> error "unexpected value from findSubstitutionCandidate"
where substituteEquation :: Int -> TypeExpr -> Equation -> Equation
substituteEquation v t'' (t,t') = ((substitute v t'' t),
(substitute v t'' t'))
isCircular :: Equation -> Bool
isCircular (TEVar v, TEArrow t t') = (containsVar v t) || (containsVar v t')
isCircular (TEArrow t t', TEVar v) = (containsVar v t) || (containsVar v t')
isCircular _ = False
-- One step of the five-stage algorithm above
--guard :: Bool -> Maybe ()
--guard True = Just ()
--guard False = Nothing
step :: Equations -> Maybe Equations
step equations = do let reduced = termReduction equations
guard (not (hasOffendingEquations reduced))
let filteredAndFlipped = (flipEquations . filterOutIdentities) reduced
substituteSet filteredAndFlipped
-- Applies the unification algorithm to find the most general unifier for a set
-- of equations. If it fails, returns Nothing and no unifier exists.
unify :: Equations -> Maybe Substitution
unify equations = do equations' <- step equations
if equations == equations'
then return $ equationsToSubstitution equations
else unify equations'
where equationsToSubstitution = S.map converter
where converter (TEVar v, t) = (v,t)
converter _ = error "unification algorithm didn't yield a substitution"
applySubstitution :: TypeExpr -> Substitution -> TypeExpr
applySubstitution = S.foldr (uncurry substitute)
eqs = fromList [(TEVar 1,TEVar 0),(TEVar 2,TEArrow (TEVar 0) (TEVar 1)),(TEVar 2,TEArrow (TEConst 0) (TEConst 1))]
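-- Usage sketch (added note, not part of the original module): 'unify eqs'
-- should yield Nothing for the sample system above, since the two arrow
-- equations force TEVar 0 = TEConst 0 and TEVar 1 = TEConst 1, while the
-- first equation also requires TEVar 1 = TEVar 0.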
| fsestini/stlc-machine | src/Unification.hs | mit | 5,583 | 0 | 16 | 1,568 | 1,623 | 833 | 790 | 101 | 5 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-} -- for class Typeish String
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{- |
Description : build Getopt handler from description using template haskell
Copyright : (c) Martyn J. Pearce 2014, 2015
License : BSD
Maintainer : [email protected]
A companion library to Console.GetOptions to allow the developer to specify
required options in a short space & time, and have template haskell generate
the necessary code.
This library uses Template Haskell to generate boilerplate options-handling
code, including records & lenses, aiming to leave options described simply &
concisely, and thus not detracting from the real business of the program.
-}
module Console.GetoptTH
( -- * Synopsis
{- | To use this module:
(0) list the standard imports
(0) call mkopts, with initial args and a list of opt-defining
strings
(0) call getopts to parse your incoming argv
(0) profit
Here is a simple example.
Please note the distinction between /arguments/, which are the standard
textual values given after the program name; and the /options/, which
are optional tweaks given using '--foo' or '-F'; thus in @grep
-r --include \\*.hs hlib GetOptions@, the @hlib@ and @GetOptions@ are
considered arguments, while the @-r@ and the @--include \\*.hs@ are
considered options.
You must use the __LANGUAGE TemplateHaskell__ option. This is not
shown in the literal examples below, because I cannot work out how to
render them within Haddock.
@
import Control.Monad ( forM_ )
import Data.Default ( Default( def ) )
import Control.Lens ( (^.) )
import Fluffy.Language.TH.Type ( readType )
import Console.Getopt ( ArgArity( ArgSome ) )
import Console.GetoptTH ( FileRO, mkopts )
$( mkopts "getopts" (ArgSome 1 3) "integer"
[ "bool|b::#just a bool"
, "s|string::String#a string\nlong description"
, "maybe-i|I::?Int#maybe integer summary\no default"
, "handle::filero</etc/motd>#read-only file\nauto-opened"
, "floats1::[,Float]<[1.0,2.0]><>#list of floats\nsplit on ','"
])
main :: IO ()
main = do
(args, opts) <- getopts (return . (readType "Int" :: String -> Int))
forM_ [ "ARGS: " ++ show args, "OPTS: " ++ show opts ] putStrLn
putStrLn $ "bool : " ++ show (opts ^. bool)
putStrLn $ "s : " ++ show (opts ^. s)
putStrLn $ "mebbei : " ++ show (opts ^. maybe_i)
putStrLn $ "handle : " ++ show (opts ^. handle)
putStrLn $ "floats1: " ++ show (opts ^. floats1)
@
Line by line; we start with
> {-# LANGUAGE TemplateHaskell #-}
We are using TemplateHaskell to allow us to generate the required code at
compile time (this enables the splice @$( ... )@ among other things).
Of the imports, these are required for use of GetoptTH:
> import Data.Default ( Default( def ) )
> import Console.Getopt ( ArgArity( .. ) )
> import Console.GetoptTH ( mkopts )
Data.Default is required because of the use of @def@ in the generated
code. mkopts is required because that's what you call in the splice (the
     bit between the @ $( ... ) @).  @ArgArity@ is required to tell mkopts how
     many arguments your program accepts.
The other imports shown in the full example are required for our example,
but not for every use of GetoptTH.
@
$( mkopts "getopts" (ArgSome 1 3) "integer"
[ "bool|b::#just a bool"
, "s|string::String#a string\nlong description"
, "maybe-i|I::?Int#maybe integer summary\no default"
, "handle::filero\<\/etc\/motd\>#read-only file\nauto-opened"
, "floats1::[,Float]\<[1.0,2.0]\>\<\>#list of floats\nsplit on ','"
])
@
This is where the real action happens. The mkopts call takes the name of
a function to create; a note of how many arguments your program accepts
(in the example given, between 1 & 3 inclusive); a descriptive text for
what those arguments look like (in this case, integers); and then a list
of option descriptors (each a string)
Those option descriptor strings take the general form
> NAME(|NAME)*>LENS::TYPE(<DEFAULT>)?(<START>)?#SUMMARY(\nDESCRIPTION)?
The names specify the names of options on the cmdline.
Single-character names must be preceded with a single '-';
multi-character names must be preceded with '--' when used on the
     cmdline.  Names may consist of alphabetic characters (in either
     case), digits, or hyphens (except for the first character).  Names
are case-sensitive.
     By default, the name of the generated lens is the first of the given
     option names, except that any hyphens are replaced with underscores.
However, a lens name may be explicitly given with a @>LENS@ in the
options string.
The lens name, whether derived from the first option name or given
explicitly, must satisfy the rules for a valid lens name, which are:
(0) The name may not be empty
(0) The first character of the name must be a lower-case letter
(0) All subsequent characters of the name may be letter characters
(of either case), digits, or underscores
The type name specifies the target type of the lensed value.
@
main :: IO ()
main = do
(args, opts) \<- getopts (return . (readType "Int" :: String -\> Int))
forM_ [ "ARGS: " ++ show args, "OPTS: " ++ show opts ] putStrLn
putStrLn $ "bool : " ++ show (opts ^. bool)
putStrLn $ "s : " ++ show (opts ^. s)
putStrLn $ "mebbei : " ++ show (opts ^. maybe_i)
putStrLn $ "handle : " ++ show (opts ^. handle)
putStrLn $ "floats1: " ++ show (opts ^. floats1)
@
-}
CmdlineParseable(..), FileRO, mkopts
)
where
-- THE PLAN: the programmer will create options using mkopts or similar. An
-- 'option' is a description of how to parse command line strings,
-- such that a set of Option Values is generated. Given a function
-- name (first argument to mkopts) of "foo", (we'll call this the
-- getopt_th fn) two records will be generated: "Foo" (the Option
-- Value (OV) record) and "Foo__" (the PCLV record). The latter is
-- the set of Parsed Command-Line Values (PCLVs); these will be
-- transformed into the former by applying option defaults and then
-- any IO actions required to generate Option Values. The function
-- that does this shall be called the 'effector'.
-- base --------------------------------
import Control.Exception ( Exception(..), SomeException, evaluate, try )
import Control.Monad ( forM_, mapAndUnzipM )
import Data.List ( partition )
import Data.Maybe ( fromJust )
import Data.Typeable ( Typeable )
-- data-default ------------------------
import Data.Default ( Default( def ) )
-- deepseq -----------------------------
import Control.DeepSeq ( NFData, force )
-- lens --------------------------------
import Control.Lens ( (^.), _2 )
-- template-haskell --------------------
import Language.Haskell.TH ( Dec( SigD )
, Exp( AppE, ConE, CondE, DoE
, ListE, LitE, TupE, VarE )
, ExpQ
, Lit( StringL )
, Name
, Pat( TupP, VarP )
, Pred( ClassP )
, Q
, Stmt( BindS, NoBindS )
, Type( AppT, ConT, ForallT, ListT, VarT )
, TyVarBndr( PlainTV )
, mkName, nameBase, newName, varE
)
import Language.Haskell.TH.Lib ( DecsQ, appE )
import Language.Haskell.TH.Syntax ( lift )
-- transformers ------------------------
import Control.Monad.IO.Class ( liftIO )
import Control.Monad.Trans.Writer.Strict ( WriterT, runWriterT, tell )
-- fluffy ------------------------------
import Fluffy.Data.String ( ucfirst )
import Fluffy.Language.TH ( appTIO, assignN, composeApE, composeE
, infix2E , listOfN, mAppE, mAppEQ
, mkSimpleTypedFun, stringEQ
, tsArrows, tupleL
)
import Fluffy.Language.TH.Record ( mkLensedRecord, mkLensedRecordDef )
import Fluffy.Sys.Exit ( exitUsage )
import Fluffy.Text.PCRE ( subst )
-- this package --------------------------------------------
import Console.Getopt.ArgArity ( ArgArity(..), liftAA )
import Console.Getopt ( HelpOpts(..), Option
, getopts, helpme, mkOpt, errOut )
import Console.Getopt.CmdlineParseable ( CmdlineParseable(..), FileRO )
import Console.Getopt.OptDesc ( OptDesc
, descn, dfGetter, dfltTxt, enactor
, name, names, optSetVal
, precordDefFields, recordFields
, summary, typename
)
--------------------------------------------------------------------------------
-- PUBLIC INTERFACE --
--------------------------------------------------------------------------------
-- mkopts ----------------------------------------------------------------------
{- | primary entry point for options generation
@
$( mkopts "getoptsx" (ArgSome 1 3) "integer"
[ "s|string\>str::String#string summary"
, "i|int|Int::Int\<4\>#integer summary\ndefault 4"
, "C\>incr::incr#increment summary\nincrement int longhelp"
, "decr|D::decr\<6\>#decrement summary\ndecrement int longhelp"
, "handle::filero\</etc/motd\>#read-only file\nauto-opened"
]
)
main :: IO ()
main = do
(args, opts) <- getoptsx (return . (readType \"Int\" :: String -> Int))
forM_ [ "ARGS: " ++ show args, "OPTS: " ++ show opts ] putStrLn
putStrLn $ "s: " ++ show (opts ^. s)
putStrLn $ "i: " ++ show (opts ^. i)
putStrLn $ "incr: " ++ show (opts ^. incr)
putStrLn $ "decr: " ++ show (opts ^. decr)
@
Call this within a splice, providing the name of a fn to generate, along
with defining parameters. That function will be generated, returning
parsed arguments and options, having eagerly/strictly parsed them (so that
any relevant errors are found at this time).
     The option strings are compiled.  The syntax is:
       > {optnames}(>{lensname})?::{type}(<{default}>)?#{shorthelp}(\n{longhelp})?
* optnames
A list of option names, separated by @|@, to be used for invocation.
* Each name may be single character, being invoked with @-c@, or
multi-character, being invoked with @--chars@.
* Each name may consist of some combination of letters, numbers, and
         hyphens (@-@).  The first (or only) character must be alphanumeric.
* Each name must be unique across the set of options.
* Option names are case-sensitive; @c@ and @C@ are distinct option
names.
The option name list may not be empty.
* lensname
The name of the lens to target with this option. The same naming rules
apply as for optnames, with the following exceptions:
* hyphens (@-@) are not permitted; underscores (@_@) are,
* the name may begin with an underscore or an alphabetic character
(note not a number).
If a lensname is not given, then the first of the given option names is
used.
* any hyphens are replaced with underscores,
* an option name with a leading digit is preceded in the lensname with
an underscore.
       Each lensname must be unique within the option set, and must also be a
       unique function name within the compilation context.
       The lensname, whether explicit or implicitly 'inherited' from the option
       names, may not begin with a capital letter (because it becomes a function
       in code).
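       For example (purely illustrative, using descriptors from the synopsis
       and example above): @"maybe-i|I::?Int#..."@ gives no explicit lensname,
       so the lens is derived from the first option name @maybe-i@, giving
       @maybe_i@; whereas @"s|string\>str::String#..."@ names its lens @str@
       explicitly.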
* type
The value type of the option. This is the type of the value that is
expected to be returned by the lens; internally, a different type
(typically using an encapsulating monad, e.g., Maybe t).
The available types are:
* Standard haskell types, as an alphanumeric string. Compound types
(with spaces in the name) are not currently supported. The following
types have a natural default:
* String - ""
* Int - 0
For those that don't have a natural default, if no default is
supplied in the option specification and the user doesn't invoke the
given option; then an error will be generated at option parsing time
- thus you effectively have mandatory options.
* incr
Takes no value on the cmdline; each invocation increases the option
value. Starting value is 0, or the default value if specified. User
sees an Int.
     * decr
       Takes no value on the cmdline; each invocation decreases the option
       value.  Starting value is 0, or the default value if specified.  User
       sees an Int.
* filero
Takes the name of a file as cmdline argument; opens the file RO, and
returns a handle to the file. Generates an error at options-parsing
runtime if no user value is supplied, and there is no default.
* ?t or Maybe t
       This is like t, except that the value is wrapped in a Maybe; if no
       value is provided on the command line, you get Nothing.
Compilation will fail if you provide a default value
(with <default>); at that point, the use of Maybe doesn't make a
whole lot of sense (since you would be guaranteed a Just something).
       If you really, really want a Maybe type with a default, use an
       explicit Maybe type (which will also mean that you can specify
       "Nothing" as an option value; but conversely that you'll also need to
       specify "Just x" explicitly as an option value).
* [t]
* default
The value to return to the caller if the option is never invoked by the
user. The string provided is given to read.
* shorthelp
     short summary used with --help; preferably under 50 chars
* longhelp
long help used with --help=optname
-}
mkopts :: String -- ^ name of the getopts fn to
-- create, e.g., "optCfg"
-> ArgArity -- ^ arity of arguments
-> String -- ^ arg type (for help text)
-- we use just a big string to allow for optionality of bits
-> [String] {- ^ opt configurations;
config string, default value,
                                long description
-}
-> DecsQ
mkopts getoptName arity argtype optcfgs = do
{- mkopts "getoptsx" (ArgSome 1 3) "integer"
[ "s|string::String#summary"
, "incr|C::incr#increment summary\nincrement int longhelp"
, "handle::filero</etc/motd>#read-only file\nauto-opened"
]
generates something like
data Getoptsx__ = Getoptsx__ { _s__ :: Maybe String
, _incr__ :: Int
, _handle__ :: Maybe FilePath
}
deriving Show
instance Default Getoptsx__ where def = Getoptsx__ Nothing 0 Nothing
s___ :: Lens' Getoptsx__ (Maybe String)
s___ a b = fmap asn (a $ (_s___ b))
where asn x = b { _s___ = x }
incr___ :: Lens' Getoptsx_ Int
incr___ a b = fmap asn (a $ (_incr___ b))
where asn x = b {_incr___ = x }
handle___ :: Lens' Getopts__ (Maybe FilePath)
handle___ a b = fmap asn (a $ ( _handle___ b))
where asn x = b { _handle___ = x }
data Getoptsx = Getoptsx { _s :: String
, _incr :: Int
, _handle :: Handle
}
deriving Show
s :: Lens' Getoptsx String
s a b = fmap asn (a $ (_s b)) where asn x = b { _s = x }
incr :: Lens' Getoptsx Int
incr a b = fmap asn (a $ (_incr b)) where asn x = b { _incr = x }
handle :: Lens' Getoptsx Handle
handle a b = fmap asn (a $ (_handle b)) where asn x = b { _handle = x }
getoptsx_ :: [Option Getoptsx_]
getoptsx_ = [ mkOpt "s" ["string"] (setval parseAs "String" s___)
"string summary" "" "Maybe String" "\"\""
, mkOpt "C" ["incr"] (setvalc incr___)
"increment summary" "increment int longhelp" "Int" "0"
, mkOpt "" ["handle"] (setval return handle___)
"read-only file" "auto-opened"
"Maybe FilePath" "GHC.Base.id \"/etc/motd\"",
, mkOpt "" ["help"] helpme (def { arg_arity = ArgSome 1 3
, argtype = "integer" })
"this help" "Provide help text..." "" ""
]
getoptsx_effect :: Getoptsx__ -> IO (Either [NFException] Getoptsx)
getoptsx_effect pclv = do
((string_x', incr_x', handle_x'), exs) <- runWriterT $ do
string_x <- tryWriteF $
return (((fromMaybe "") . (view s___)) pclv);
incr_x <- tryWriteF $ return (view incr___ pclv)
handle_x <- tryWriteF $
openFileRO (((fromMaybe "/etc/motd")
. (view handle___)) pclv)
return (string_x, incr_x, handle_x)
return $ if null exs
then Right $ Getoptsx (fromJust string_x')
(fromJust incr_x')
(fromJust handle_x')
else Left exs
getoptsx :: (NFData a, Show a) => (String -> IO a) -> IO ([a], Getoptsx)
getoptsx = (t2apply (checkEx . getoptsx_effect))
. (getopts getoptsx_ (ArgSome 1 3) "integer")
-}
let optdescs :: [OptDesc]
optdescs = fmap read optcfgs
-- assign a list of options (returned by mkOpt) to a name
-- (getoptsx_ :: [Option Getoptsx_]; getoptsx_ = [ mkOpt ... ] above)
mkopts_ts :: Type
mkopts_ts = AppT ListT (AppT (ConT ''Option) (pclv_typename getoptName))
-- create a record to hold PCLVs. This is created in one pass; and when
-- it comes to creating the OVs record, defaults are inserted as necessary
-- and IO is performed to produce the user-visible opts record
-- (data Getoptsx__ = Getoptsx__ { ... } above)
precord :: DecsQ
precord = mkLensedRecordDef (pclv_typename getoptName)
(fmap precordDefFields optdescs)
[''Show]
-- create a record to hold final values to pass back to the user;
-- (data Getoptsx = Getoptsx { ... } above)
record :: DecsQ
record = mkLensedRecord (ov_typename getoptName)
(fmap recordFields optdescs)
[''Show]
opts :: [Exp] <- sequence $ fmap mkopt optdescs ++ [ helpmeQ arity argtype ]
-- assign a list of mkOpt calls to the chosen var
-- (getoptsx_ :: [Option Getoptsx_];getoptsx_ = [ mkOpt ... ] above)
let asgn_mkopts :: DecsQ
asgn_mkopts = return [ -- getoptsx_ :: [Option Getoptsx_]
SigD (cfg_name getoptName) mkopts_ts
, -- getoptsx_ = [ mkOpt ... ]
assignN (cfg_name getoptName) (ListE opts)
]
concatM [ precord -- (data Getoptsx__ = Getoptsx__ { ... } above)
, record -- (data Getoptsx = Getoptsx { ... } above)
, asgn_mkopts -- (getoptsx_ ... above)
-- (getoptsx_effect :: Getoptsx__ ->
-- IO (Either [NFException] Getoptsx)
-- getoptsx_effect pclv = pclv -> do { ... }
-- getoptsx :: (NFData a, Show a) => (String -> IO a)
-- -> IO ([a], Getoptsx)
-- getoptsx = (t2apply (checkEx . getoptsx_effect))
-- . (getopts getoptsx_ (ArgSome 1 3) "integer")
-- above)
, mkGetoptTH optdescs getoptName arity argtype
]
--------------------------------------------------------------------------------
-- INTERNAL FUNCTIONS --
--------------------------------------------------------------------------------
-- mkGetoptTH ------------------------------------------------------------------
{- | generate effector & getopts_th fn
(getoptsx_effect :: Getoptsx__ -> IO (Either [NFException] Getoptsx)
getoptsx_effect pclv = do
((string_x', incr_x', handle_x'), exs) <- runWriterT $ do
string_x <- tryWriteF $
return (((fromMaybe "") . (view s___)) pclv);
incr_x <- tryWriteF $ return (view incr___ pclv)
handle_x <- tryWriteF $
openFileRO (((fromMaybe "/etc/motd")
. (view handle___)) pclv)
return (string_x, incr_x, handle_x)
return $ if null exs
then Right $ Getoptsx (fromJust string_x')
(fromJust incr_x')
(fromJust handle_x')
else Left exs
getoptsx :: (NFData a, Show a) => (String -> IO a)
-> IO ([a], Getoptsx)
getoptsx = (t2apply (checkEx . getoptsx_effect))
. (getopts getoptsx_ (ArgSome 1 3) "integer")
above)
-}
mkGetoptTH :: [OptDesc] -- ^ options set
-> String -- ^ name of fn to generate (getoptsx above)
-> ArgArity -- ^ arity of arguments
-> String -- ^ arg type (for help text)
-> Q [Dec]
mkGetoptTH optdescs getoptName arity argtype = do
typeSig <- mkGetoptTHTypeSig (ov_typename getoptName)
eff <- mkEffector optdescs getoptName
let getopt_th = mk_getopt_th typeSig getoptName arity argtype
return $ eff ++ getopt_th
-- mkEffector ------------------------------------------------------------------
{- | create effector, including type sig
getoptsx_effect pclv = do
((string_x', incr_x', handle_x'), exs) <- runWriterT $ do
string_x <- tryWriteF $
return (((fromMaybe "") . (view s___)) pclv);
incr_x <- tryWriteF $ return (view incr___ pclv)
handle_x <- tryWriteF $
openFileRO (((fromMaybe "/etc/motd")
. (view handle___)) pclv)
return (string_x, incr_x, handle_x)
return $ if null exs
then Right $ Getoptsx (fromJust string_x')
(fromJust incr_x')
(fromJust handle_x')
else Left exs
above)
-}
mkEffector :: [OptDesc] -- ^ option field list
-> String -- ^ getopt_th name
-> Q [Dec]
mkEffector optdescs getoptName = do
let effectorSig = tsArrows [ pclv_typename getoptName
, appTIO (AppT (AppT (ConT ''Either)
(AppT ListT
(ConT ''NFException)))
(ov_typename getoptName))
]
pclv <- newName "pclv" -- name of the parameter to the effector; which is
-- a PCLV record
effectorBody <- mkEffectorBody optdescs getoptName pclv
return $ mk_effector effectorSig (effect_name getoptName) pclv effectorBody
-- mk_effector -----------------------------------------------------------------
-- | create effector fn, inc. type signature
--
-- (getoptsx_effect :: Getoptsx__ -> IO (Either [NFException] Getoptsx)
-- getoptsx_effect g = do { ... }
-- above)
mk_effector :: Type -- ^ type signature of the fn to create
-> String -- ^ name of the effector fn to create
-> Name -- ^ function parameter name
-> Exp -- ^ effector body
-> [Dec]
mk_effector ts nam g = mkSimpleTypedFun ts (mkName nam) [g]
-- mk_getopt_th ----------------------------------------------------------------
-- | generated getopts-like fn
--
-- (getoptsx :: (NFData a, Show a) => (String -> IO a) -> IO ([a], Getoptsx)
-- getoptsx = (t2apply (checkEx . getoptsx_effect))
-- . (getopts getoptsx_ (ArgSome 1 3) "integer")
-- above)
mk_getopt_th :: Type -- ^ type signature of generated fn
-> String -- ^ name of fn to generate (getoptsx above)
-> ArgArity -- ^ arity of arguments
-> String -- ^ arg type (for help text)
-> [Dec]
mk_getopt_th sig getoptName arity argtype =
mkSimpleTypedFun sig (mkName getoptName) [] (infix2E lhs (VarE '(.)) rhs)
where -- (getopts getoptsx_ (ArgSome 1 3) "integer" above)
rhs = mAppE [ VarE 'getopts, cfg_name getoptName
, liftAA arity, (LitE . StringL) argtype ]
-- (t2apply (checkEx . getoptsx_effect) above)
lhs = AppE (VarE 't2apply)
(composeE (VarE 'checkEx) (effect_name getoptName))
-- mkGetoptTHTypeSig -----------------------------------------------------------
{- | type signature for generated getopts-like fn, e.g.,
> (NFData a, Show a) =>
> ArgArity -> String -> (String -> IO a) -> IO ([a], Getoptsx)
-}
mkGetoptTHTypeSig :: Type -> Q Type
mkGetoptTHTypeSig t = do
a <- newName "a"
return .
ForallT [PlainTV a] [ClassP ''NFData [VarT a], ClassP ''Show [VarT a]] $
tsArrows [ -- ConT ''ArgArity
-- , ConT ''String
-- ,
tsArrows [ ConT ''String , appTIO(VarT a) ]
,
appTIO (tupleL [ listOfN a, t ])
]
-- Typeish ---------------------------------------------------------------------
-- | a type, given a String name, represented as a Name or a Type as necessary
class Typeish t where
convert :: String -> t
instance Typeish String where convert = id
instance Typeish Name where convert = mkName
instance Typeish Type where convert = ConT . mkName
instance Typeish Exp where convert = VarE . mkName
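-- e.g., @convert "foo" :: Type@ is @ConT (mkName "foo")@, while
-- @convert "foo" :: Exp@ is @VarE (mkName "foo")@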
-- ov_typename -----------------------------------------------------------------
-- | given a name (the name of the getoptth fn to create), what type name shall
-- we use for the OV record?
ov_typename :: (Typeish t) => String -> t
ov_typename = convert . ucfirst
-- pclv_typename ---------------------------------------------------------------
-- | given a name (the name of the getoptth fn to create), what type name shall
-- we use for the PCLV record?
pclv_typename :: (Typeish t) => String -> t
pclv_typename = convert . (++ "__" ) . ov_typename
-- effect_name -----------------------------------------------------------------
-- | given a name (the name of the getoptth fn to create), what name shall we
-- use for the function from PCLV Record to OV Record?
effect_name :: (Typeish t) => String -> t
effect_name = convert . (++ "_effect")
-- cfg_name --------------------------------------------------------------------
-- | name of variable to hold the list of calls to mkOpt (getoptsx_ above)
cfg_name :: (Typeish t) => String -> t
cfg_name = convert . (++ "_")
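-- e.g., at the String instance, for a getopt_th name of "getoptsx" (as in the
-- examples above):
--   ov_typename   "getoptsx" == "Getoptsx"
--   pclv_typename "getoptsx" == "Getoptsx__"
--   effect_name   "getoptsx" == "getoptsx_effect"
--   cfg_name      "getoptsx" == "getoptsx_"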
-- concatM ---------------------------------------------------------------------
concatM :: (Functor m, Monad m) => [m [a]] -> m [a]
concatM = fmap concat . sequence
-- mkopt -----------------------------------------------------------------------
mkopt :: OptDesc -> ExpQ
mkopt optdesc =
let (shorts, longs) = partition ((1==) . length) $ optdesc ^. names
display_type = case dropWhile (`elem` "*?") $ optdesc ^. typename of
'[' : '<' : y -> '[' : (tail . dropWhile (/= '>')) y
'[' : ',' : x -> '[' : x
x -> x
display_dflt = -- trim off any leading 'id '; replace "..." with ...;
-- remove anything Nothing
let disp = ("^\\\"(.+)\\\"$" `subst` "$1") $
case dfltTxt optdesc of
'i' : 'd' : ' ' : x -> x
"GHC.Types.[]" -> "[]"
y -> y
in case disp of
"Data.Maybe.Nothing" -> ""
"Nothing" -> ""
z -> z
in -- mkOpt shorts longs
-- (optSetVal optdesc)
-- (optdesc ^. summary)
-- (optdesc ^. descn)
-- (display_type)
-- (display_dflt)
mAppEQ [ return $ VarE 'mkOpt
, appE (varE 'concat) (lift shorts) -- short options
, lift longs -- long options
, optSetVal optdesc -- handler (setval*)
, stringEQ $ optdesc ^. summary -- summary help
, stringEQ $ optdesc ^. descn -- long help
, stringEQ display_type -- type name text (for help)
, stringEQ display_dflt -- default value (for help)
]
-- helpmeQ ---------------------------------------------------------------------
-- | ExpQ variant of `helpme`
helpmeQ :: ArgArity -> String -> ExpQ
helpmeQ arity argtype =
[| mkOpt "" [ "help" ] (helpme def { arg_arity = arity, arg_type = argtype })
"this help; use --help=<opt> for detail (no leading hyphens on <opt>)"
(concat [ "Provide help text: without an arg, produces a summary options "
, "output; with an arg (--help=foo), then detailed help text for "
, "that option (if any is available) will be output."
])
"" "" -- option typename; dflt
|]
-- mkEffectorBody --------------------------------------------------------------
{- | build a (do) stmt that takes a GetoptName__ record, for each field in turn
     extracts the PCLV, passes it through the relevant defaulter and on to the
     relevant enactor, and ultimately builds a GetoptName record from the
     resultant values.  The type of the resulting expression is
     GetoptName__ -> IO GetoptName
-}
-- mkEffectorBody g = do
-- ((a, b, ...), exs) <- runWriterT $ do
-- a <- tryWriteF $ enactor (dfGetter a___)
-- b <- tryWriteF $ enactor (dfGetter b___)
-- ...
-- return (a, b, ...)
-- return $ if null exs
-- then Right $ GetoptName (fromJust a) (fromJust b) ...
-- else Left exs
-- (do
-- ((string_x', incr_x', handle_x'), exs) <- runWriterT $ do
-- string_x <- tryWriteF $
-- return (((fromMaybe "") . (view s___)) pclv);
-- incr_x <- tryWriteF $ return (view incr___ pclv)
-- handle_x <- tryWriteF $
-- openFileRO (((fromMaybe "/etc/motd")
-- . (view handle___)) pclv)
-- return (string_x, incr_x, handle_x)
-- return $ if null exs
-- then Right $ Getoptsx (fromJust string_x')
-- (fromJust incr_x')
-- (fromJust handle_x')
-- else Left exs
-- above)
mkEffectorBody :: [OptDesc] -- ^ option field list
-> String -- ^ name of the generated getopt_th
-> Name -- ^ name of the fn param
-> ExpQ
mkEffectorBody optdescs getoptName pclv = do
-- bs are the names that are assigned to, within the writer monad
-- binds are each individual bound defaulted option
(bs, binds) <- mapAndUnzipM (effectBind pclv) optdescs
-- mbs are the names of the maybe values, having been try-ed; in each case,
-- it's the name from within the writer, plus a trailing "'"
mbs <- sequence $ fmap (newName . nameBase) bs
-- rtn_bs_tup is the return of a tuple of each bind (return (a, b, ...))
let rtn_bs_tup = [NoBindS (AppE (VarE 'return) (TupE (fmap VarE bs)))]
run_writer = composeApE (VarE 'runWriterT) (DoE (binds ++ rtn_bs_tup))
exs = mkName "exs"
-- c'tor args are the map of fromJust across the maybe binds
c'tor_args = fmap (AppE (VarE 'fromJust) . VarE) mbs
c'tor = mAppE ((ConE $ ov_typename getoptName) : c'tor_args)
check = CondE -- if null exs
(AppE (VarE 'null) (VarE exs))
-- then Right $ GetoptName (fromJust a) (fromJust b) ...
(composeApE (ConE 'Right) c'tor)
-- else Left exs
(AppE (ConE 'Left) (VarE exs))
return (DoE -- ((a, b, ...), exs) <- runWriterT...
[ BindS (TupP [TupP (fmap VarP mbs), VarP exs]) run_writer
-- return $ if ...
, NoBindS (composeApE (VarE 'return) check)
])
-- effectBind ------------------------------------------------------------------
-- | given some var pclv which is of type GetoptName__, return a stmt of the
-- form b <- enactor (dfGetter pclv)
-- (e.g., string_x <- tryWriteF $ return (((fromMaybe "") . (view s___)) pclv
-- above)
effectBind :: Name -> OptDesc -> Q (Name, Stmt)
effectBind pclv o = do
b <- newName $ name o
dfg <- dfGetter o
let enact = AppE (enactor o) (AppE dfg (VarE pclv))
tryW = composeApE (VarE 'tryWriteF) enact
return (b, BindS (VarP b) tryW)
-- t2apply ---------------------------------------------------------------------
-- | apply a monadic fn to the second element of a monadic pair
t2apply :: (Monad m, Functor m) => (b -> m b') -> m (a, b) -> m (a, b')
t2apply f = (>>= _2 f)
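-- e.g. (hypothetical values): @t2apply (pure . show) (pure (1 :: Int, True))@
-- yields @(1, "True")@ in the ambient monad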
-- tryWrite --------------------------------------------------------------------
-- | evaluate an IO thing, catch any errors, write them to a WriterT
_tryWrite :: IO a -> WriterT [SomeException] IO (Maybe a)
_tryWrite io = do
io' <- liftIO (try io >>= evaluate)
case io' of
Left e -> tell [e] >> liftIO (return Nothing)
Right r -> (liftIO . return . Just) r
-- tryWriteF -------------------------------------------------------------------
-- | tryWriteF for things susceptible to DeepSeq
tryWriteF :: NFData a => IO a -> WriterT [NFException] IO (Maybe a)
tryWriteF io = do
io' <- liftIO (try io >>= evaluate . force)
case io' of
Left e -> tell [e] >> liftIO (return Nothing)
Right r -> (liftIO . return . Just) r
-- NFException -----------------------------------------------------------------
-- | A SomeException susceptible to DeepSeq. Doesn't actually do anything, but
-- means that we can use it within an NFData/force context (e.g., tryWriteF)
newtype NFException = NFException SomeException
deriving Typeable
instance Show NFException where
show (NFException e) = show e
instance NFData NFException where
instance Exception NFException where
toException (NFException e) = e
fromException = Just . NFException
-- checkEx ---------------------------------------------------------------------
-- | check an Either [Exception] a; if Left, write the exceptions to stderr and
-- exitUsage
checkEx :: Exception e => IO (Either [e] a) -> IO a
checkEx ei_io = do
ei <- ei_io
case ei of
Left exs -> forM_ (fmap show exs) errOut >> exitUsage
Right r -> return r
-- that's all, folks! ----------------------------------------------------------
| sixears/getopt | src/Console/GetoptTH.hs | mit | 37,982 | 0 | 21 | 13,263 | 3,111 | 1,758 | 1,353 | 226 | 7 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecordWildCards #-}
module TBR.Core
( add
, finish
, list
, move
, random
, remove
, search
, start
, status
, stop
) where
import Control.Applicative
import Control.Monad.Error (catchError)
import Control.Monad.State
import Data.Function (on)
import Data.Monoid
import qualified Data.Set as Set
import Data.Text (Text)
import Data.Text.Lazy (toStrict)
import System.Random (randomRIO)
import TBR
import TBR.Monad
import Text.Shakespeare.Text (st)
--------------------------------------------------------------------------------
-- Utility operations
-- | Returns a subset of the current booklist that contains the books for which
-- the given predicate returns true.
find :: MonadState BookList m => (Book -> Bool) -> m BookList
find f = gets (Set.filter f)
-- | Returns books in the given section.
withSection :: MonadState BookList m => Section -> m BookList
withSection s = find $ (== s) . bookSection
-- | Returns the books that are marked as being read.
reading :: MonadState BookList m => m BookList
reading = withSection Reading
-- | Returns the books under the to-be-read section.
toBeRead :: MonadState BookList m => m BookList
toBeRead = withSection ToBeRead
-- | Returns all books that are not being read.
allButReading :: MonadState BookList m => m BookList
allButReading = find $ (/= Reading) . bookSection
-- | Returns all books that are under a custom list.
others :: MonadState BookList m => m BookList
others = find $ isOther . bookSection
where isOther (Other _) = True
isOther _ = False
-- | Returns books that are in the section that matches the given name.
matchSection :: MonadBooks m => Text -> m BookList
matchSection = findSection >=> withSection
-- | Queries the given list of books for books that match the given criteria.
query :: Text -> BookList -> BookList
query q = Set.filter matcher
where matcher = (||) <$> match . bookTitle
<*> match . bookAuthor
match = tokenMatch q
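-- e.g. (hypothetical data): @query "tolkien" books@ keeps exactly those books
-- whose title or author matches the token "tolkien".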
-- | Expects exactly one book and throws an error if that is not satisfied.
expect1 :: MonadBooks m => BookList -> m Book
expect1 bl = case Set.toAscList bl of
[] -> throwError [st|Unable to find such a book.|]
[b] -> return b
_ -> do putLn "The query matched:"
printBookList bl
throwError "Please refine the query."
-- | Finds exactly one book that matches the given query and throws an error
-- otherwise.
query1 :: MonadBooks m => Text -> BookList -> m Book
query1 q = expect1 . query q
-- | Returns all books in the list.
allBooks :: MonadState BookList m => m BookList
allBooks = get
-- | Finds and returns a @Section@ that matches the given query.
findSection :: MonadBooks m => Text -> m Section
findSection name
| name `eq` "Reading" = return Reading
| name `eq` "To Be Read" = return ToBeRead
| otherwise = findOther
where
eq = (==) `on` tokens
findOther = do
matches <- gets $ Set.toAscList
. Set.filter match
. Set.map bookSection
case matches of
[o] -> return o
_ -> throwError [st|Unable to find such a section.|]
where
match (Other x) = name `eq` x
match _ = False
--------------------------------------------------------------------------------
-- Formatting
-- | Formats a single book in a nice readable format.
formatBook :: Book -> Text
formatBook Book{..} = [st|#{bookTitle} by #{bookAuthor}|]
-- | Prints the given list of books.
printBookList :: (MonadIO m) => BookList -> m ()
printBookList = puts . toStrict . writeBookList
--------------------------------------------------------------------------------
-- Commands
add :: MonadBooks m => Text -> Text -> Maybe Text -> m ()
add title author lname = do
books <- allBooks
unless (Set.null $ matches books) $
throwError "You have already added that book."
modify $ Set.insert book
putLn [st|Added #{formatBook book} to the reading list.|]
where book = Book title author' (maybe ToBeRead Other lname)
matches bl = query title bl `Set.intersection` query author bl
author' = capitalize author
finish :: MonadBooks m => Maybe Text -> m ()
finish q = do
b <- reading >>= maybe expect1 query1 q
modify $ Set.delete b
putLn [st|Finished reading #{formatBook b}|]
list :: Maybe Text -> BooksM ()
list name = maybe allBooks matchSection name >>= printBookList
move :: MonadBooks m => Text -> Text -> m ()
move q lname = do
b <- allBooks >>= query1 q
-- Try to move into an existing section before creating a new one.
section <- findSection lname `catchError`
(const . return $ Other lname)
modify $ Set.insert (b { bookSection = section })
. Set.delete b
putLn [st|Moved #{formatBook b} to #{show section}|]
random :: MonadBooks m => Maybe Text -> m ()
random lname = do
books <- maybe toBeRead matchSection lname
author <- select $ Set.map bookAuthor books
book <- select $ Set.filter ((== author) . bookAuthor) books
putLn $ formatBook book
where
select s = liftIO $ do
i <- randomRIO (0, Set.size s - 1)
return $ Set.toAscList s !! i
remove :: MonadBooks m => Text -> m ()
remove q = do
b <- allButReading >>= query1 q
modify $ Set.delete b
putLn [st|Removed #{formatBook b} from the reading list.|]
search :: Text -> BooksM ()
search q = query q <$> allBooks >>= printBookList
start :: MonadBooks m => Text -> m ()
start q = do
b <- allButReading >>= query1 q
modify $ Set.insert (b { bookSection = Reading })
. Set.delete b
putLn [st|Started reading #{formatBook b}.|]
status :: (Functor m, MonadBooks m) => m ()
status = do
rl <- reading
unless (Set.null rl) $
printBookList rl
tbrCount <- Set.size <$> toBeRead
otherCount <- Set.size <$> others
putLn $ [st|There are #{show $ tbrCount + otherCount} |]
<> [st|(#{show otherCount} other) books to be read.|]
stop :: (Functor m, MonadBooks m) => Maybe Text -> Maybe Text -> m ()
stop q l = do
b <- reading >>= maybe expect1 query1 q
sec <- maybe (return ToBeRead) findSection l
modify $ Set.insert (b { bookSection = sec })
. Set.delete b
putLn [st|Stopped reading #{formatBook b}.|]
| abhinav/tbr | app/TBR/Core.hs | mit | 6,784 | 0 | 15 | 1,836 | 1,824 | 934 | 890 | 142 | 3 |
------------ exercise 2 : Four in a Row -----------------
-- success = four in a row, four in the same column, or four on a diagonal
checkFour :: [[Char]] -> Bool
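-- A minimal sketch of how such a check could look (not the original exercise
-- solution; assumes a rectangular, non-empty grid and looks for a run of four
-- of one given character):
checkFourFor :: Char -> [[Char]] -> Bool
checkFourFor c grid = any hasRun (rows ++ cols ++ diags grid ++ diags (map reverse grid))
  where
    rows    = grid
    cols    = [ map (!! j) grid | j <- [0 .. width - 1] ]
    diags g = [ [ g !! r !! (r + k) | r <- [0 .. length g - 1]
                                    , r + k >= 0, r + k < width ]
              | k <- [ -(length g - 1) .. width - 1 ] ]
    width   = length (head grid)
    hasRun  = go (0 :: Int)
      where go n _ | n >= 4 = True
            go _ []         = False
            go n (x:xs)     = go (if x == c then n + 1 else 0) xs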
| t00n/ProjectEuler | TP4.hs | epl-1.0 | 159 | 0 | 7 | 31 | 18 | 11 | 7 | 1 | 0 |
module Access.System.IO.Error
( module System.IO.Error
, IOErrorAccess(..)
) where
import System.IO.Error
import Access.Core
class Access io => IOErrorAccess io where
ioError' :: IOError -> io a
catchIOError' :: io a -> (IOError -> io a) -> io a
tryIOError' :: io a -> io (Either IOError a)
modifyIOError' :: (IOError -> IOError) -> io a -> io a
instance IOErrorAccess IO where
ioError' = ioError
catchIOError' = catchIOError
tryIOError' = tryIOError
modifyIOError' = modifyIOError
| bheklilr/base-io-access | Access/System/IO/Error.hs | gpl-2.0 | 578 | 0 | 11 | 170 | 169 | 91 | 78 | 15 | 0 |
-- GenI surface realiser
-- Copyright (C) 2009 Eric Kow
--
-- This program is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public License
-- as published by the Free Software Foundation; either version 2
-- of the License, or (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-- | Just regression testing of suites
-- This can be seen as regression testing of GenI
-- and also of grammars using GenI
{-# LANGUAGE OverloadedStrings #-}
module NLP.GenI.Regression (mkSuite) where
import Control.Applicative ((<$>))
import Control.Monad (forM_)
import Control.Monad.Trans.Error
import Data.Either
import Data.IORef (newIORef, readIORef, modifyIORef)
import Data.List(sort)
import System.FilePath ((</>))
import Test.HUnit
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import qualified Data.Text as T
import NLP.GenI
import NLP.GenI.Configuration
import NLP.GenI.Console
import NLP.GenI.General ( fst3, )
import NLP.GenI.LexicalSelection ( CustomSem )
import NLP.GenI.Pretty
import NLP.GenI.Semantics ( SemInput )
import NLP.GenI.TestSuite ( TestCase(tcSem, tcName, tcExpected) )
import NLP.GenI.Simple.SimpleBuilder
mkSuite :: IO Test.Framework.Test
mkSuite = do
goods <- sequence
[ goodSuite "ej" (usualArgs "examples/ej" [])
, goodSuite "chatnoir" (usualArgs "examples/chatnoir" [])
, goodSuite "demo" (usualArgs "examples/demo" [])
, goodSuite "promettre" (usualArgs "examples/promettre" ["--opts=pol"])
, goodSuite "artificial" (usualArgs "examples/artificial" [])
, badSuite "artificial (bad)" (usualArgsBad "examples/artificial" [])
]
return $ testGroup "Functional tests (coarse grained)" goods
usualArgs :: FilePath -> [String] -> [String]
usualArgs p args =
[ "-t", p </> "trees"
, "-l", p </> "lexicon"
, "-s", p </> "suite"
] ++ args
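-- e.g. (on a POSIX system):
-- usualArgs "examples/ej" []
-- == ["-t","examples/ej/trees","-l","examples/ej/lexicon","-s","examples/ej/suite"]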
usualArgsBad :: FilePath -> [String] -> [String]
usualArgsBad p args =
[ "-t", p </> "trees"
, "-l", p </> "lexicon"
, "-s", p </> "suite-bad"
] ++ args
noGui = setFlag DisableGuiFlg ()
type TestMaker = ProgStateRef -> CustomSem SemInput -> TestCase SemInput -> Test.Framework.Test
goodSuite = genSuite goodSuiteCase
badSuite = genSuite badSuiteCase
genSuite :: TestMaker -> String -> [String] -> IO Test.Framework.Test
genSuite mkCase name xs = do
confArgs <- processInstructions =<< treatArgs optionsForStandardGenI xs
let pst = emptyProgState (noGui confArgs)
pstRef <- newIORef pst
wrangler <- defaultCustomSem pst
loadEverything pstRef wrangler
suite <- case getListFlag TestInstructionsFlg confArgs of
[] -> error "NLP.GenI.Regression: not expecting empty instructions"
[x] -> loadNextSuite pstRef wrangler x
_ -> error "NLP.GenI.Regression: not expecting multiple instructions"
return . testGroup name $ map (mkCase pstRef wrangler) suite
goodSuiteCase :: TestMaker
goodSuiteCase pstRef wrangler tc = testCase (T.unpack (tcName tc)) $ do
    res <- runOnSemInput pstRef wrangler tc
    let sentences = map lemmaSentenceString (successes res)
        name      = tcName tc
        semStr    = prettyStr . fst3 . tcSem $ tc
        mainMsg   = "for " ++ semStr ++ ", got no results"
    assertBool mainMsg (not (null sentences))
    forM_ (tcExpected tc) $ \e ->
      assertBool ("got result: " ++ T.unpack e) (e `elem` sentences)
badSuiteCase :: TestMaker
badSuiteCase pstRef wrangler tc = testCase (T.unpack (tcName tc)) $ do
res <- runOnSemInput pstRef wrangler tc
let sentences = map lemmaSentenceString (successes res)
assertBool "no results" (null sentences)
runOnSemInput :: ProgStateRef -> CustomSem SemInput -> TestCase SemInput -> IO [GeniResult]
runOnSemInput pstRef wrangler tc = do
pst <- readIORef pstRef
let config = pa pst
go = case getBuilderType config of
SimpleBuilder -> helper pst simpleBuilder_2p
SimpleOnePhaseBuilder -> helper pst simpleBuilder_1p
sort `fmap` go
where
helper pst b = (grResults . simplifyResults) <$>
(runErrorT $ runGeni pst wrangler b tc)
successes :: [GeniResult] -> [GeniSuccess]
successes xs = [ s | GSuccess s <- xs ]
| kowey/GenI | geni-test/NLP/GenI/Regression.hs | gpl-2.0 | 4,700 | 0 | 14 | 920 | 1,171 | 624 | 547 | 88 | 3 |
{-# LANGUAGE ScopedTypeVariables, NoMonomorphismRestriction, RecordWildCards #-}
module Main where
import Control.Applicative
import Control.Monad
import Control.Monad.Error
import Control.Monad.Reader
import Control.Monad.State
import Data.Conduit
import qualified Data.Conduit.List as CL
import qualified Data.Traversable as T
import qualified Data.HashMap.Lazy as HM
import Data.Maybe
import System.Directory
import System.Environment
import System.FilePath ((</>))
import System.IO
import System.Log.Logger
--
import HEP.Automation.MadGraph.Model.ADMXQLD111
import HEP.Automation.MadGraph.Run
import HEP.Automation.MadGraph.SetupType
import HEP.Automation.MadGraph.Type
--
import HEP.Automation.EventChain.Driver
import HEP.Automation.EventChain.File
import HEP.Automation.EventChain.LHEConn
import HEP.Automation.EventChain.Type.Skeleton
import HEP.Automation.EventChain.Type.Spec
import HEP.Automation.EventChain.Type.Process
import HEP.Automation.EventChain.SpecDSL
import HEP.Automation.EventChain.Simulator
import HEP.Automation.EventChain.Process
import HEP.Automation.EventChain.Process.Generator
import HEP.Automation.EventGeneration.Config
import HEP.Automation.EventGeneration.Type
import HEP.Automation.EventGeneration.Work
import HEP.Parser.LHE.Type
import HEP.Parser.LHE.Sanitizer.Type
import HEP.Storage.WebDAV
--
import qualified Paths_madgraph_auto as PMadGraph
import qualified Paths_madgraph_auto_model as PModel
jets = [1,2,3,4,-1,-2,-3,-4,21]
leptons = [11,13,-11,-13]
lepplusneut = [11,12,13,14,-11,-12,-13,-14]
adms = [9000201,-9000201,9000202,-9000202]
sup = [1000002,-1000002]
sdownR = [2000001,-2000001]
p_gluino = d ([1000021], [t lepplusneut, t jets, t jets, t adms])
p_2sg_2l4j2x :: DCross
p_2sg_2l4j2x = x (t proton, t proton, [p_gluino, p_gluino])
idx_2sg_2l4j2x :: CrossID ProcSmplIdx
idx_2sg_2l4j2x = mkCrossIDIdx (mkDICross p_2sg_2l4j2x)
map_2sg_2l4j2x :: ProcSpecMap
map_2sg_2l4j2x =
HM.fromList [(Nothing , MGProc [] [ "p p > go go QED=0" ])
,(Just (3,1000021,[]), MGProc [ "define lep = e+ e- mu+ mu- ve ve~ vm vm~ "
, "define sxx = sxxp sxxp~ "]
[ "go > lep j j sxx " ] )
,(Just (4,1000021,[]), MGProc [ "define lep = e+ e- mu+ mu- ve ve~ vm vm~ "
, "define sxx = sxxp sxxp~ "]
[ "go > lep j j sxx " ] )
]
modelparam mgl msq msl mneut = ADMXQLD111Param mgl msq msl mneut
-- |
mgrunsetup :: Int -> RunSetup
mgrunsetup n =
RS { numevent = n
, machine = LHC8 ATLAS
, rgrun = Auto
, rgscale = 200.0
, match = NoMatch
, cut = NoCut
, pythia = RunPYTHIA
, lhesanitizer = LHESanitize (Replace [(9000201,1000022),(-9000201,1000022)])
, pgs = RunPGS (AntiKTJet 0.4,NoTau)
, uploadhep = NoUploadHEP
, setnum = 1
}
worksets = [ (mgl,msq,50000,50000, 10000) | mgl <- [800], msq <- [500,600,700,800, 1300,1400,1500,1600] ]
-- mgl <- [100,200..2000], msq <- [100,200..2000] ]
main :: IO ()
main = do
args <- getArgs
let fp = args !! 0
n1 = read (args !! 1) :: Int
n2 = read (args !! 2) :: Int
updateGlobalLogger "MadGraphAuto" (setLevel DEBUG)
mapM_ (scanwork fp) (drop (n1-1) . take n2 $ worksets )
{-
-- |
getScriptSetup :: FilePath -- ^ sandbox directory
-> FilePath -- ^ mg5base
-> FilePath -- ^ main montecarlo run
-> IO ScriptSetup
getScriptSetup dir_sb dir_mg5 dir_mc = do
dir_mdl <- (</> "template") <$> PModel.getDataDir
dir_tmpl <- (</> "template") <$> PMadGraph.getDataDir
return $
SS { modeltmpldir = dir_mdl
, runtmpldir = dir_tmpl
, sandboxdir = dir_sb
, mg5base = dir_mg5
, mcrundir = dir_mc
}
-}
scanwork :: FilePath -> (Double,Double,Double,Double,Int) -> IO ()
scanwork fp (mgl,msq,msl,mneut,n) = do
homedir <- getHomeDirectory
getConfig fp >>=
maybe (return ()) (\ec -> do
let ssetup = evgen_scriptsetup ec
whost = evgen_webdavroot ec
pkey = evgen_privatekeyfile ec
pswd = evgen_passwordstore ec
Just cr <- getCredential pkey pswd
let wdavcfg = WebDAVConfig { webdav_credential = cr
, webdav_baseurl = whost }
param = modelparam mgl msq msl mneut
mgrs = mgrunsetup n
evchainGen ADMXQLD111
ssetup
("Work20130610_2sg","2sg_2l4j2x")
param
map_2sg_2l4j2x p_2sg_2l4j2x
mgrs
let wsetup' = getWorkSetupCombined ADMXQLD111 ssetup param ("Work20130610_2sg","2sg_2l4j2x") mgrs
wsetup = wsetup' { ws_storage = WebDAVRemoteDir "montecarlo/admproject/XQLD/8TeV/scan_2sg_2l4j2x" }
putStrLn "phase2work start"
phase2work wsetup
putStrLn "phase3work start"
phase3work wdavcfg wsetup
)
phase2work :: WorkSetup ADMXQLD111 -> IO ()
phase2work wsetup = do
r <- flip runReaderT wsetup . runErrorT $ do
ws <- ask
let (ssetup,psetup,param,rsetup) =
((,,,) <$> ws_ssetup <*> ws_psetup <*> ws_param <*> ws_rsetup) ws
cardPrepare
case (lhesanitizer rsetup,pythia rsetup) of
(NoLHESanitize,_) -> return ()
(LHESanitize pid, RunPYTHIA) -> do
sanitizeLHE
runPYTHIA
runPGS
runClean
(LHESanitize pid, NoPYTHIA) -> do
sanitizeLHE
cleanHepFiles
print r
return ()
-- |
phase3work :: WebDAVConfig -> WorkSetup ADMXQLD111 -> IO ()
phase3work wdav wsetup = do
uploadEventFull NoUploadHEP wdav wsetup
return ()
| wavewave/lhc-analysis-collection | exe/2013-06-10-XQLD-2sg.hs | gpl-3.0 | 5,991 | 0 | 20 | 1,717 | 1,492 | 848 | 644 | 131 | 3 |
module Database.Design.Ampersand.Input
( module Database.Design.Ampersand.Input.ADL1.CtxError
, module Database.Design.Ampersand.Input.Parsing
) where
import Database.Design.Ampersand.Input.ADL1.CtxError (CtxError,Guarded(..),showErr)
import Database.Design.Ampersand.Input.Parsing (parseADL,parseADL1pExpr,parseRule,parseCtx)
| 4ZP6Capstone2015/ampersand | src/Database/Design/Ampersand/Input.hs | gpl-3.0 | 345 | 0 | 6 | 34 | 75 | 54 | 21 | 5 | 0 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UndecidableInstances #-}
{-
Copyright : Copyright (C) 2014-2017 Synchrotron Soleil
License : GPL3+
Maintainer : [email protected]
Stability : Experimental
Portability: GHC only?
-}
import Numeric.LinearAlgebra (Vector, Matrix,
vecdisp, disps,
dispf)
import Numeric.Units.Dimensional.Prelude (nano, meter, degree,
(*~),
(*~~), (/~~))
import Options.Applicative hiding ((<>))
import Hkl.Lattice
import Hkl.Diffractometer
dispv :: Vector Double -> IO ()
dispv = putStr . vecdisp (disps 2)
disp :: Matrix Double -> IO ()
disp = putStr . dispf 3
-- command parsing
data Command
= Ca Double Double Double -- ca command
data Options
= Options Command
withInfo :: Parser a -> String -> ParserInfo a
withInfo opts desc = info (helper <*> opts) $ progDesc desc
parseCa :: Parser Command
parseCa = Ca
<$> argument auto (metavar "H")
<*> argument auto (metavar "K")
<*> argument auto (metavar "L")
parseCommand :: Parser Command
parseCommand = subparser $
command "ca" (parseCa `withInfo` "compute angles for the given hkl")
parseOptions :: Parser Options
parseOptions = Options <$> parseCommand
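-- Example invocation (hypothetical executable name):
-- $ hkl ca 1 0 0
-- computes and prints the diffractometer angles for the (1 0 0) reflection.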
-- Actual program logic
run :: Options -> IO ()
run (Options cmd) =
case cmd of
Ca h k l-> do
print (solution /~~ degree)
dispv (computeHkl e4c solution lattice)
disp path
where
(sol, path) = computeAngles e4c angles lattice mode [h, k, l]
s = [30.0, 0.0, 0.0, 0.0, 10.0, 0.0]
d = [60.0]
angles = (s ++ d) *~~ degree
solution = fromMode mode sol angles
lattice = Cubic (1.54 *~ nano meter)
mode = ModeHklE4CConstantPhi
main :: IO ()
main = run =<< execParser
(parseOptions `withInfo` "Interact with hkl API")
| picca/hkl | contrib/haskell/src/hkl.hs | gpl-3.0 | 2,046 | 0 | 13 | 653 | 550 | 298 | 252 | 48 | 1 |
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE Rank2Types #-}
module Data.Colour.Manifold (
-- * Full colour space
Colour, QuantisedColour(..)
-- * 2D/1D projected colour space
, ColourMap, planarColourMap, colourCurve, colourMapPlane, spectralSwing
, ColourPlane, cpCold, cpNeutral, cpHot, spanColourPlane
-- * Mapping data to colours
, ColourMappable(..)
-- * Predefined colour maps
, SimpleColourMap, blackBlueYellowRed, brightVsRed, redVsBlue
) where
import Data.Functor (($>))
import Control.Applicative (empty)
import Control.Applicative.Constrained
import Control.Arrow.Constrained
import Data.Semigroup
import Data.Manifold.PseudoAffine
import Data.Manifold.Types
import Data.Manifold.Atlas
import Data.Manifold.Riemannian
import Data.VectorSpace
import Data.AffineSpace
import Data.AdditiveGroup
import Data.Manifold.Shade (Shade(..), Shade'(..), rangeWithinVertices)
import Data.Colour.SRGB (toSRGB, toSRGB24)
import Data.Colour.SRGB.Linear
import Data.Colour hiding (AffineSpace)
import Data.Colour.Names
import Math.LinearMap.Category
import Linear.V2
import Linear.V3
import qualified Prelude as Hask
import Control.Category.Constrained.Prelude
import Codec.Picture.Types
import Data.Coerce
import Data.Type.Coercion
import Data.CallStack
import Control.Lens
newtype ColourNeedle = ColourNeedle { getRGBNeedle :: RGB ℝ } deriving (Eq, Show)
asV3Needle :: ColourNeedle -+> V3 ℝ
asV3Needle = LinearFunction $ \(ColourNeedle (RGB r g b)) -> V3 r g b
fromV3Needle :: V3 ℝ -+> ColourNeedle
fromV3Needle = LinearFunction $ \(V3 r g b) -> ColourNeedle $ RGB r g b
asV3Tensor :: (ColourNeedle⊗w) -+> (V3 ℝ⊗w)
asV3Tensor = LinearFunction $ \(Tensor (RGB r g b)) -> Tensor $ V3 r g b
fromV3Tensor :: (V3 ℝ⊗w) -+> (ColourNeedle⊗w)
fromV3Tensor = LinearFunction $ \(Tensor (V3 r g b)) -> Tensor $ RGB r g b
fromV3LinMap :: (V3 ℝ+>w) -+> (ColourNeedle+>w)
fromV3LinMap = LinearFunction $ \(LinearMap (V3 r g b)) -> LinearMap $ RGB r g b
withRGBNeedle :: (RGB Double -> RGB Double) -> ColourNeedle -> ColourNeedle
withRGBNeedle f (ColourNeedle q) = ColourNeedle $ f q
instance AdditiveGroup ColourNeedle where
zeroV = ColourNeedle $ RGB 0 0 0
negateV = withRGBNeedle $ fmap negate
ColourNeedle q ^+^ ColourNeedle s = ColourNeedle $ liftA2 (+) q s
instance VectorSpace ColourNeedle where
type Scalar ColourNeedle = ℝ
(*^)μ = withRGBNeedle $ fmap (μ*)
instance TensorSpace ColourNeedle where
type TensorProduct ColourNeedle w = RGB w
scalarSpaceWitness = ScalarSpaceWitness
linearManifoldWitness = LinearManifoldWitness BoundarylessWitness
zeroTensor = Tensor (RGB zeroV zeroV zeroV)
toFlatTensor = LinearFunction $ \(ColourNeedle (RGB r g b)) -> Tensor (RGB r g b)
fromFlatTensor = LinearFunction $ \(Tensor (RGB r g b)) -> ColourNeedle (RGB r g b)
addTensors (Tensor (RGB r g b)) (Tensor (RGB r' g' b'))
= Tensor $ RGB (r^+^r') (g^+^g') (b^+^b')
subtractTensors (Tensor (RGB r g b)) (Tensor (RGB r' g' b'))
= Tensor $ RGB (r^-^r') (g^-^g') (b^-^b')
negateTensor = LinearFunction $ \(Tensor (RGB r g b))
-> Tensor (RGB (negateV r) (negateV g) (negateV b))
scaleTensor = bilinearFunction $ \μ (Tensor (RGB r g b))
-> Tensor (RGB (μ*^r) (μ*^g) (μ*^b))
tensorProduct = bilinearFunction $ \(ColourNeedle (RGB r g b)) w
-> Tensor (RGB (r*^w) (g*^w) (b*^w))
transposeTensor = (getLinearFunction fmapTensor fromV3Needle)
. transposeTensor . asV3Tensor
fmapTensor = bilinearFunction $ \f (Tensor (RGB r g b))
-> Tensor $ RGB (f $ r) (f $ g) (f $ b)
fzipTensorWith = bilinearFunction $ \f (Tensor (RGB r g b), Tensor (RGB r' g' b'))
-> Tensor $ RGB (f $ (r,r')) (f $ (g,g')) (f $ (b,b'))
coerceFmapTensorProduct _ Coercion = Coercion
wellDefinedTensor t@(Tensor (RGB r g b))
= wellDefinedVector r >> wellDefinedVector g >> wellDefinedVector b $> t
instance LinearSpace ColourNeedle where
type DualVector ColourNeedle = ColourNeedle
linearId = LinearMap $ RGB (ColourNeedle $ RGB 1 0 0)
(ColourNeedle $ RGB 0 1 0)
(ColourNeedle $ RGB 0 0 1)
tensorId = ti dualSpaceWitness (asTensor $ id)
where ti :: ∀ w . (TensorSpace w, Scalar w ~ ℝ)
=> DualSpaceWitness w -> Tensor ℝ (DualVector w) w
-> Tensor ℝ ColourNeedle w+>Tensor ℝ ColourNeedle w
ti DualSpaceWitness wid = LinearMap $ RGB
(fmap (LinearFunction $ \w -> Tensor $ RGB w zeroV zeroV) $ wid)
(fmap (LinearFunction $ \w -> Tensor $ RGB zeroV w zeroV) $ wid)
(fmap (LinearFunction $ \w -> Tensor $ RGB zeroV zeroV w) $ wid)
coerceDoubleDual = Coercion
dualSpaceWitness = DualSpaceWitness
contractTensorMap = LinearFunction $ \(LinearMap (RGB (Tensor (RGB r _ _))
(Tensor (RGB _ g _))
(Tensor (RGB _ _ b))))
-> r ^+^ g ^+^ b
contractMapTensor = LinearFunction $ \(Tensor (RGB (LinearMap (RGB r _ _))
(LinearMap (RGB _ g _))
(LinearMap (RGB _ _ b))))
-> r ^+^ g ^+^ b
contractLinearMapAgainst = bilinearFunction $ \(LinearMap (RGB r g b)) f
-> channelRed (getRGBNeedle $ f $ r)
+ channelGreen (getRGBNeedle $ f $ g)
+ channelBlue (getRGBNeedle $ f $ b)
applyDualVector = bilinearFunction $
\(ColourNeedle (RGB r' g' b')) (ColourNeedle (RGB r g b))
-> r'*r + g'*g + b'*b
applyLinear = bilinearFunction $ \(LinearMap (RGB r' g' b')) (ColourNeedle (RGB r g b))
-> r'^*r ^+^ g'^*g ^+^ b'^*b
applyTensorFunctional = bilinearFunction
$ \(LinearMap (RGB r' g' b')) (Tensor (RGB r g b))
-> r'<.>^r + g'<.>^g + b'<.>^b
applyTensorLinMap = bilinearFunction
$ \(LinearMap (RGB r' g' b')) (Tensor (RGB r g b))
-> (r'+$r) ^+^ (g'+$g) ^+^ (b'+$b)
where f+$x = getLinearFunction (getLinearFunction applyLinear $ fromTensor $ f) x
composeLinear = bilinearFunction $ \f (LinearMap (RGB r' g' b'))
-> LinearMap $ RGB (f +$ r') (f +$ g') (f +$ b')
where f+$x = getLinearFunction (getLinearFunction applyLinear f) x
instance SemiInner ColourNeedle where
dualBasisCandidates = cartesianDualBasisCandidates
[ColourNeedle (RGB 1 0 0), ColourNeedle (RGB 0 1 0), ColourNeedle (RGB 0 0 1)]
(\(ColourNeedle (RGB r g b)) -> abs <$> [r,g,b])
tensorDualBasisCandidates = map (second $ getLinearFunction asV3Tensor)
>>> tensorDualBasisCandidates
>>> map (fmap $ second $ getLinearFunction fromV3LinMap)
instance FiniteDimensional ColourNeedle where
data SubBasis ColourNeedle = ColourNeedleBasis
entireBasis = ColourNeedleBasis
enumerateSubBasis ColourNeedleBasis
= ColourNeedle <$> [RGB 1 0 0, RGB 0 1 0, RGB 0 0 1]
decomposeLinMap (LinearMap (RGB r g b)) = (ColourNeedleBasis, ([r,g,b]++))
decomposeLinMapWithin ColourNeedleBasis (LinearMap (RGB r g b)) = pure ([r,g,b]++)
recomposeSB ColourNeedleBasis [] = (ColourNeedle $ RGB 0 0 0, [])
recomposeSB ColourNeedleBasis [r] = (ColourNeedle $ RGB r 0 0, [])
recomposeSB ColourNeedleBasis [r,g] = (ColourNeedle $ RGB r g 0, [])
recomposeSB ColourNeedleBasis (r:g:b:l) = (ColourNeedle $ RGB r g b, l)
recomposeSBTensor ColourNeedleBasis sbw l
= let (r,l') = recomposeSB sbw l
(g,l'') = recomposeSB sbw l'
(b,l''') = recomposeSB sbw l''
in (Tensor $ RGB r g b, l''')
recomposeLinMap ColourNeedleBasis [] = (LinearMap $ RGB zeroV zeroV zeroV, [])
recomposeLinMap ColourNeedleBasis [r] = (LinearMap $ RGB r zeroV zeroV, [])
recomposeLinMap ColourNeedleBasis [r,g] = (LinearMap $ RGB r g zeroV, [])
recomposeLinMap ColourNeedleBasis (r:g:b:l) = (LinearMap $ RGB r g b, l)
recomposeContraLinMap f l = LinearMap $ RGB (f $ fmap (channelRed . getRGBNeedle) l)
(f $ fmap (channelGreen . getRGBNeedle) l)
(f $ fmap (channelBlue . getRGBNeedle) l)
recomposeContraLinMapTensor = rclmt dualSpaceWitness
where rclmt :: ∀ u w f . ( Hask.Functor f
, FiniteDimensional u, LinearSpace w
, Scalar u ~ ℝ, Scalar w ~ ℝ )
=> DualSpaceWitness u
-> (f ℝ -> w) -> f (ColourNeedle+>DualVector u)
-> (ColourNeedle⊗u)+>w
rclmt DualSpaceWitness fw mv = LinearMap $
(\c -> fromLinearMap $ recomposeContraLinMap fw
$ fmap (\(LinearMap q) -> c q) mv)
<$> RGB channelRed channelGreen channelBlue
uncanonicallyFromDual = id
uncanonicallyToDual = id
fromLinearMap :: ∀ s u v w . (LinearSpace u, Scalar u ~ s)
=> LinearMap s (DualVector u) w -> Tensor s u w
fromLinearMap = case dualSpaceWitness :: DualSpaceWitness u of
DualSpaceWitness -> coerce
asTensor :: ∀ s u v w . (LinearSpace u, Scalar u ~ s)
=> LinearMap s u w -> Tensor s (DualVector u) w
asTensor = coerce
fromTensor :: ∀ s u v w . (LinearSpace u, Scalar u ~ s)
=> Tensor s (DualVector u) w -> LinearMap s u w
fromTensor = coerce
instance Semimanifold ColourNeedle where
type Needle ColourNeedle = ColourNeedle
fromInterior = id; toInterior = pure
translateP = pure (^+^)
instance PseudoAffine ColourNeedle where
ColourNeedle q .-~. ColourNeedle s = pure . ColourNeedle $ liftA2 (-) q s
instance Atlas ColourNeedle where
type ChartIndex ColourNeedle = ()
interiorChartReferencePoint _ () = zeroV
lookupAtlas _ = ()
instance AffineSpace ColourNeedle where
type Diff ColourNeedle = ColourNeedle
(.-.) = (.-~!)
(.+^) = (.+~^)
fromLtdRGB :: LtdCol -> Colour ℝ
fromLtdRGB = fmap (\(CD¹ h Origin) -> h) >>> \(RGB r g b) -> rgb r g b
toLtdRGB :: Colour ℝ -> LtdCol
toLtdRGB = toRGB >>> fmap ((`CD¹`Origin) . min 1 . max 0)
type LtdCol = RGB (CD¹ ℝ⁰)
bijectToLtd :: ℝ -> CD¹ ℝ⁰
bijectToLtd 0 = CD¹ 0.5 Origin
bijectToLtd y
    | ψ > 0.5    = CD¹ 0 Origin
    | ψ > -0.5   = CD¹ ( 0.5 - ψ ) Origin
    | otherwise  = CD¹ 1 Origin
where ψ = (1 - sqrt(1+y^2)) / (2*y)
-- y = (x - 1/2) / (x*(1 - x))
-- y * x * (1 - x) = x - 1/2
-- y * x² - (1 - y) * x - 1/2 = 0
-- y * x² + (y - 1) * x - 1/2 = 0
-- x = (1 - y ± sqrt( (1-y)² + 2*y ) ) / (-2*y)
-- = (y - 1 +! sqrt( 1 + y² ) ) / (2*y) -- unstable for y ≈ 0
-- = 1/2 - (1 - sqrt( 1 + y² ) ) / (2*y)
bijectFromLtd :: CD¹ ℝ⁰ -> Maybe ℝ
bijectFromLtd (CD¹ x Origin)
| x>0 && x<1 = return $ (x - 0.5) / (x*(1 - x))
| otherwise = empty
instance Semimanifold (Colour ℝ) where
type Interior (Colour ℝ) = ColourNeedle
type Needle (Colour ℝ) = ColourNeedle
fromInterior (ColourNeedle q) = fromLtdRGB $ fmap bijectToLtd q
toInterior = fmap ColourNeedle . toin . toLtdRGB
where toin (RGB r g b) = liftA3 RGB (bijectFromLtd r) (bijectFromLtd g) (bijectFromLtd b)
translateP = pure (^+^)
instance PseudoAffine (Colour ℝ) where
c .-~. ζ = liftA2 (^-^) (toInterior c) (toInterior ζ)
instance Geodesic (Colour ℝ) where
geodesicBetween a b = return $ \(D¹ q) -> blend ((q+1)/2) b a
instance Geodesic ColourNeedle where
geodesicBetween (ColourNeedle (RGB r g b)) (ColourNeedle (RGB r' g' b'))
= return $ \(D¹ q) -> let η' = (q+1)/2 in ColourNeedle
$ RGB (lerp r r' η')
(lerp g g' η')
(lerp b b' η')
instance Atlas (Colour ℝ) where
type ChartIndex (Colour ℝ) = ()
chartReferencePoint () = grey
interiorChartReferencePoint = \_ () -> intGrey
where Just intGrey = toInterior (grey :: Colour ℝ)
lookupAtlas _ = ()
class QuantisedColour c where
quantiseColour :: Colour ℝ -> c
instance QuantisedColour PixelRGBF where
quantiseColour c = PixelRGBF r g b
where RGB r g b = fmap realToFrac $ toSRGB c
instance QuantisedColour PixelRGB8 where
quantiseColour c = PixelRGB8 r g b
where RGB r g b = toSRGB24 c
-- | A two-dimensional, smoothly varying colour palette.
data ColourMap x = ColourMap {
_cmPlane :: ColourPlane
, _cmSpectSwing :: ℝ
}
planarColourMap :: ColourPlane -> ColourMap x
planarColourMap = (`ColourMap`0)
colourCurve :: ColourPlane -> ℝ -> ColourMap ℝ
colourCurve = ColourMap
spectralSwing :: (Needle x ~ ℝ) => Traversal' (ColourMap x) ℝ
spectralSwing = lens _cmSpectSwing (\cm sw' -> cm{_cmSpectSwing = sw'})
colourMapPlane :: Traversal' (ColourMap x) ColourPlane
colourMapPlane = lens _cmPlane (\cm pl' -> cm{_cmPlane = pl'})
data ColourPlane = ColourPlane {
_cpCold :: Colour ℝ
, _cpNeutral :: Interior (Colour ℝ)
, _cpHot :: Colour ℝ
}
makeLenses ''ColourPlane
spanColourPlane :: Interior (Colour ℝ) -- ^ Neutral colour
-> (Colour ℝ, Colour ℝ) -- ^ Extreme “cold” / “hot” colours
-> ColourPlane
spanColourPlane neutral (cold,hot) = ColourPlane cold neutral hot
class Geodesic x => ColourMappable x where
type ColourMapped x :: *
type MappingVertex x :: *
mapToColourWith :: HasCallStack
=> ColourMap (MappingVertex x)
-> Interior (MappingVertex x)
-> (MappingVertex x, MappingVertex x)
-> x
-> ColourMapped x
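-- A usage sketch (hypothetical): map values from the range [-1,1] to colours,
-- treating 0 as the neutral point of the palette defined further below:
--
-- tempColour :: ℝ -> Colour ℝ
-- tempColour = mapToColourWith blackBlueYellowRed 0 (-1, 1)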
instance ColourMappable ℝ where
type ColourMapped ℝ = Colour ℝ
type MappingVertex ℝ = ℝ
mapToColourWith (ColourMap (ColourPlane coldC neutralC hotC) swing)
neutralP (coldP, hotP)
= (\(Shade c _) -> fromInterior c)
. shFn
. \x -> let φ = 2*(x-neutralP)/(hotP-coldP)
in Shade ( (1 - φ)/2 + (φ^2 - 1)*exp swing/2
, (φ + 1)/2 + (φ^2 - 1)*exp swing/2 )
(spanNorm [(256,0), (0,256)])
:: Shade (ℝ,ℝ)
where Just shFn = rangeWithinVertices ((0,0), neutralC)
[((1,0), coldC), ((0,1), hotC)]
instance ColourMappable (ℝ,ℝ) where
type ColourMapped (ℝ,ℝ) = Colour ℝ
type MappingVertex (ℝ,ℝ) = (ℝ,ℝ)
mapToColourWith (ColourMap cp swing)
(xN,yN) ((xCold,yCold), (xHot,yHot))
= mapToColourWith (ColourMap cp swing) (V2 xN yN) (V2 xCold yCold, V2 xHot yHot)
. \(x,y) -> (V2 x y)
instance ColourMappable ℝ² where
type ColourMapped ℝ² = Colour ℝ
type MappingVertex ℝ² = ℝ²
mapToColourWith (ColourMap (ColourPlane coldC neutralC hotC) swing)
neutralP (coldP, hotP)
= (\(Shade c _) -> fromInterior c)
. shFn
. \xy -> Shade xy quantisationNorm
where Just shFn = rangeWithinVertices (neutralP, neutralC)
[(coldP, coldC), (hotP, hotC)]
quantisationNorm = scaleNorm 256 . dualNorm
$ spanVariance [coldP^-^neutralP, hotP^-^neutralP]
class ColourMappable x => HasSimpleColourMaps x where
simpleColourMap :: ColourPlane -> ℝ -> ColourMap x
simpleColourMap = const . planarColourMap
instance HasSimpleColourMaps ℝ where
simpleColourMap = colourCurve
instance HasSimpleColourMaps (ℝ,ℝ)
instance HasSimpleColourMaps ℝ²
type SimpleColourMap = ∀ x . HasSimpleColourMaps x => ColourMap x
blackBlueYellowRed :: SimpleColourMap
blackBlueYellowRed
= simpleColourMap (spanColourPlane neutralc (darkblue,goldenrod)) 1
where Just neutralc = toInterior (dimgrey :: Colour ℝ)
redVsBlue :: SimpleColourMap
redVsBlue
= simpleColourMap (spanColourPlane neutralc (rgb 0.9 0 0.2, rgb 0.1 0.3 1)) (-1/2)
where neutralc = ColourNeedle $ RGB (-1.2) (-0.5) (-1.5)
brightVsRed :: SimpleColourMap
brightVsRed
= simpleColourMap (spanColourPlane neutralc (white, orangered)) 1
where Just neutralc = toInterior (darkgrey :: Colour ℝ)
| leftaroundabout/colour-space | src/Data/Colour/Manifold.hs | gpl-3.0 | 16,761 | 118 | 17 | 4,622 | 5,908 | 3,137 | 2,771 | 325 | 1 |
{- Goal: generate a minimal subset of SQL through which every operation can be expressed -}
module Database.SQL.SQLSolvent.NaiveSql (
Ariphm(Plus, Minus, Multi, Div)
,ClauseOp(Much, Less, Equ, NoEqu,MEqu, LEqu)
,Logic(And, Or)
,Clause(Clause)
,Where (Where)
,From (From)
,Join (LeftJoin, InnerJoin)
,SelectList (SelectList)
,Select (Select)
) where
import Database.SQL.SQLSolvent.Types
import Data.Text as T
import Data.List as L
--mssql
--table, subquery
data Ariphm = Plus | -- +
Minus | -- -
Multi | -- *
Div -- /
data ClauseOp = Much | -- >
Less | -- <
Equ | -- =
NoEqu | -- !=
MEqu | -- >=
LEqu -- <=
instance Show ClauseOp where
show Much = " > "
show Less = " < "
show Equ = " = "
show NoEqu = " != "
show MEqu = " >= "
show LEqu = " <= "
data Logic = And |
Or
--like t1.c1 = t2.c2
data Clause = Clause ((TableName, FieldName), ClauseOp, (TableName, FieldName))
instance Show Clause where
    show (Clause ((t1, f1), op, (t2, f2))) =
      T.unpack t1 ++ "." ++ T.unpack f1 ++ show op ++ T.unpack t2 ++ "." ++ T.unpack f2
data Where = Where [Clause]
instance Show Where where
show (Where clauses) = " WHERE " ++ L.foldl1 (++) (fmap show clauses)
data From = From TableName
instance Show From where
    show (From t) = " FROM " ++ T.unpack t
data Join = LeftJoin TableName [Clause] | InnerJoin TableName [Clause]
instance Show Join where
    show (LeftJoin t clauses)  = " LEFT JOIN "  ++ T.unpack t ++ " on " ++ L.foldl1 (++) (fmap show clauses)
    show (InnerJoin t clauses) = " INNER JOIN " ++ T.unpack t ++ " on " ++ L.foldl1 (++) (fmap show clauses)
data SelectList = SelectList [(TableName, FieldName)]
instance Show SelectList where
    show (SelectList fields) =
      let showL (a,b) = T.append a $ T.append (T.pack ".") b
      in  T.unpack $ T.intercalate (T.pack ", ") (fmap showL fields)
data Select = Select SelectList From [Join] --Where
instance Show Select where
show (Select fields from joins ) = --whr
let
in "SELECT " ++
(show fields) ++
(show from) ++
(L.concat $ fmap show joins)
-- ++ (show whr)
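-- A small construction sketch (hypothetical table/field names; the Text
-- literals assume OverloadedStrings or an explicit T.pack):
--
-- exampleQ :: Select
-- exampleQ = Select (SelectList [("t1", "c1"), ("t2", "c2")])
--                   (From "t1")
--                   [InnerJoin "t2" [Clause (("t1", "id"), Equ, ("t2", "t1_id"))]]
--
-- show exampleQ then renders roughly:
--   SELECT t1.c1, t2.c2 FROM t1 INNER JOIN t2 on t1.id = t2.t1_id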
{-
renderNaiveSql :: LocGraph -> Markers -> String
renderNaiveSql lc (nm, em) = undefined
let
nodesinmarkers = filter (\(tid,_) -> tid `elem` $ fmap (\(tidd, _) -> tidd) nm ) (labNodes lc)
edgesinmarkers = filter (\(eid,_) -> eid `elem` $ fmap (\(_, _, (eidd, _)) -> eidd) em ) (laEdges lc)
querygraph = mkGraph (nodesinmarkers) (edgesinmarkers)
headRankTable = snd $ maximum $ zip ( (fmap (deg querygraph) $ nodes querygraph) $ labNodes querygraph
path = lbft
in show $ Select (SelectList ) (From (tName $ snd headnode))
[LeftJoin t
[Clause ((), Equ, ()) | ]
| ]
--Where -}
--data InsertInto = InsertInto TableName [FieldName] Select
--data Update = Update TableName [FieldName]
--data Create =
--data Drop =
| nixorn/SQL-Solvent | src/Database/SQL/SQLSolvent/NaiveSql.hs | gpl-3.0 | 3,521 | 0 | 17 | 1,219 | 789 | 451 | 338 | 76 | 0 |
{-# LANGUAGE RankNTypes, ExistentialQuantification #-}
module BlastItWithPiss.Captcha
(CurrentSsachCaptchaType
,unsafeMakeYandexCaptchaAnswer
,ssachRecaptchaKey
,cloudflareRecaptchaKey
,CAnswer(..)
,Captcha(..)
,Recaptcha(..)
,recaptchaChallengeKey
,Yandex(..)
,makabaCaptcha
) where
import Import
import BlastItWithPiss.MonadChoice
import BlastItWithPiss.Board
import BlastItWithPiss.Blast
import Control.Monad.Trans.Resource
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Text.Recognition.Antigate
type CurrentSsachCaptchaType = Yandex
ssachRecaptchaKey :: String
ssachRecaptchaKey = "6LdOEMMSAAAAAIGhmYodlkflEb2C-xgPjyATLnxx"
cloudflareRecaptchaKey :: String
cloudflareRecaptchaKey = "6LeT6gcAAAAAAAZ_yDmTMqPH57dJQZdQcu6VFqog"
-- ssachSolveMediaKey :: String
-- ssachSolveMediaKey = "oIzJ06xKCH-H6PKr8OLVMa26G06kK3qh"
type UserCode = String
data CAnswer m m' = CAnswer
{ cAdaptive :: !Bool -- ^ Adaptive captcha?
, cFields :: [Part m m']
}
deriving Show
instance Default (CAnswer m m') where
def = CAnswer True []
class Captcha a where
-- | Check if any captcha is needed and return either premade fields or key
-- needed to solve challenge.
getNewCaptcha
:: (MonadChoice m, MonadResource m')
=> Board -> Maybe Int -> UserCode
-> Blast (Either (CAnswer m m') a)
-- | If they use systems like recaptcha or solveMedia, then we know their
-- public key beforehand, so we don't have to query makaba to get our challenge.
unsafeGenNewCaptcha :: Maybe (Blast a)
unsafeGenNewCaptcha = Nothing
getCaptchaImage :: a -> Blast (LByteString, MimeType)
applyCaptcha
:: (MonadChoice m, MonadResource m')
=> a -> String
-> Blast (CAnswer m m')
getCaptchaConf :: a -> Blast CaptchaConf
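-- A usage sketch of the class (hypothetical wiring; @solveOutOfBand@ stands in
-- for whatever OCR/antigate step turns the image into an answer string):
--
-- obtainAnswer board thread usercode = do
--     ch <- getNewCaptcha board thread usercode
--     case ch of
--       Left ready -> return ready            -- adaptive: nothing to solve
--       Right key  -> do
--         (img, _mime) <- getCaptchaImage key
--         answer       <- solveOutOfBand img  -- hypothetical solver
--         applyCaptcha key answer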
newtype Recaptcha = Recaptcha {recaptchaKey :: String}
deriving (Eq, Show)
instance Captcha Recaptcha where
getNewCaptcha _ _ usercode = do
res <- makabaCaptcha usercode
if T.isPrefixOf "OK" res || T.isPrefixOf "VIP" res
then return $ Left $ CAnswer True []
else Right . Recaptcha <$> recaptchaChallengeKey ssachRecaptchaKey
unsafeGenNewCaptcha =
Just (Recaptcha <$> recaptchaChallengeKey ssachRecaptchaKey)
getCaptchaImage (Recaptcha chKey) = do
res <- httpGetLbs $
"http://www.google.com/recaptcha/api/image?c=" ++ chKey
return (res, "image/jpg")
applyCaptcha (Recaptcha chKey) answer = return $ CAnswer False $
[partBS "recaptcha_challenge_field" (fromString chKey)
,partBS "recaptcha_response_field" (T.encodeUtf8 $ T.pack answer)]
getCaptchaConf _ = return $ def
{phrase = True}
newtype Yandex = Yandex {yandexKey :: String}
deriving (Eq, Show)
instance Captcha Yandex where
getNewCaptcha _ _ usercode = do
res <- makabaCaptcha usercode
if T.isPrefixOf "OK" res || T.isPrefixOf "VIP" res
then return $ Left $ CAnswer True []
else
let str = T.unpack res
in return $ Right $ Yandex $ lastNote (yandexerr str) $ lines str
where
yandexerr a =
"Yandex captcha: Challenge ID not found in \"" ++ a ++ "\". Update code."
unsafeGenNewCaptcha = Nothing
getCaptchaImage (Yandex chKey) = do
res <- httpGetLbs $ "http://i.captcha.yandex.net/image?key=" ++ chKey
return (res, "image/gif")
applyCaptcha (Yandex chKey) answer =
return $ unsafeMakeYandexCaptchaAnswer chKey answer
getCaptchaConf _ = return $ def
{numeric = Just True}
unsafeMakeYandexCaptchaAnswer
:: (Monad m, Monad m') => String -> String -> CAnswer m m'
unsafeMakeYandexCaptchaAnswer chKey answer =
CAnswer False
[partBS "captcha" (fromString chKey)
,partBS "captcha_value_id_06" (fromString answer)
]
-- | Query adaptive captcha state
makabaCaptcha :: String -> Blast Text
makabaCaptcha usercode = do
let code = if not $ null usercode then "?usercode=" ++ usercode else []
responseBody <$> httpReqStr
(unsafeParseUrl $ ssach ++ "/makaba/captcha.fcgi" ++ code)
{requestHeaders = [(hAccept, "text/html, */*; q=0.01")
,("X-Requested-With", "XMLHttpRequest")
-- ,(hReferer, ssachThread board thread)
]}
recaptchaChallengeKey :: String -> Blast String
recaptchaChallengeKey key = do
rawjsstr <- T.unpack <$> httpGetStr recaptchaUrl
return $ fromMaybe (error $ fatalErrorMsg ++ ": " ++ rawjsstr) $
findMap getChallenge $ lines rawjsstr
where getChallenge s =
takeUntil (=='\'') <$>
stripPrefix "challenge : \'" (dropWhile isSpace s)
fatalErrorMsg =
"FATAL ERROR: getNewCaptcha: Recaptcha changed their "
++ "JSON formatting, update code"
recaptchaUrl =
"http://api.recaptcha.net/challenge?k=" ++ key ++ "&lang=en"
{-
getChallengeKey :: String -> Blast String
getChallengeKey key = do
rawjsstr <- T.unpack <$> httpGetStr ("http://api.recaptcha.net/challenge?k=" ++ key ++ "&lang=en")
return $ headNote (fatalErrorMsg ++ ": " ++ rawjsstr) $
mapMaybe getChallenge $ lines rawjsstr
where getChallenge s =
takeUntil (=='\'') <$> stripPrefix "challenge : \'" (dropWhile isSpace s)
fatalErrorMsg = "FATAL ERROR: getChallengeKey: Recaptcha changed their JSON formatting, update code"
reloadCaptcha :: String -> String -> Blast ()
reloadCaptcha key chKey = void $
httpGet $ "http://www.google.com/recaptcha/api/reload?c="
++ chKey ++ "&k=" ++ key ++ "&reason=r&type=image&lang=en"
getCaptchaImage :: String -> Blast LByteString
getCaptchaImage chKey =
httpGetLbs $ "http://www.google.com/recaptcha/api/image?c=" ++ chKey
ssachGetCaptcha :: Board -> Maybe Int -> String -> String -> Blast (Maybe LByteString)
ssachGetCaptcha board thread key chKey =
ifM (doWeNeedCaptcha board thread "")
(do reloadCaptcha key chKey
Just <$> getCaptchaImage chKey)
(return Nothing)
-}
{-
data SolveMedia = SolveMedia
{solveMediaKey :: String
,solveMediaMagic :: String
,solveMediaTStamp :: !Int
,solveMediaChalStamp :: !Int
,solveMediaCallbacks :: !(IORef Int) -- ^ How many times we reloaded captcha
,solveMediaFwv :: String
,solveMediaChid :: !(IORef String)
}
instance Captcha SolveMedia where
getNewCaptcha board thread usercode = do
res <- makabaCaptcha board thread usercode
if T.isInfixOf "OK" res || T.isInfixOf "VIP" res
then return $ Left $ CAnswer True []
else do
let ckey = fromMaybe ssachSolveMediaKey $ atMay $ lines res
chScriptLines <- fmap (lines . T.unpack) $ accept $ httpGetLbs $
"http://api.solvemedia.com/papi/challenge.script?k=" ++ ckey
let
!magic = fromMaybe (magicerr $ unlines chScriptLines) $
flip findMap chScriptLines $ \x ->
takeUntil (=='\'') . tail . dropUntil (=='\'') <$>
stripPrefix "magic" (dropWhile isSpace x)
!chalstamp = fromMaybe (chalstamperr $ unlines chScriptLines) $
flip findMap chScriptLines $ \x ->
readMay . takeUntil (==',') . dropUntil isNumber =<<
stripPrefix "chalstamp" (dropWhile isSpace x)
_puzzle <- accept $ httpGetLbs "http://api.solvemedia.com/papi/_puzzle.js"
fwid <- take 4 <$> getRandomRs ('a', 'z')
ctx_bN :: Int <- getRandomR (10, 99)
where
accept = withOverrideHeader (hAccept, "*/*")
magicerr = error . ("SolveMedia: Couldn't read magic from " ++)
chalstamperr = error . ("SolveMedia: Couldn't read chalstamp from " ++)
unsafeGenNewCaptcha = Just $
-}
| exbb2/BlastItWithPiss | src/BlastItWithPiss/Captcha.hs | gpl-3.0 | 7,979 | 0 | 15 | 2,044 | 1,173 | 622 | 551 | 111 | 2 |
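-- This snippet assumes the wrapper defined earlier in the chapter, roughly:
--
-- > newtype Reader e a = Reader (e -> a)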
runReader :: Reader e a -> e -> a
runReader (Reader f) e = f e | hmemcpy/milewski-ctfp-pdf | src/content/3.5/code/haskell/snippet07.hs | gpl-3.0 | 62 | 0 | 7 | 15 | 38 | 18 | 20 | 2 | 1 |
{-# LANGUAGE CPP #-}
{- |
UTF-8 aware string IO functions that will work across multiple platforms
and GHC versions. Includes code from Text.Pandoc.UTF8 ((C) 2010 John
MacFarlane).
Example usage:
import Prelude hiding (readFile,writeFile,appendFile,getContents,putStr,putStrLn)
import UTF8IOCompat (readFile,writeFile,appendFile,getContents,putStr,putStrLn)
import UTF8IOCompat (SystemString,fromSystemString,toSystemString,error',userError')
2013/4/10 update: we now trust that current GHC versions & platforms
do the right thing, so this file is a no-op and on its way to being removed.
Not carefully tested.
-}
-- TODO obsolete ?
module Hledger.Utils.UTF8IOCompat (
readFile,
writeFile,
appendFile,
getContents,
hGetContents,
putStr,
putStrLn,
hPutStr,
hPutStrLn,
--
SystemString,
fromSystemString,
toSystemString,
error',
userError',
usageError,
)
where
-- import Control.Monad (liftM)
-- import qualified Data.ByteString.Lazy as B
-- import qualified Data.ByteString.Lazy.Char8 as B8
-- import qualified Data.ByteString.Lazy.UTF8 as U8 (toString, fromString)
import Prelude hiding (readFile, writeFile, appendFile, getContents, putStr, putStrLn)
import System.IO -- (Handle)
-- #if __GLASGOW_HASKELL__ < 702
-- import Codec.Binary.UTF8.String as UTF8 (decodeString, encodeString, isUTF8Encoded)
-- import System.Info (os)
-- #endif
-- bom :: B.ByteString
-- bom = B.pack [0xEF, 0xBB, 0xBF]
-- stripBOM :: B.ByteString -> B.ByteString
-- stripBOM s | bom `B.isPrefixOf` s = B.drop 3 s
-- stripBOM s = s
-- readFile :: FilePath -> IO String
-- readFile = liftM (U8.toString . stripBOM) . B.readFile
-- writeFile :: FilePath -> String -> IO ()
-- writeFile f = B.writeFile f . U8.fromString
-- appendFile :: FilePath -> String -> IO ()
-- appendFile f = B.appendFile f . U8.fromString
-- getContents :: IO String
-- getContents = liftM (U8.toString . stripBOM) B.getContents
-- hGetContents :: Handle -> IO String
-- hGetContents h = liftM (U8.toString . stripBOM) (B.hGetContents h)
-- putStr :: String -> IO ()
-- putStr = bs_putStr . U8.fromString
-- putStrLn :: String -> IO ()
-- putStrLn = bs_putStrLn . U8.fromString
-- hPutStr :: Handle -> String -> IO ()
-- hPutStr h = bs_hPutStr h . U8.fromString
-- hPutStrLn :: Handle -> String -> IO ()
-- hPutStrLn h = bs_hPutStrLn h . U8.fromString
-- -- span GHC versions including 6.12.3 - 7.4.1:
-- bs_putStr = B8.putStr
-- bs_putStrLn = B8.putStrLn
-- bs_hPutStr = B8.hPut
-- bs_hPutStrLn h bs = B8.hPut h bs >> B8.hPut h (B.singleton 0x0a)
-- | A string received from or being passed to the operating system, such
-- as a file path, command-line argument, or environment variable name or
-- value. With GHC versions before 7.2 on some platforms (posix) these are
-- typically encoded. When converting, we assume the encoding is UTF-8 (cf
-- <http://www.dwheeler.com/essays/fixing-unix-linux-filenames.html#UTF8>).
type SystemString = String
-- | Convert a system string to an ordinary string, decoding from UTF-8 if
-- it appears to be UTF8-encoded and GHC version is less than 7.2.
fromSystemString :: SystemString -> String
-- #if __GLASGOW_HASKELL__ < 702
-- fromSystemString s = if UTF8.isUTF8Encoded s then UTF8.decodeString s else s
-- #else
fromSystemString = id
-- #endif
-- | Convert a unicode string to a system string, encoding with UTF-8 if
-- we are on a posix platform with GHC < 7.2.
toSystemString :: String -> SystemString
-- #if __GLASGOW_HASKELL__ < 702
-- toSystemString = case os of
-- "unix" -> UTF8.encodeString
-- "linux" -> UTF8.encodeString
-- "darwin" -> UTF8.encodeString
-- _ -> id
-- #else
toSystemString = id
-- #endif
-- | A SystemString-aware version of error.
error' :: String -> a
error' =
#if __GLASGOW_HASKELL__ < 800
-- (easier than if base < 4.9)
error . toSystemString
#else
errorWithoutStackTrace . toSystemString
#endif
-- | A SystemString-aware version of userError.
userError' :: String -> IOError
userError' = userError . toSystemString
-- | A SystemString-aware version of error that adds a usage hint.
usageError :: String -> a
usageError = error' . (++ " (use -h to see usage)")
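-- For example (illustrative):
--
-- > usageError "too many arguments"
--
-- throws an error whose message is "too many arguments (use -h to see usage)".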
| mstksg/hledger | hledger-lib/Hledger/Utils/UTF8IOCompat.hs | gpl-3.0 | 4,261 | 0 | 6 | 793 | 245 | 178 | 67 | 31 | 1 |
-- TTyped: A dependently typed programming language.
-- Copyright (C) 2018 Taran Lynn
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <https://www.gnu.org/licenses/>.
module Check where
import Reduce
import Representation
import Data.Foldable (foldlM, foldrM)
data Error = VarNotInContext String Nat Context
| InvalidArgType Term
| TypeMismatch Term Term
| NonQuantTypeApplied Term Term
deriving (Eq, Show)
ppError :: Error -> String
ppError (VarNotInContext name idx context) = "Variable " ++ name
++ "[" ++ show idx ++ "] not in context " ++ ppContext context
ppError (InvalidArgType t) = "Invalid argument type " ++ ppTerm t
++ ", argument type must either be a context or an object of type *"
ppError (TypeMismatch t1 t2) = "Got " ++ ppTerm t2 ++ " when expecting " ++ ppTerm t1
ppError (NonQuantTypeApplied t1 t2) = "Non quantified type " ++ ppTerm t1
++ " applied to " ++ ppTerm t2
checkTerm :: Term -> Context -> Either Error ()
checkTerm (C c) context = checkContext c context
checkTerm (O o) context = checkObject o context >> return ()
-- | Checks that argument types in a context are well typed.
checkContext :: Context -> Context -> Either Error ()
checkContext Star _ = return ()
checkContext (Quant name t c) context =
do checkArgType t context
checkContext c (concatTerm context name t)
return ()
-- | Returns the type of the object passed in if there are no errors.
checkObject :: Object -> Context -> Either Error Term
checkObject (Var name index) context = asSeenFrom name index context
checkObject (Prod name t o) context =
do checkArgType t context
checkObject o (concatTerm context name (reduceTerm t))
return (C Star)
checkObject (Fun name t o) context =
do checkArgType t context
let t' = reduceTerm t
ot <- checkObject o (concatTerm context name t')
case ot of
(C c) -> return (C (Quant name t' (reduceContext c)))
(O o) -> return (O (Prod name t' (reduceObject o)))
checkObject (App o1 o2) context =
do o1t <- checkObject o1 context
o2t <- checkObject o2 context
checkTerm o1t context
checkTerm o2t context
o3 <- checkApply (reduceTerm o1t) (reduceTerm o2t) o2
return (reduceTerm o3)
checkObject (Axiom _ t) context =
do checkArgType t context
return (reduceTerm t)
-- | Checks that the type of an argument is well typed.
-- Argument types should either be contexts or objects of type *.
checkArgType :: Term -> Context -> Either Error ()
checkArgType (C c) ctx = checkContext c ctx
checkArgType (O o) ctx = do t <- checkObject o ctx
case t of
C Star -> return ()
_ -> Left (InvalidArgType t)
-- | Gets the type of a variable as seen from its surrounding context.
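-- For instance, in a context corresponding to @A : *, x : A@ (where @x@ is the
-- innermost binding, index 0), looking up @x@ yields the type @A@ with its
-- variable indices shifted by one so that it still refers to @A@ from the full
-- context. (Illustrative; the shift is performed by 'addTerm'.)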
asSeenFrom :: String -> Nat -> Context -> Either Error Term
asSeenFrom name index context =
do t <- getTerm index context (contextLength context)
return (addTerm (index + 1) t)
where
getTerm _ Star _ = Left (VarNotInContext name index context)
getTerm index (Quant _ t c) len =
if index == (len - 1) then return t
else getTerm index c (len - 1)
-- | Returns the type that results from applying a term of the first type to a
-- term of the second type. The third argument is the object being applied,
-- which is substituted into the result type.
checkApply :: Term -> Term -> Object -> Either Error Term
checkApply (C (Quant _ t1 c)) t2 o =
if unify t1 t2 then return (C (substContext c o)) else Left (TypeMismatch t1 t2)
checkApply (O (Prod _ t1 o1)) t2 o2 =
if unify t1 t2 then return (O (substObject o1 o2)) else Left (TypeMismatch t1 t2)
checkApply t1 t2 _ = Left (NonQuantTypeApplied t1 t2)
-- | Determines if two terms are the same. This is basically a test for equality
-- that ignores variable names.
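-- For example, two lambda terms that differ only in the names of their bound
-- variables (say @\x:T. x@ and @\y:T. y@) unify, since only de Bruijn indices
-- are compared.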
unify :: Term -> Term -> Bool
unify (C c1) (C c2) = unifyContexts c1 c2
unify (O o1) (O o2) = unifyObjects o1 o2
unify _ _ = False
unifyContexts :: Context -> Context -> Bool
unifyContexts Star Star = True
unifyContexts (Quant _ t1 c1) (Quant _ t2 c2) = (unify t1 t2) && (unifyContexts c1 c2)
unifyContexts _ _ = False
unifyObjects :: Object -> Object -> Bool
unifyObjects (Var _ idx1) (Var _ idx2) = idx1 == idx2
unifyObjects (Prod _ t1 o1) (Prod _ t2 o2) = (unify t1 t2) && (unifyObjects o1 o2)
unifyObjects (Fun _ t1 o1) (Fun _ t2 o2) = (unify t1 t2) && (unifyObjects o1 o2)
unifyObjects (App o1 o2) (App o3 o4) = (unifyObjects o1 o3) && (unifyObjects o2 o4)
unifyObjects (Axiom name1 t1) (Axiom name2 t2) = (name1 == name2) && (unify t1 t2)
unifyObjects _ _ = False
| lambda-11235/ttyped | src/Check.hs | gpl-3.0 | 5,050 | 0 | 16 | 1,152 | 1,606 | 791 | 815 | 82 | 3 |
module Hadolint.Rule.DL3036 (rule) where
import Hadolint.Rule
import qualified Hadolint.Shell as Shell
import Language.Docker.Syntax
rule :: Rule Shell.ParsedShell
rule = simpleRule code severity message check
where
code = "DL3036"
severity = DLWarningC
message = "`zypper clean` missing after zypper use."
check (Run (RunArgs args _)) =
foldArguments (Shell.noCommands zypperInstall) args
|| ( foldArguments (Shell.anyCommands zypperInstall) args
&& foldArguments (Shell.anyCommands zypperClean) args
)
check _ = True
zypperInstall = Shell.cmdHasArgs "zypper" ["install", "in"]
zypperClean = Shell.cmdHasArgs "zypper" ["clean", "cc"]
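-- An illustrative pair of Dockerfile instructions (not taken from the test
-- suite): @RUN zypper install -y httpd@ alone triggers this rule, while
-- @RUN zypper install -y httpd && zypper clean@ satisfies it.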
{-# INLINEABLE rule #-}
| lukasmartinelli/hadolint | src/Hadolint/Rule/DL3036.hs | gpl-3.0 | 733 | 0 | 13 | 155 | 189 | 103 | 86 | 16 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.File.Projects.Locations.Instances.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes an instance.
--
-- /See:/ <https://cloud.google.com/filestore/ Cloud Filestore API Reference> for @file.projects.locations.instances.delete@.
module Network.Google.Resource.File.Projects.Locations.Instances.Delete
(
-- * REST Resource
ProjectsLocationsInstancesDeleteResource
-- * Creating a Request
, projectsLocationsInstancesDelete
, ProjectsLocationsInstancesDelete
-- * Request Lenses
, plidXgafv
, plidUploadProtocol
, plidAccessToken
, plidUploadType
, plidName
, plidCallback
) where
import Network.Google.File.Types
import Network.Google.Prelude
-- | A resource alias for @file.projects.locations.instances.delete@ method which the
-- 'ProjectsLocationsInstancesDelete' request conforms to.
type ProjectsLocationsInstancesDeleteResource =
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] Operation
-- | Deletes an instance.
--
-- /See:/ 'projectsLocationsInstancesDelete' smart constructor.
data ProjectsLocationsInstancesDelete =
ProjectsLocationsInstancesDelete'
{ _plidXgafv :: !(Maybe Xgafv)
, _plidUploadProtocol :: !(Maybe Text)
, _plidAccessToken :: !(Maybe Text)
, _plidUploadType :: !(Maybe Text)
, _plidName :: !Text
, _plidCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsInstancesDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'plidXgafv'
--
-- * 'plidUploadProtocol'
--
-- * 'plidAccessToken'
--
-- * 'plidUploadType'
--
-- * 'plidName'
--
-- * 'plidCallback'
projectsLocationsInstancesDelete
:: Text -- ^ 'plidName'
-> ProjectsLocationsInstancesDelete
projectsLocationsInstancesDelete pPlidName_ =
ProjectsLocationsInstancesDelete'
{ _plidXgafv = Nothing
, _plidUploadProtocol = Nothing
, _plidAccessToken = Nothing
, _plidUploadType = Nothing
, _plidName = pPlidName_
, _plidCallback = Nothing
}
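-- A rough usage sketch (assuming the usual gogol entry points 'newEnv',
-- 'runGoogle' and 'send' from "Network.Google"; illustrative only):
--
-- > env <- newEnv
-- > _op <- runResourceT . runGoogle env $
-- >          send (projectsLocationsInstancesDelete
-- >                  "projects/p/locations/us-central1/instances/share1")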
-- | V1 error format.
plidXgafv :: Lens' ProjectsLocationsInstancesDelete (Maybe Xgafv)
plidXgafv
= lens _plidXgafv (\ s a -> s{_plidXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
plidUploadProtocol :: Lens' ProjectsLocationsInstancesDelete (Maybe Text)
plidUploadProtocol
= lens _plidUploadProtocol
(\ s a -> s{_plidUploadProtocol = a})
-- | OAuth access token.
plidAccessToken :: Lens' ProjectsLocationsInstancesDelete (Maybe Text)
plidAccessToken
= lens _plidAccessToken
(\ s a -> s{_plidAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
plidUploadType :: Lens' ProjectsLocationsInstancesDelete (Maybe Text)
plidUploadType
= lens _plidUploadType
(\ s a -> s{_plidUploadType = a})
-- | Required. The instance resource name, in the format
-- projects\/{project_id}\/locations\/{location}\/instances\/{instance_id}
plidName :: Lens' ProjectsLocationsInstancesDelete Text
plidName = lens _plidName (\ s a -> s{_plidName = a})
-- | JSONP
plidCallback :: Lens' ProjectsLocationsInstancesDelete (Maybe Text)
plidCallback
= lens _plidCallback (\ s a -> s{_plidCallback = a})
instance GoogleRequest
ProjectsLocationsInstancesDelete
where
type Rs ProjectsLocationsInstancesDelete = Operation
type Scopes ProjectsLocationsInstancesDelete =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsLocationsInstancesDelete'{..}
= go _plidName _plidXgafv _plidUploadProtocol
_plidAccessToken
_plidUploadType
_plidCallback
(Just AltJSON)
fileService
where go
= buildClient
(Proxy ::
Proxy ProjectsLocationsInstancesDeleteResource)
mempty
| brendanhay/gogol | gogol-file/gen/Network/Google/Resource/File/Projects/Locations/Instances/Delete.hs | mpl-2.0 | 4,976 | 0 | 15 | 1,069 | 697 | 408 | 289 | 103 | 1 |
import Data.Char (digitToInt)
safeHead [] = Nothing
safeHead (x:xs) = Just x
safeTail [] = Nothing
safeTail (x:xs) = Just xs
safeLast [] = Nothing
safeLast [x] = Just x
safeLast (x:xs) = safeLast xs
safeInit [] = Nothing
safeInit [x] = Just []
safeInit (x:xs) = safeCons x (safeInit xs)
where
safeCons _ Nothing = Nothing
safeCons x (Just xs) = Just (x:xs)
--takeWhileRest :: (a -> Bool) -> [a] -> ([a], [a])
splitWith :: (a -> Bool) -> [a] -> [[a]]
splitWith p xs = map reverse $ splitWith' p [] xs
splitWith' p word [] = [word]
splitWith' p word (x:xs) =
if p x
then splitWith' p (x:word) xs
else word : splitWith' p [] xs
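-- For example (illustrative): splitWith (/= ' ') "foo bar " gives
-- ["foo","bar",""]; unlike 'words', empty chunks around separators are kept.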
instance Monad (Either String) where
Right x >>= k = k x
Left x >>= k = Left x
return = Right
  fail s = Left s  -- a failure is an error value ('Left'), not a success
saveDigitToInt d
| d >= '0' && d <= '9' = Right $ digitToInt d
| otherwise = Left $ concat ["non-digit '", [d], "'"]
asPosInt = foldl f 0
where
f acc c = acc * 10 + digitToInt c
asInt ('-':cs) = - asPosInt cs
asInt cs = asPosInt cs
| wginolas/playground | haskell/rwh/ch04/ch04.hs | lgpl-3.0 | 1,013 | 2 | 10 | 244 | 518 | 258 | 260 | 32 | 2 |
module Helpers.ChessSequences (chessMoveCounter, queenN, queenNW, queenW, queenSW, kingN, kingNW, kingW, kingSW) where
import Data.MemoCombinators (memo2, integral)
-- Speed this up by keeping row/column/diagonal sums locally.
chessMoveCounter :: (Integer -> Integer -> [(Integer, Integer)]) -> Integer -> Integer -> Integer
chessMoveCounter f = memoizedCounter where
memoizedCounter = memo2 integral integral computer where
computer 0 _ = 0
computer _ 0 = 0
computer 1 1 = 1
computer n k = sum $ map (uncurry memoizedCounter) $ f n k
queenN n k = map (\n' -> (n',k)) [1..n-1]
queenNW n k = map (\m -> (n-m, k-m)) [1..min n k - 1]
queenW n k = map (\k' -> (n, k')) [1..k-1]
queenSW n k = map (\m -> (n+m, k-m)) [1..k-1]
kingN n k = [(n - 1, k) | n > 1]
kingNW n k = [(n - 1, k - 1) | k > 1, n > 1]
kingW n k = [(n, k - 1) | k > 1]
kingSW n k = [(n + 1, k - 1) | k > 1]
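-- A usage sketch (hypothetical helper names, not part of this module):
-- counting paths for a queen-like piece that may step N, NW, W or SW:
--
-- > queenMoves n k = queenN n k ++ queenNW n k ++ queenW n k ++ queenSW n k
-- > countQueenPaths = chessMoveCounter queenMoves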
| peterokagey/haskellOEIS | src/Helpers/ChessSequences.hs | apache-2.0 | 912 | 0 | 13 | 220 | 485 | 265 | 220 | 17 | 4 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import ClassyPrelude
import Conduit
import Data.Conduit
import Filesystem
import Concord.IO
import Concord.Types
import Opts
import Types
main :: IO ()
main = do
CO inputDir outputFile <- execParser opts
let output = maybe (stdoutC :: Consumer String (ResourceT IO) ())
sinkFile
outputFile
absInput <- canonicalizePath inputDir
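  -- Walk the corpus under the canonical input path, turn it into lines,
  -- render each with 'show', and stream the result to the chosen sink
  -- (stdout or the requested output file).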
runResourceT $ walkCorpus absInput
$= toLines
$$ mapC show
=$ unlinesC
=$ output
| erochest/concord | Main.hs | apache-2.0 | 716 | 0 | 14 | 289 | 142 | 73 | 69 | 23 | 1 |