| code (string, length 5..1.03M) | repo_name (string, length 5..90) | path (string, length 4..158) | license (15 classes) | size (int64, 5..1.03M) | n_ast_errors (int64, 0..53.9k) | ast_max_depth (int64, 2..4.17k) | n_whitespaces (int64, 0..365k) | n_ast_nodes (int64, 3..317k) | n_ast_terminals (int64, 1..171k) | n_ast_nonterminals (int64, 1..146k) | loc (int64, -1..37.3k) | cycloplexity (int64, -1..1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE OverloadedStrings #-}
module Collab.Test.ResponseTests
( tests
) where
import Test.HUnit
import Collab.Response
import Collab.Json
tests = TestList
[ TestCase $ "foo@bar{}" @=? makeMessage "foo" "bar" "{}"
, TestCase $ "member@bar{\"me\":true,\"name\":\"bar\",\"id\":\"foo\"}" @=?
makeMessageT "bar" (Member "foo" "bar" True)
]
| dennis84/collab-haskell | test/Collab/Test/ResponseTests.hs | mit | 370 | 0 | 10 | 69 | 80 | 44 | 36 | 10 | 1 |
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE DefaultSignatures #-}
-- |
-- Module : Exercise3
-- Description : Generics by Overloading & Clean Native Generics
-- Copyright : (c) Tom Westerhout, 2017
-- License : MIT
module Exercise3 (
Bin(..),
  -- * Kind-Indexed Generic Serialisation
-- Our 'Exercise2.Serialise' class in "Exercise2" is indeed only suitable
-- for serialisation of types of kind @*@. There is, however, an
  -- interesting quote I've come across:
--
-- > In Haskell 98, * is the only inhabited kind, that is, all values
-- > have types of kind *. GHC introduces another inhabited kind, #, for
-- > unlifted types.
--
  -- So what I don't understand is why one would need a @serialise@ function
-- for kinds other than @*@? Generic mapping, equality etc. -- sure, why
-- not, but serialisation -- I don't see it... So I'm not going to do this
-- part as I don't see how it differs from "Exercise2".
-- * Serialisation using Haskell's native Generics
-- I got writing working, but not reading. GHC generates @Read@
-- instances at compile-time, so it should be possible to obtain
  -- constructor names at compile-time... I failed to find a way to do it
-- using @Generics@.
SerialiseImpl(readPrecImpl, writePrecImpl),
Serialise(readPrec', writePrec')
-- * Rest
-- The night has passed, it's 6:30 a.m. so I'm afraid I'll stop here.
) where
import Prelude.Unicode
import Data.Maybe
import Control.Arrow(first, second)
import Control.Applicative((<|>))
import Control.Monad
import GHC.Generics
import Text.ParserCombinators.ReadP
import Data.Char(isSpace)
import qualified Data.List
type ReadS' α = String → Maybe (α, String)
appPrec ∷ Int
appPrec = 10
step ∷ Int → Int
step p = (p + 1) `mod` 11
readParensIf ∷ Bool → ReadP α → ReadP α
readParensIf p x = if p then mandatory
else (x +++ mandatory)
where mandatory = between (char '(') (char ')') x
showParensIf ∷ Bool → String → String
showParensIf p s = if p then "(" ++ s ++ ")"
else s
sepAndParen ∷ Bool → [String] → String
sepAndParen p xs =
case filter (not . null) xs of
[] -> ""
[x] -> x
xs -> showParensIf p $ Data.List.concat . Data.List.intersperse " " $ xs
-- | Compact @Serialise@ typeclass, i.e. no generic information is included
-- in the serialised data.
class SerialiseImpl α where
readPrecImpl ∷ Int → ReadP (α χ)
writePrecImpl ∷ Int → (α χ) → String
class Serialise α where
readPrec' ∷ Int → ReadP α
writePrec' ∷ Int → α → String
default readPrec' ∷ (Generic α, SerialiseImpl (Rep α))
⇒ Int → ReadP α
readPrec' p = readPrecImpl p >>= return . to
default writePrec' ∷ (Generic α, SerialiseImpl (Rep α))
⇒ Int → α → String
writePrec' p x = writePrecImpl p (from x)
-- |
-- Both functions undefined, because one simply can create no instances of
-- type 'V1'.
instance SerialiseImpl V1 where
readPrecImpl = undefined
writePrecImpl = undefined
-- |
-- Reading always succeeds as there's nothing to read; writing produces an
-- empty string.
instance SerialiseImpl U1 where
readPrecImpl _ = return U1
writePrecImpl _ U1 = ""
instance (SerialiseImpl α, SerialiseImpl β) => SerialiseImpl (α :+: β) where
readPrecImpl p = ((readPrecImpl p ∷ (SerialiseImpl α) ⇒ ReadP (α χ))
>>= return . L1)
+++ ((readPrecImpl p ∷ (SerialiseImpl β) ⇒ ReadP (β χ))
>>= return . R1)
writePrecImpl p (L1 x) = writePrecImpl p x
writePrecImpl p (R1 x) = writePrecImpl p x
instance (SerialiseImpl α, SerialiseImpl β) => SerialiseImpl (α :*: β) where
readPrecImpl p = readParensIf False $
do x <- readPrecImpl 11
skipSpaces
y <- readPrecImpl 11
return (x :*: y)
writePrecImpl p (x :*: y) = sepAndParen False $
[writePrecImpl 11 x, writePrecImpl 11 y]
instance (Serialise ν) => SerialiseImpl (K1 ι ν) where
readPrecImpl p = readPrec' p >>= return . K1
writePrecImpl p (K1 x) = writePrec' p x
instance (SerialiseImpl α, Constructor ξ) => SerialiseImpl (M1 C ξ α) where
readPrecImpl ∷ Int → ReadP (M1 C ξ α χ)
readPrecImpl p =
do string "Leaf" -- This is just to make the code compile. Next
-- line is nicer, but doesn't compile...
-- string $ conName ((M1 undefined)
-- :: (SerialiseImpl α, Constructor ξ) ⇒ (M1 C ξ α χ))
skipSpaces
x <- readPrecImpl 11
return (M1 x)
writePrecImpl p c@(M1 x) = sepAndParen (p > appPrec) $
[conName c, writePrecImpl 11 x]
instance (SerialiseImpl α, Selector σ) => SerialiseImpl (M1 S σ α) where
readPrecImpl p = readPrecImpl p >>= return . M1
writePrecImpl p (M1 x) = writePrecImpl p x
instance (SerialiseImpl α) => SerialiseImpl (M1 D δ α) where
readPrecImpl p = readPrecImpl p >>= return . M1
writePrecImpl p (M1 x) = writePrecImpl p x
data Bin a = Leaf | Bin (Bin a) a (Bin a) deriving (Eq, Read, Show, Generic)
instance Serialise Int where
readPrec' p = readS_to_P $ readsPrec p
writePrec' p x = show x
instance Serialise (Bin Int)
| twesterhout/NWI-I00032-2017-Assignments | Exercise_3/Exercise3.hs | mit | 5,554 | 0 | 14 | 1,406 | 1,364 | 726 | 638 | 99 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Network.JSONApi.PaginationSpec where
import Data.Map (toList)
import Data.Maybe (fromJust)
import Network.JSONApi
import Network.URL (importURL)
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec =
describe "Pagination" $ do
it "should return mandatory keys" $ do
let p = Pagination (PageIndex 2) (PageSize 10) (ResourceCount 30)
let results = mkPaginationLinks PageStrategy (fromJust $ importURL "/users") p
case results of
Links lm -> do
let links = toList lm
map fst links `shouldBe` ["first", "last", "next", "prev"]
it "should return proper hrefs for paging strategy" $ do
let p = Pagination (PageIndex 2) (PageSize 10) (ResourceCount 30)
let results = mkPaginationLinks PageStrategy (fromJust $ importURL "/users") p
case results of
Links lm -> do
let links = toList lm
links `shouldBe` [("first", "/users?page%5bsize%5d=10&page%5bnumber%5d=1"),
("last", "/users?page%5bsize%5d=10&page%5bnumber%5d=3"),
("next", "/users?page%5bsize%5d=10&page%5bnumber%5d=3"),
("prev", "/users?page%5bsize%5d=10&page%5bnumber%5d=1")]
it "should return proper hrefs for offset strategy" $ do
let p = Pagination (PageIndex 1) (PageSize 10) (ResourceCount 30)
let results = mkPaginationLinks OffsetStrategy (fromJust $ importURL "/users") p
case results of
Links lm -> do
let links = toList lm
links `shouldBe` [("first", "/users?page%5blimit%5d=10&page%5boffset%5d=0"),
("last", "/users?page%5blimit%5d=10&page%5boffset%5d=2"),
("next", "/users?page%5blimit%5d=10&page%5boffset%5d=2"),
("prev", "/users?page%5blimit%5d=10&page%5boffset%5d=0")]
it "should support the page strategy" $ do
let p = Pagination (PageIndex 0) (PageSize 10) (ResourceCount 20)
let results = mkPaginationLinks PageStrategy (fromJust $ importURL "/users") p
case results of
Links lm -> do
let links = toList lm
(snd . head) links `shouldBe` "/users?page%5bsize%5d=10&page%5bnumber%5d=1"
it "should support the offset strategy" $ do
let p = Pagination (PageIndex 0) (PageSize 10) (ResourceCount 20)
let results = mkPaginationLinks OffsetStrategy (fromJust $ importURL "/users") p
case results of
Links lm -> do
let links = toList lm
(snd . head) links `shouldBe` "/users?page%5blimit%5d=10&page%5boffset%5d=0"
it "should omit prev when we are on the first page of a PageStrategy" $ do
let p = Pagination (PageIndex 1) (PageSize 10) (ResourceCount 20)
let results = mkPaginationLinks PageStrategy (fromJust $ importURL "/users") p
case results of
Links lm -> do
let links = toList lm
map fst links `shouldBe` ["first", "last", "next"]
it "should omit next when we are on the last page of a PageStrategy" $ do
let p = Pagination (PageIndex 2) (PageSize 10) (ResourceCount 20)
let results = mkPaginationLinks PageStrategy (fromJust $ importURL "/users") p
case results of
Links lm -> do
let links = toList lm
map fst links `shouldBe` ["first", "last", "prev"]
it "should omit prev when we are on the first page of a OffsetStrategy" $ do
let p = Pagination (PageIndex 0) (PageSize 10) (ResourceCount 20)
let results = mkPaginationLinks OffsetStrategy (fromJust $ importURL "/users") p
case results of
Links lm -> do
let links = toList lm
map fst links `shouldBe` ["first", "last", "next"]
it "should omit next when we are on the last page of a OffsetStrategy" $ do
let p = Pagination (PageIndex 1) (PageSize 10) (ResourceCount 20)
let results = mkPaginationLinks OffsetStrategy (fromJust $ importURL "/users") p
case results of
Links lm -> do
let links = toList lm
          map fst links `shouldBe` ["first", "last", "prev"]
 | toddmohney/json-api | test/Network/JSONApi/PaginationSpec.hs | mit | 4,162 | 9 | 42 | 1,131 | 1,152 | 589 | 563 | 81 | 1 |
import Drawing
import Geometry
main = drawPicture myPicture
myPicture points =
drawPoints [a,b,c,d] &
drawLabels [a,b,c,d] ["A","B","C","D"] &
drawSegment (a,b) &
drawSegment (b,c) &
drawSegment (c,a) &
drawSegment (a,d) &
drawSegment (c,d)
where [a,c] = take 2 points
        [b,d] = circle_circle (a,c) (c,a)
 | alphalambda/k12math | contrib/MHills/GeometryLessons/code/teacher/key_lesson7c.hs | mit | 347 | 0 | 12 | 87 | 184 | 103 | 81 | 13 | 1 |
module MiniSequel.Model
where
import MiniSequel.Expression
import MiniSequel
import Data.List (intercalate)
import Data.Data
import qualified Data.Map as Map
class SequelModel a where
createModel :: Model a
data SequelType =
SequelBoolean |
SequelInteger |
SequelVarchar Int |
SequelDate |
SequelDateTime |
SequelTimeStamp |
SequelTime |
SequelDouble |
SequelEnumeration [SequelExpression] |
SequelText |
SequelSerial |
SequelFKSerial
data SequelConstraintKey = SequelCUniqKey | SequelCPrimaryKey | SequelCForeignKey
data SequelField = SequelField {
_type :: SequelType,
_name :: SequelExpression,
_default :: Maybe SequelExpression,
_null :: Bool,
_primaryKey :: Bool,
_autoIncrement :: Bool,
_unique :: Bool,
_foreignKey :: Maybe SequelExpression
} | SequelConstraint SequelConstraintKey [SequelExpression]
data Model a = Model{
_name' :: SequelExpression,
_columns :: [SequelField],
_safeCreation :: Bool
}
ifNotExists m = m { _safeCreation = True}
table name cols = Model { _name' = name, _columns = cols, _safeCreation = False}
column :: SequelExpression -> SequelType -> SequelField
column name@(SequelSymbol _) type' = SequelField {
_type = type',
_name = name,
_default = Nothing,
_null = True,
_primaryKey = False,
_autoIncrement = False,
_unique = False,
_foreignKey = Nothing
}
notNull :: SequelField -> SequelField
notNull field = field { _null = False }
autoIncrement :: SequelField -> SequelField
autoIncrement field = field { _autoIncrement = True , _type = SequelSerial }
foreignKey :: SequelExpression -> SequelField -> SequelField
foreignKey refer field = field { _foreignKey = Just refer }
primaryKey :: SequelField -> SequelField
primaryKey field = field { _primaryKey = True }
default' :: SequelExpression -> SequelField -> SequelField
default' value field = field { _default = Just value}
unique :: SequelField -> SequelField
unique field = field { _unique = True }
uniqueKey :: [SequelExpression] -> SequelField
uniqueKey = SequelConstraint SequelCUniqKey
cPrimaryKey :: [SequelExpression] -> SequelField
cPrimaryKey = SequelConstraint SequelCPrimaryKey
showNull True = " NULL "
showNull False = " NOT NULL "
showDefault _ _ Nothing = ""
showDefault qi qs (Just val) = " DEFAULT "`mappend` showExpr qi qs val
showAutoIncrement False = ""
showAutoIncrement True = " AUTO_INCREMENT "
showPrimaryKey False = ""
showPrimaryKey True = " PRIMARY KEY "
showFields qi qs table fields = intercalate ", " $ fmap (showField qi qs table) fields
showType _ _ SequelInteger = "INTEGER"
showType _ _ SequelFKSerial = "BIGINT(20) UNSIGNED"
showType _ _ (SequelVarchar size) = "VARCHAR(" `mappend` show size `mappend` ")"
showType _ _ SequelDate = "DATE"
showType _ _ SequelDateTime = "DATETIME"
showType _ _ SequelTimeStamp = "TIMESTAMP"
showType _ _ SequelTime = "TIME"
showType _ _ SequelDouble = "DOUBLE"
showType _ _ SequelText = "TEXT"
showType _ _ SequelSerial = "SERIAL"
showType _ _ SequelBoolean = " BOOLEAN"
showType qi qs (SequelEnumeration values)= "ENUM(" `mappend` intercalate "," (fmap (showExpr qi qs) values) `mappend` ")"
showModel qi qs (Model name@(SequelSymbol _) fields safe) =
"CREATE TABLE " `mappend`
(if safe then " IF NOT EXISTS " else "") `mappend`
showExpr qi qs name `mappend`
"(" `mappend`
showFields qi qs name fields `mappend`
")"
showField qi qs (SequelSymbol tableName) (SequelConstraint SequelCUniqKey fields) = "UNIQUE KEY (" `mappend` intercalate ", " (fmap (showExpr qi qs) fields) `mappend` ")"
showField qi qs (SequelSymbol tableName) (SequelConstraint SequelCPrimaryKey fields) = "PRIMARY KEY (" `mappend` intercalate ", " (fmap (showExpr qi qs) fields) `mappend` ")"
showField qi qs (SequelSymbol tableName) (SequelField t name@(SequelSymbol nme) def nul pk ai uni fk) =
showExpr qi qs name `mappend`
" " `mappend`
showType qi qs t `mappend`
showNull nul `mappend`
showDefault qi qs def `mappend`
showAutoIncrement ai `mappend`
showPrimaryKey pk `mappend`
  (if uni then " UNIQUE " else "") `mappend`
case fk of
Nothing -> ""
Just (SequelSymbolOperation Access tabS@(SequelSymbol tab) colS@(SequelSymbol col)) ->
", CONSTRAINT " `mappend` showExpr qi qs (SequelSymbol $ "fk_" `mappend` tableName `mappend` "_" `mappend` nme) `mappend`
" FOREIGN KEY(" `mappend` showExpr qi qs name `mappend` ") " `mappend`
" REFERENCES " `mappend` showExpr qi qs tabS `mappend` "(" `mappend` showExpr qi qs colS `mappend` ")"
| TachoMex/MiniSequel | src/MiniSequel/Model.hs | mit | 4,758 | 0 | 25 | 1,045 | 1,390 | 774 | 616 | 109 | 3 |
-- Syntax
-- ref: https://wiki.haskell.org/Category:Syntax
-- Declaration vs. expression style
{-
Declaration style vs. expression style (each pair shows the two equivalents):
  where clause                                <->  let expression
  function arguments on the left-hand side    <->  lambda abstraction
      f x = x*x                                        f = \x -> x*x
  pattern matching in function definitions    <->  case expression
      f [] = 0                                         f xs = case xs of [] -> 0
  guards on function definitions              <->  if expression
      f [x] | x>0 = 'a'                                f [x] = if x>0 then 'a' else ...
-}
-- Direction of data flow
-- That's just infixr/infixl
{-
In Haskell, the direction of data flow implied by a notation differs from notation to notation. Both directions occur about equally often:
from left to right:
  function definition:   f x = x*x         (input left, output right)
  lambda:                \ x -> x*x
  do notation:           do f; g
  monadic composition:   f >>= g
  let expression:        let x = 2 in x*x  (first definition, then usage)
from right to left:
  function application:  f x, f $ x        (input right, applied function left)
  composition:           g . f
  results of monads:     do x <- f
  monadic composition:   g =<< f
  where clause:          x*x where x = 2   (first usage, then definition)
-}
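-- A small made-up illustration (not from the original notes): the same function
-- written in both reading directions. The names below are invented for the example.
doubleThenShow :: Int -> String
doubleThenShow = show . (* 2)          -- right to left: (* 2) runs first, then show
doubleThenShow' :: Int -> String
doubleThenShow' x =
  let doubled = x * 2                  -- left to right: define first, then use
  in  show doubled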
-- Do notation considered harmful
{-
Didactics
-- The do notation hides functional details.
-- Since do notation is used almost everywhere IO takes place, newcomers quickly believe that the do notation is necessary for doing IO,
-- Newcomers might think that IO is somehow special and non-functional, in contrast to the advertisement for Haskell being purely functional,
-- Newcomers might think that the order of statements determines the order of execution.
-- The order of statements is also not the criterion for the evaluation order. Also here only the data dependencies count.
Library design
-- Unfortunately, the do notation is so popular that people write more things with monads than necessary. See for instance the Binary package. It contains the Put monad, which in principle has nothing to do with a monad. All "put" operations have the monadic result (). In fact it is a Writer monad using the Builder type, and all you need is just the Builder monoid. Even more unfortunate, the applicative functors were introduced to Haskell's standard libraries only after monads and arrows, thus many types are instances of Monad and Arrow classes, but not as many are instances of Applicative. There is no special syntax for applicative functors because it is hardly necessary.
Safety
-- The silent neglect of return values of functions. In an imperative language it is common to return an error code and provide the real work by side effects. In Haskell this cannot happen, because functions have no side effects. If you ignore the result of a Haskell function, the function will not even be evaluated.
-- The situation is different for IO: While processing the IO, you might still ignore the contained return value.
Additional combinators
-- Using the infix combinators for writing functions simplifies the addition of new combinators. Consider for instance a monad for random distributions. This monad cannot be an instance of MonadPlus, because there is no mzero (it would be an empty list of events, but their probabilities do not sum up to 1) and mplus is not associative because we have to normalize the sum of probabilities to 1. Thus we cannot use standard guard for this monad.
However we would like to write the following:
do f <- family
guard (existsBoy f)
return f
Given a custom combinator which performs a filtering with subsequent normalization called
(>>=?) :: Distribution a -> (a -> Bool) -> Distribution a
we can rewrite this easily:
family >>=? existsBoy
Note that the (>>=?) combinator introduces the risk of returning an invalid distribution (empty list of events),
Alternative combinators
If you are used to writing monadic functions using infix combinators (>>) and (>>=) you can easily switch to a different set of combinators.
This is useful when there is a monadic structure that does not fit into the current Monad type constructor class, where the monadic result type cannot be constrained.
This is e.g. useful for the Set data type, where the element type must have a total order.
Useful
Compare
mdo x <- f x y z
y <- g x y z
z <- h x y z
return (x+y+z)
and
mfix
(\ ~( ~(x,y,z), _) ->
do x <- f x y z
y <- g x y z
z <- h x y z
return ((x,y,z),x+y+z))
-}
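-- A hedged sketch (not part of the quoted wiki text): the same IO action written
-- with do notation and with the underlying (>>=) combinator, to make the point
-- that the sugar can always be removed.
greetDo :: IO ()
greetDo = do
  name <- getLine
  putStrLn ("Hello, " ++ name)
greetBind :: IO ()
greetBind = getLine >>= \name -> putStrLn ("Hello, " ++ name)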
-- Formatting function types
hPutBuf :: Handle -- handle to write to
-> Ptr a -- address of buffer
-> Int -- number of bytes of data in buffer
-> IO ()
-- If-Then-Else
-- Haskell without if-then-else syntax would be more logical and consistent. There would be less confusion among beginners along the lines of: "What is so special about if-then-else that it needs separate syntax? I thought it could simply be replaced by a function. Maybe there is some subtlety that I'm not able to see right now." There would also be no confusion from the interference of if-then-else syntax with do notation. Removing if-then-else simplifies every language tool: compiler, text editor, analyzer and so on.
-- replace it by
if' :: Bool -> a -> a -> a
if' True x _ = x
if' False _ y = y
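-- Example use of if' (illustrative only; the name clampToZero is made up here):
clampToZero :: Int -> Int
clampToZero n = if' (n < 0) 0 n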
-- Let vs. Where
-- Let: better
f :: State s a
f = State $ \x ->
let y = ... x ...
in y
-- Where: better
f x
| cond1 x = a
| cond2 x = g a
| otherwise = f (h x a)
where
a = w x
-- vs.
f x
= let a = w x
in case () of
_ | cond1 x -> a
| cond2 x -> g a
| otherwise -> f (h x a)
f x =
let a = w x
in select (f (h x a))
[(cond1 x, a),
(cond2 x, g a)]
f x
= let a = w x
in if cond1 x
then a
else if cond2 x
then g a
else f (h x a)
-- One other approach to consider is that let or where can often be implemented using lambda lifting and let floating, incurring at least the cost of introducing a new name. The above example:
-- where can hide the CAF, whereas let shows it explicitly
-- In the second definition below, fib' is redefined for every argument x. The compiler cannot know whether you intended this -- while it increases time complexity it may reduce space complexity. Thus it will not float the definition out from under the binding of x.
-- In contrast, in the first definition, fib' can be moved to the top level by the compiler. A where clause would hide this structure and make the application to x look like a plain eta expansion, which it is not.
fib =
let fib' 0 = 0
fib' 1 = 1
fib' n = fib (n - 1) + fib (n - 2)
in (map fib' [0 ..] !!)
fib x =
let fib' 0 = 0
fib' 1 = 1
fib' n = fib (n - 1) + fib (n - 2)
in map fib' [0 ..] !! x
-- List comprehension
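-- The heading above has no example in the original notes; a minimal one
-- (function name invented here):
pythagoreanTriples :: Int -> [(Int, Int, Int)]
pythagoreanTriples n =
  [ (a, b, c) | c <- [1 .. n], b <- [1 .. c], a <- [1 .. b]
              , a * a + b * b == c * c ]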
-- No export lists
module Important (
{- * Important functions -}
foo,
{- * Important data types -}
Number(One,Two,Three)) where
{- | most important function -}
foo :: Int
foo = 2
{- | most important data type -}
data Number =
Zero
| One
| Two
| Three
| Many
-- Pattern guard
lookup :: FiniteMap -> Int -> Maybe Int
addLookup env var1 var2
| Just val1 <- lookup env var1
, Just val2 <- lookup env var2
= val1 + val2
{-...other equations...-}
-- will check to see if both lookups succeed, and bind the results to val1 and val2 before proceeding to use the equation.
-- Pronunciation
-- An operator written between its arguments is used "infix".
-- A partially applied operator is called a "section".
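-- For instance (names invented for illustration):
addFive :: Int -> Int
addFive = (+ 5)          -- a right section of (+)
halve :: Double -> Double
halve = (/ 2)            -- right section of (/); (10 /) would be a left section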
-- Syntax Sugar
-- cons: https://wiki.haskell.org/Syntactic_sugar/Cons
-- pros: https://wiki.haskell.org/Syntactic_sugar/Pros
-- ref: https://wiki.haskell.org/ThingsToAvoid/Discussion
-- Terminator vs. separator
-- Terminator: There is one symbol after each element.
-- Separator: There is a symbol between each element. This is what the functions Data.List.intersperse and Data.List.unwords generate. In Haskell language, the following syntaxes allow separators only:
{-
Liberal choice between separators and terminators:
export lists: module A(a,b,c) where and module A(a,b,c,) where (and module A(a,b,c,,,) where ...)
import lists: import A(a,b,c) and import A(a,b,c,)
let syntax: let a = 'a'; b = 'b' in ... and let a = 'a'; b = 'b'; in ...
do syntax: do a ; b and do a; b;
-}
-- Unary operator
-- In Haskell there is only one unary operator, namely the unary minus. It has been discussed at length whether the unary minus should be part of numeric literals or an independent operator.
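-- One practical consequence (illustrative example): a negative literal passed
-- as an argument needs parentheses, because f -1 parses as the subtraction f - 1.
applyToMinusOne :: (Int -> Int) -> Int
applyToMinusOne f = f (-1)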
-- View pattern
-- ref: https://ghc.haskell.org/trac/ghc/wiki/ViewPatterns
type Typ
data TypView = Unit
| Arrow Typ Typ
view :: Typ -> TypView
-- additional operations for constructing Typ's ...
-- In current Haskell, using this signature is a little inconvenient: It is necessary to iterate the case, rather than using an equational function definition. And the situation is even worse when the matching against t is buried deep inside another pattern. In response, programmers sometimes eschew type abstraction in favor of revealing a concrete datatype that is easy to pattern-match against.
size :: Typ -> Integer
size t = case view t of
Unit -> 1
Arrow t1 t2 -> size t1 + size t2
-- View patterns permit calling the view function inside the pattern and matching against the result:
size (view -> Unit) = 1
size (view -> Arrow t1 t2) = size t1 + size t2
{-
Scoping for expr -> pat:
The variables bound by the view pattern are the variables bound by pat.
Any variables in expr are bound occurrences. Variables bound by patterns to the left of a view pattern expression are in scope. For example:
In function definitions, variables bound by matching earlier curried arguments may be used in view pattern expressions in later arguments.
example :: (String -> Integer) -> String -> Bool
example f (f -> 4) = True
Variables can be bound to the left in tuples and data constructors:
example :: ((String -> Integer,Integer), String) -> Bool
example ((f,_), f -> 4) = True
Typing: If expr has type t1 -> t2 and pat matches a t2, then the whole view pattern has type t1.
Evaluation: To match a value v against a pattern (expr -> pat), evaluate (expr v) and match the result against pat.
-}
-- Both pattern
-- A "both pattern" pat1 & pat2 matches a value against both pat1 and pat2 and succeeds only when they both succeed. A special case is as-patterns, x@p, where the first pattern is a variable. Both patterns can be programmed using view patterns:
both :: a -> (a, a)
both x = (x,x)
f (both -> (xs, h : t)) = h : (xs ++ t)
-- As-pattern
-- x@p
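-- e.g. (made-up example) keep the whole list while also naming its head:
firstTwice :: [Int] -> [Int]
firstTwice whole@(x : _) = x : whole
firstTwice []            = []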
-- N+k patterns
np :: (Num a, Ord a) => a -> a -> Maybe a
np k n | k <= n = Just (n-k)
| otherwise = Nothing
fib :: (Num a, Ord a) => a -> a
fib 0 = 1
fib 1 = 1
fib (np 2 -> Just n) = fib (n + 1) + fib n
 | Airtnp/Freshman_Simple_Haskell_Lib | Syntax.hs | mit | 11,025 | 11 | 16 | 2,729 | 1,000 | 522 | 478 | -1 | -1 |
module ModuleWithSpace where
moduleWithSpace :: Char
moduleWithSpace = ' '
| CementTheBlock/.vim | vim/bundle/ghcmod-vim/test/data/detect_module/ModuleWithSpace.hs | mit | 85 | 0 | 4 | 14 | 14 | 9 | 5 | 3 | 1 |
module Handler.System where
import Import
import System.Process (readProcess)
getSystemR :: Handler String
getSystemR = track "Handler.System.getSystemR" $
liftIO $ readProcess "df" ["-ih"] ""
| fpco/stackage-server | src/Handler/System.hs | mit | 199 | 0 | 7 | 29 | 53 | 29 | 24 | 6 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Algebra.CAS.DiffSpec (main, spec) where
import Test.Hspec
import Algebra.CAS.Base
import Algebra.CAS.Diff
main :: IO ()
main = hspec spec
x :: Formula
x = "x"
y :: Formula
y = "y"
z :: Formula
z = "z"
spec :: Spec
spec = do
describe "diff" $ do
it "diff(x+y,x)" $ do
diff (x+y) (x) `shouldBe` 1
it "diff(x*x+y,x)" $ do
diff (x*x+y) (x) `shouldBe` 2*x
it "diff(sin(x*x)+y+z,x)" $ do
diff (sin(x*x)+y+z) (x) `shouldBe` 2*cos(x*x)*x
it "diff(log(x),x)" $ do
diff (log(x)) (x) `shouldBe` 1/x
| junjihashimoto/th-cas | test/Algebra/CAS/DiffSpec.hs | mit | 577 | 0 | 22 | 134 | 283 | 150 | 133 | 24 | 1 |
{-# LANGUAGE Safe, GADTs, RankNTypes, ExistentialQuantification #-}
{-# LANGUAGE ImpredicativeTypes #-}
{-# LANGUAGE FlexibleInstances #-}
module FP15.Evaluator.RuntimeError where
import Control.Monad.Except
import Text.PrettyPrint
import FP15.Disp
import FP15.Name
-- * Error and Diagnostics
data StackFrame = StackFrame (Maybe (Located String))
deriving (Eq, Ord, Show, Read)
newtype StackTrace = StackTrace [StackFrame]
deriving (Eq, Ord, Show, Read)
emptyStackTrace :: StackTrace
emptyStackTrace = StackTrace []
-- | An FP15 runtime error.
data RuntimeError = ContractViolation { contractViolated :: String
, offendingValue :: String
, stackTrace :: StackTrace }
| PassMismatchError { expectedLength :: Int
, actualLength :: Int
, stackTrace :: StackTrace }
| ErrorMessage { messageText :: String
, stackTrace :: StackTrace }
| EnvAccessError { offendingIndex :: Int
, stackTrace :: StackTrace }
--instance Except RuntimeError where
--strMsg s = ErrorMessage s emptyStackTrace
--noMsg = ErrorMessage "Runtime error." emptyStackTrace
instance Disp StackTrace where
pretty (StackTrace st) =
joinLines $ text "Stack Trace:" : map pretty (reverse st)
instance Disp StackFrame where
pretty (StackFrame func) =
nest 2 $ maybe (text "<func unknown>") pretty func
instance Disp RuntimeError where
pretty (ContractViolation c v st) =
joinLines [text "Contract Violation:",
text "Contract: " <> text (show c),
text "Value: " <> text (disp v),
pretty st]
pretty (PassMismatchError m n st) =
joinLines [text ("Pass: Arity mismatch: Expecting " ++ show m ++ " args"),
text ("but got " ++ show n ++ "."),
text (show st)]
pretty (ErrorMessage s st) =
joinLines [text "Error:" <+> text s, text (show st)]
pretty (EnvAccessError i st) =
joinLines [text "Env access error: " <+> text (show i),
text (show st)]
joinLines :: [Doc] -> Doc
joinLines = vcat
| Ming-Tang/FP15 | src/FP15/Evaluator/RuntimeError.hs | mit | 2,290 | 0 | 12 | 723 | 554 | 296 | 258 | 47 | 1 |
{-# LANGUAGE CPP #-}
module Data.Validation.Aeson where
import Control.Monad.Identity
import Data.Aeson
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LazyBS
#if MIN_VERSION_aeson(2,0,0)
import qualified Data.Aeson.Key as Key
import qualified Data.Aeson.KeyMap as KeyMap
#else
import qualified Data.HashMap.Strict as HashMap
#endif
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified Data.Vector as Vec
import Data.Validation.Types
decodeValidJSON :: Validator a -> LazyBS.ByteString -> ValidationResult a
decodeValidJSON validator input =
runIdentity (decodeValidJSONT (liftV validator) input)
decodeValidJSONStrict :: Validator a -> BS.ByteString -> ValidationResult a
decodeValidJSONStrict validator input =
runIdentity (decodeValidJSONStrictT (liftV validator) input)
decodeValidJSONT :: Applicative m
=> ValidatorT m a -> LazyBS.ByteString -> m (ValidationResult a)
decodeValidJSONT validator input =
case eitherDecode input of
Left err -> pure $ Invalid (errMessage $ Text.pack err)
Right value -> runValidatorT validator (value :: Value)
decodeValidJSONStrictT :: Applicative m
=> ValidatorT m a -> BS.ByteString -> m (ValidationResult a)
decodeValidJSONStrictT validator input =
case eitherDecodeStrict input of
Left err -> pure $ Invalid (errMessage $ Text.pack err)
Right value -> runValidatorT validator (value :: Value)
instance Validatable Value where
inputText (String text) = Just text
inputText _ = Nothing
inputNull Null = IsNull
inputNull _ = NotNull
inputBool (Bool True) = Just True
inputBool (Bool False) = Just False
inputBool _ = Nothing
arrayItems (Array items) = Just items
arrayItems _ = Nothing
scientificNumber (Number sci) = Just sci
scientificNumber _ = Nothing
lookupChild attrName (Object hmap) = LookupResult $
#if MIN_VERSION_aeson(2,0,0)
KeyMap.lookup (Key.fromText attrName) hmap
#else
HashMap.lookup attrName hmap
#endif
lookupChild _ _ = InvalidLookup
instance ToJSON Errors where
toJSON (Messages set) = Array
. Vec.fromList
. map toJSON
. Set.toList
$ set
toJSON (Group attrs) = Object
#if MIN_VERSION_aeson(2,0,0)
. KeyMap.fromList
#else
. HashMap.fromList
#endif
. Map.toList
#if MIN_VERSION_aeson(2,0,0)
. Map.mapKeys Key.fromText
#endif
. Map.map toJSON
$ attrs
| flipstone/redcard | src/Data/Validation/Aeson.hs | mit | 2,769 | 0 | 13 | 760 | 683 | 358 | 325 | 56 | 2 |
module Section ( Section, load, flatten ) where
import Control.Applicative
import Control.Monad.Loops
import Data.Focus
import Data.Maybe
import Data.Scope
import Data.Tree hiding ( flatten )
import Data.Tree.Zipper
import System.Directory
import System.FilePath
import System.FilePath.Utils
import Section.Isolate ( Isolate )
import Section.Variables ( variables )
import qualified Section.Isolate as Isolate
import qualified Section.Info as Info
import qualified Path
import Text.Pandoc ( Pandoc )
import qualified Target.Pandoc as Pandoc
type Section = TreePos Full Isolate
load :: FilePath -> String -> Scope -> IO Section
load dir name scope = do
i <- Isolate.create dir name
discover dir scope (fromTree $ Node i [])
discover :: FilePath -> Scope -> Section -> IO Section
discover dir scope z = do
let dir' = Isolate.path dir (label z)
let pipe s hole = maybe hole nextSpace <$> discover' dir' scope s hole
subs <- subsections dir z
discovered <- concatM (map pipe subs) (children z)
return $ fromMaybe z $ parent discovered
discover' :: FilePath -> Scope -> Isolate -> TreePos Empty Isolate -> IO (Maybe Section)
discover' dir scope s z = do
let z' = insert (Node s []) z
let proceed = scope `contains` Info.location z'
if proceed then Just <$> discover dir scope z' else return Nothing
type Renderer = (Pandoc -> String)
type Expander = (Focus -> [(String, String)] -> String)
flatten :: String -> Section -> Renderer -> Expander -> String
flatten t z render expand = expand loc vars where
i = label z
fmt = Pandoc.writerName $ Isolate.name i
body = case Isolate.body i of
Just text -> render $ Pandoc.parse fmt text
Nothing -> flatten' render expand (children z) ""
vars = [("title", t), ("body", body)] ++ variables z
loc = Info.location z
flatten' :: Renderer -> Expander -> TreePos Empty Isolate -> ShowS
flatten' render expand z = case nextTree z of
Just c -> let
t = Path.title $ Isolate.name $ label c
ours = flatten t c render expand
rest = flatten' render expand $ nextSpace c
in showString ours . (showString "\n") . rest
Nothing -> showString ""
subsections :: FilePath -> Section -> IO [Isolate]
subsections path z = do
let dir = path </> Isolate.name (label z)
there <- doesDirectoryExist dir
if not there then return [] else do
files <- ls dir
mapM (uncurry Isolate.create . splitFileName) files
| Soares/Bookbuilder | src/Section.hs | mit | 2,470 | 0 | 14 | 541 | 917 | 467 | 450 | 61 | 2 |
module MipsInterpreter where
import Control.Monad.State.Lazy
import Data.Word
import Data.Int
import Data.Bits
data MipsState = MipsState { intRegisters :: [] Int32,
floatRegisters :: [] Double,
memory :: [] Word8
} deriving (Show)
type MipsEnv = State MipsState -- interpreter actions run in the State monad
initState :: MipsEnv ()
initState = do
state <- get
let iregs = intRegisters state
put state { intRegisters = replicate 32 0, memory = replicate 640000 0 }
setReg :: Int -> Int32 -> MipsEnv ()
setReg tr tv = do
state <- get
let regs = intRegisters state
put state { intRegisters = (take tr regs) ++ [tv] ++ (drop (tr+1) regs) }
return ()
getReg :: Int -> MipsEnv Int32
getReg sr = do
state <- get
let regs = intRegisters state
return (regs !! sr)
-- works as store word
-- MIPS is (according to Oleks big endian)
setMem :: Int -> Int32 -> MipsEnv ()
setMem ta tv = do
mipsEnv <- get
let
fi = fromIntegral :: Int32 -> Word8
mem = memory mipsEnv
regsI = intRegisters mipsEnv
regsF = floatRegisters mipsEnv
r3 = fi $ tv `mod` (1 `shift` 8 ) -- least significant
r2 = fi $ (tv `shift` (-8)) `mod` (1 `shift` 8)
r1 = fi $ (tv `shift` (-8*2)) `mod` (1 `shift` 8)
r0 = fi $ (tv `shift` (-8*3)) `mod` (1 `shift` 8)
in do
put mipsEnv { intRegisters = regsI,
floatRegisters = regsF,
memory = ((take ta mem) ++ [r0,r1,r2,r3] ++ (drop (ta+4) mem)) }
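-- A complementary sketch, not in the original module: load a word back from
-- memory, reassembling the four bytes in the same big-endian order that
-- setMem uses to store them.
getMem :: Int -> MipsEnv Int32
getMem sa = do
  mipsEnv <- get
  let mem = memory mipsEnv
      byte i = fromIntegral (mem !! (sa + i)) :: Int32
  return (byte 0 `shift` 24 .|. byte 1 `shift` 16 .|. byte 2 `shift` 8 .|. byte 3)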
| Sword-Smith/hfasto | src/MipsInterpreter.hs | mit | 1,523 | 0 | 19 | 464 | 605 | 329 | 276 | 42 | 1 |
module NightClubSpec where
import Test.Hspec
import Control.Exception (evaluate)
import NightClub
import Text.Show.Functions
import Data.List
---
testAll :: IO ()
testAll = hspec $ do
----------------------------------------------------------
--- TESTS PRIMERA PARTE TP FUNCIONAL ---
----------------------------------------------------------
describe "[TP Nº 1] Verificar puntos 1 y 2] " $ do
it "Nombre de Rodri debe ser 'Rodri'" $ do
(nombre rodri) `shouldBe` "Rodri"
it "Resistencia de Ana debe ser 120" $ do
(resistencia ana) `shouldBe` 120
it "Amigos de Ana deben ser [rodri]" $ do
(amigos marcos) `shouldMatchList` [rodri]
describe "[TP Nº 1] Verificar punto 3] " $ do
it "Cristian debe estar 'duro'" $ do
(comoEsta cristian) `shouldBe` "duro"
it "Rodri debe estar 'fresco'" $ do
(comoEsta rodri) `shouldBe` "fresco"
it "Marcos debe estar 'duro'" $ do
(comoEsta marcos) `shouldBe` "duro"
it "Si Marcos se hace amigo de Ana y Rodri, está 'piola'" $ do
(comoEsta . reconocerAmigo rodri . reconocerAmigo ana) marcos `shouldBe` "piola"
describe "[TP Nº 1] Verificar punto 4] " $ do
it "Cristian reconoce a Marcos como amigo" $ do
(amigos . reconocerAmigo marcos) cristian `shouldMatchList` [marcos]
it "Cristian no puede reconocerse a si mismo como amigo" $ do
reconocerAmigo cristian cristian `shouldBe` cristian
it "Cristian no puede reconocerse a Marcos dos veces como amigo" $ do
(reconocerAmigo marcos . reconocerAmigo marcos) cristian `shouldBe` cristian
describe "[TP Nº 1] Verificar punto 5] " $ do
it "Ana toma grogXD. Queda con resistencia 0" $ do
(resistencia . tomarGrogXD) ana `shouldBe` 0
it "Si Ana toma la jarraLoca. Marcos queda con resistencia 30 (-10)" $ do
(resistencia . head . amigos . tomarJarraLoca) ana `shouldBe` 30
it "Marcos toma la jarraLoca. Queda con resistencia 30" $ do
(resistencia . tomarJarraLoca) marcos `shouldBe` 30
it "Rodri toma la jarraLoca. Queda con resistencia 110" $ do
(resistencia . tomarJarraLoca) rodri `shouldBe` 45
it "Si Ana toma klusener de Huevo disminuye se resistencia a 50 (-5)" $ do
(resistencia . tomarKlusener "Huevo") ana `shouldBe` 115
it "Si Ana toma klusener de Chocolate disminuye se resistencia a 50 (-8)" $ do
(resistencia . tomarKlusener "Chocolate") ana `shouldBe` 111
it "Si Cristian toma un tintico, queda con 2 de resistencia por no tener" $ do
(resistencia . tomarTintico) cristian `shouldBe` 2
it "Ana toma un Tintico, pasa a 130 de resistencia (tiene 2 amigos)" $ do
(resistencia . tomarTintico) ana `shouldBe` 130
it "Rodri toma una Soda de fuerza 2, queda con nombre 'errpRodri'" $ do
(nombre . tomarSoda 2) rodri `shouldBe` "errpRodri"
it "Ana toma una Soda de fuerza 10, queda con nombre 'errrrrrrrrrpAna'" $ do
(nombre . tomarSoda 10) ana `shouldBe` "errrrrrrrrrpAna"
it "Ana toma una Soda de fuerza 0, queda con nombre 'epAna'" $ do
(nombre . tomarSoda 0) ana `shouldBe` "epAna"
describe "[TP Nº 1] Verificar punto 6] " $ do
it "Si Rodri se rescata por 5 horas debería tener 255 de resistencia" $ do
(resistencia . rescatarse 5) rodri `shouldBe` 255
it "Si Marcos se rescata por 3 horas debería tener 140 de resistencia" $ do
(resistencia . rescatarse 3) marcos `shouldBe` 140
it "Si Cristian se rescata por 1 horas debería tener 155 de resistencia" $ do
(resistencia . rescatarse 1) rodri `shouldBe` 155
it "Si Ana se rescata por 0 horas debería aparecer un error" $ do
evaluate ( (resistencia . rescatarse 0) ana ) `shouldThrow` anyException
it "Si Ana se rescata por menos de 0 horas debería aparecer un error" $ do
evaluate ( (resistencia . rescatarse (-1)) ana ) `shouldThrow` anyException
----------------------------------------------------------
--- TESTS SEGUNDA PARTE TP FUNCIONAL ---
----------------------------------------------------------
describe "[TP Nº 2] Verificar punto 1.a:" $ do
it "Rodri tomó un tintico" $ do
tomarTragos (tragos rodri) rodri `shouldBe` tomarTintico rodri
describe "[TP Nº 2] Verificar punto 1.b:" $ do
it "Rodri tomó un tintico" $ do
(length . tragos . tomarGrogXD) ana `shouldBe` 1
it "Marcos toma una soda de nivel 3 y queda con 2 bebidas" $ do
(length . tragos . tomarSoda 3) marcos `shouldBe` 2
it "Marcos toma una soda de nivel 3 y queda con 40 de resistencia" $ do
(resistencia . tomarSoda 3) marcos `shouldBe` 40
let anaElDiaDespues = tomarTragos [tomarJarraLoca, tomarKlusener "Chocolate", rescatarse 2, tomarKlusener "Huevo"] ana
describe "[TP Nº 2] Verificar punto 1.c:" $ do
it "Ana toma una jarra loca, un klusener de chocolate, se rescata 2 horas y luego toma un klusener de huevo: Su resistencia queda en 196" $ do
resistencia anaElDiaDespues `shouldBe` 196
it "Ana toma una jarra loca, un klusener de chocolate, se rescata 2 horas y luego toma un klusener de huevo: Quedan como amigos Marcos (30 de resistencia) y Rodri (45 de resistencia)" $ do
(map resistencia . amigos) anaElDiaDespues `shouldBe` [30, 45]
it "Ana toma una jarra loca, un klusener de chocolate, se rescata 2 horas y luego toma un klusener de huevo: En su lista de tragos ahora hay 3 elementos" $ do
(length . tragos) anaElDiaDespues `shouldBe` 3
it "Rodri toma una soda de nivel 1 y una soda de nivel 2 y queda con nombre errperpRodri" $ do
(nombre . tomarSoda 2 . tomarSoda 1) rodri `shouldBe` "errperpRodri"
it "Marcos toma un klusener de huevo, un tintico y una jarraLoca y queda con 30 de resistencia" $ do
(resistencia . tomarJarraLoca . tomarTintico . tomarKlusener "Huevo" ) marcos `shouldBe` 30
it "Marcos toma un klusener de huevo, un tintico y una jarraLoca y queda con 4 bebidas en el historial" $ do
(length . tragos . tomarJarraLoca . tomarTintico . tomarKlusener "Huevo" ) marcos `shouldBe` 4
describe "[TP Nº 2] Verificar punto 1.d:" $ do
it "Marcos pide “dame otro” y lo deja con 34 de resistencia" $ do
(resistencia . dameOtro) marcos `shouldBe` 34
it "Marcos pide “dame otro” y tiene 2 bebidas en el historial" $ do
(length . tragos . dameOtro) marcos `shouldBe` 2
it "Ana pide `dameOtro`, debe dar error" $ do
evaluate (dameOtro ana) `shouldThrow` anyException
it "Rodri toma una soda de nivel 1, y “dame otro” da como resultado que tiene 3 bebidas" $ do
(length . tragos . dameOtro . tomarSoda 1) rodri `shouldBe` 3
it "Rodri toma una soda de nivel 1, y “dame otro” da como resultado que su nombre queda “erperpRodri”" $ do
(nombre . dameOtro . tomarSoda 1) rodri `shouldBe` "erperpRodri"
describe "[TP Nº 2] Verificar punto 2: `cuantasPuedeTomar`" $ do
it "Rodri puede tomar dos tragos, entre un grogXD, un tintico y un klusener de frutilla" $ do
cuantasPuedeTomar [tomarGrogXD, tomarTintico, tomarKlusener "Frutilla"] rodri `shouldBe` 2
it "Cristian no puede tomar un trago, entre un tintico y un klusener de huevo" $ do
cuantasPuedeTomar [tomarGrogXD, tomarTintico, tomarKlusener "Huevo"] cristian `shouldBe` 1
it "Rodri puede tomar una sola bebida entre un grog XD, un tintico, un klusener de fru..utilla" $ do
cuantasPuedeTomar [tomarGrogXD, tomarTintico, tomarKlusener "fruuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuutilla"] rodri `shouldBe` 1
describe "[TP Nº 2] Verificar punto 3:" $ do
it "Rodri realiza una salida de amigos, debe quedar con 1 amigo" $ do
(length . amigos . realizarItinerario salidaDeAmigos) rodri `shouldBe` 1
it "Rodri hace una salida de amigos y se debe llamar “erpRodri”" $ do
(nombre . realizarItinerario salidaDeAmigos) rodri `shouldBe` "erpRodri"
it "Rodri realiza una salida de amigos, debe quedar con 45 de Resistencia" $ do
(resistencia . realizarItinerario salidaDeAmigos) rodri `shouldBe` 45
it "Rodri realiza una salida de amigos, ahora es amigo de Roberto Carlos" $ do
(amigos . realizarItinerario salidaDeAmigos) rodri `shouldSatisfy` elem robertoCarlos
it "Rodri realiza una salida de amigos, su amigo Roberto Carlos debe quedar con 155 de resistencia" $ do
(resistencia . head . amigos . realizarItinerario salidaDeAmigos) rodri `shouldBe` 155
it "Rodri realiza una salida de amigos, debe quedar con 4 bebidas en su historial" $ do
(length . tragos . realizarItinerario salidaDeAmigos) rodri `shouldBe` 4
describe "[TP Nº 2] Verificar punto 4.a:" $ do
it "La intensidad del itinerario basico es 0.8" $ do
intensidadItinerario itinerarioBasico `shouldBe` 0.8
it "La intensidad de la mezcla explosiva es 1.6" $ do
intensidadItinerario mezclaExplosiva `shouldBe` 1.6
it "La intensidad de la salidaDeAmigos es 4.0" $ do
intensidadItinerario salidaDeAmigos `shouldBe` 4.0
describe "[TP Nº 2] Verificar punto 4.b:" $ do
it "El itinerario más intenso, de los conocidos, es la salida de amigos" $ do
(descripcion . itinerarioMasIntenso) [salidaDeAmigos, mezclaExplosiva, itinerarioBasico] `shouldBe` "Salida de amigos"
it "Rodri hace el itinerario más intenso entre una salida de amigos, la mezcla explosiva y el itinerario básico y queda con el nombre 'erpRodri'" $ do
(nombre . realizarItinerario ((itinerarioMasIntenso) [salidaDeAmigos, mezclaExplosiva, itinerarioBasico])) rodri `shouldBe` "erpRodri"
it "Rodri hace el itinerario más intenso entre una salida de amigos, la mezcla explosiva y el itinerario básico y queda con resistencia 45" $ do
(resistencia . realizarItinerario ((itinerarioMasIntenso) [salidaDeAmigos, mezclaExplosiva, itinerarioBasico])) rodri `shouldBe` 45
it "Rodri hace el itinerario más intenso entre una salida de amigos, la mezcla explosiva y el itinerario básico y queda con un amigo (Roberto Carlos)" $ do
(map nombre . amigos . realizarItinerario ((itinerarioMasIntenso) [salidaDeAmigos, mezclaExplosiva, itinerarioBasico])) rodri `shouldBe` ["Roberto Carlos"]
describe "[TP Nº 2] Verificar punto 5:" $ do
it "La resistencia de Chuck es mayor a la de Ana" $ do
resistencia chuckNorris `shouldSatisfy` (> resistencia ana)
it "Chuck realiza un itinerario basico, al finalizar su resistencia es 1076" $ do
(resistencia . realizarItinerario itinerarioBasico) chuckNorris `shouldBe` 1076
describe "[TP Nº 2] Verificar punto 6:" $ do
it "Roberto Carlos se hace amigo de Ana, toma una jarra popular de espirituosidad 0, sigue teniendo una sola amiga (Ana)" $ do
(length . amigos . tomarJarraPopular 0 . reconocerAmigo ana) robertoCarlos `shouldBe` 1
it "Roberto Carlos se hace amigo de Ana, toma una jarra popular de espirituosidad 1, ganó dos amigos (3)" $ do
(length . amigos . tomarJarraPopular 1 . reconocerAmigo ana) robertoCarlos `shouldBe` 3
it "Roberto Carlos se hace amigo de Ana, toma una jarra popular de espirituosidad 1, sus nuevos amigos son rodri y marcos" $ do
(map nombre . amigos . tomarJarraPopular 1 . reconocerAmigo ana) robertoCarlos `shouldBe` ["Marcos", "Rodri", "Ana"]
it "Cristian se hace amigo de Ana. Roberto Carlos se hace amigo de Cristian, toma una jarra popular de espirituosidad 4, queda con 4 amigos (Cristian, Ana, Marcos y Rodri)" $ do
      (map nombre . amigos . tomarJarraPopular 4 . reconocerAmigo (reconocerAmigo ana cristian)) robertoCarlos `shouldBe` ["Marcos","Rodri","Ana","Cristian"]
 | emanuelcasco/tp-paradigmas-funcional | test/NightClubSpec.hs | mit | 11,765 | 0 | 22 | 2,556 | 2,412 | 1,156 | 1,256 | 150 | 1 |
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.CommandLineAPIHost
(clearConsoleMessages, copyText, inspect, inspectedObject,
inspectedObject_, getEventListeners, getEventListeners_,
databaseId, databaseId_, storageId, storageId_,
CommandLineAPIHost(..), gTypeCommandLineAPIHost)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/CommandLineAPIHost.clearConsoleMessages Mozilla CommandLineAPIHost.clearConsoleMessages documentation>
clearConsoleMessages :: (MonadDOM m) => CommandLineAPIHost -> m ()
clearConsoleMessages self
= liftDOM (void (self ^. jsf "clearConsoleMessages" ()))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/CommandLineAPIHost.copyText Mozilla CommandLineAPIHost.copyText documentation>
copyText ::
(MonadDOM m, ToJSString text) => CommandLineAPIHost -> text -> m ()
copyText self text
= liftDOM (void (self ^. jsf "copyText" [toJSVal text]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/CommandLineAPIHost.inspect Mozilla CommandLineAPIHost.inspect documentation>
inspect ::
(MonadDOM m, ToJSVal objectId, ToJSVal hints) =>
CommandLineAPIHost -> objectId -> hints -> m ()
inspect self objectId hints
= liftDOM
(void (self ^. jsf "inspect" [toJSVal objectId, toJSVal hints]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/CommandLineAPIHost.inspectedObject Mozilla CommandLineAPIHost.inspectedObject documentation>
inspectedObject :: (MonadDOM m) => CommandLineAPIHost -> m JSVal
inspectedObject self
= liftDOM ((self ^. jsf "inspectedObject" ()) >>= toJSVal)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/CommandLineAPIHost.inspectedObject Mozilla CommandLineAPIHost.inspectedObject documentation>
inspectedObject_ :: (MonadDOM m) => CommandLineAPIHost -> m ()
inspectedObject_ self
= liftDOM (void (self ^. jsf "inspectedObject" ()))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/CommandLineAPIHost.getEventListeners Mozilla CommandLineAPIHost.getEventListeners documentation>
getEventListeners ::
(MonadDOM m, IsNode node) => CommandLineAPIHost -> node -> m Array
getEventListeners self node
= liftDOM
((self ^. jsf "getEventListeners" [toJSVal node]) >>=
fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/CommandLineAPIHost.getEventListeners Mozilla CommandLineAPIHost.getEventListeners documentation>
getEventListeners_ ::
(MonadDOM m, IsNode node) => CommandLineAPIHost -> node -> m ()
getEventListeners_ self node
= liftDOM (void (self ^. jsf "getEventListeners" [toJSVal node]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/CommandLineAPIHost.databaseId Mozilla CommandLineAPIHost.databaseId documentation>
databaseId ::
(MonadDOM m, ToJSVal database, FromJSString result) =>
CommandLineAPIHost -> database -> m result
databaseId self database
= liftDOM
((self ^. jsf "databaseId" [toJSVal database]) >>=
fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/CommandLineAPIHost.databaseId Mozilla CommandLineAPIHost.databaseId documentation>
databaseId_ ::
(MonadDOM m, ToJSVal database) =>
CommandLineAPIHost -> database -> m ()
databaseId_ self database
= liftDOM (void (self ^. jsf "databaseId" [toJSVal database]))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/CommandLineAPIHost.storageId Mozilla CommandLineAPIHost.storageId documentation>
storageId ::
(MonadDOM m, ToJSVal storage, FromJSString result) =>
CommandLineAPIHost -> storage -> m result
storageId self storage
= liftDOM
((self ^. jsf "storageId" [toJSVal storage]) >>=
fromJSValUnchecked)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/CommandLineAPIHost.storageId Mozilla CommandLineAPIHost.storageId documentation>
storageId_ ::
(MonadDOM m, ToJSVal storage) =>
CommandLineAPIHost -> storage -> m ()
storageId_ self storage
= liftDOM (void (self ^. jsf "storageId" [toJSVal storage]))
| ghcjs/jsaddle-dom | src/JSDOM/Generated/CommandLineAPIHost.hs | mit | 4,996 | 0 | 12 | 743 | 1,084 | 605 | 479 | 74 | 1 |
{-
Tools for importing / exporting images to pathfinding maps. Uses the
friday-devil package.
-}
{-# LANGUAGE ScopedTypeVariables #-}
import System.Environment (getArgs)
import Criterion.Main
import System.Directory (doesFileExist, removeFile)
import Control.Monad (when)
import qualified Data.Vector as Vector
import qualified Data.Vector.Storable as Storable
import qualified Data.Vector.Unboxed as Unboxed
import qualified Data.Map as Map
import Data.List
import Debug.Trace
import Options.Applicative
import Data.Semigroup ((<>))
import qualified Vision.Image as Image
import Vision.Image.Storage.DevIL (Autodetect (..), PNG (..), load, save)
import Vision.Primitive (ix2)
import Vision.Primitive.Shape
import Grid
import qualified JPS as JPS
import qualified Astar as Astar
main::IO ()
main =
do
-- TODO : make an actual command-line interface instead of hardcoding :)
-- [input, output] <- getArgs
{-
input <- return "../maps/AR0011SRBW.jpg"
start <- return (Coord 414 418)
finish <- return (Coord 253 130)
-}
{-
input <- return "../maps/AR0017SRBW.jpg"
start <- return (Coord 70 54)
finish <- return (Coord 43 42)
-}
input <- return "../maps/CalderaBW.jpg"
finish <- return (Coord 140 511)
start <- return (Coord 572 56)
{-
-}
output <- return "./testout.png"
--finish <- return (Coord 420 393)
pathfindImage input output start finish
-- Do a pathfinding operation on the image at pathIn; save the
-- resulting image to pathOut.
pathfindImage :: FilePath -> FilePath -> Coord -> Coord -> IO ()
pathfindImage pathIn pathOut startc finishc = do
io <- load Autodetect pathIn
case io of
Left err -> do
putStrLn ("Unable to load image at path: " ++ pathIn)
print err
Right (rgb :: Image.RGB) -> do
let
grid = imageToGrid rgb
dim = dims grid
start = c2i dim startc
finish = c2i dim finishc
{-
defaultMain [
bench "searchAstar" $ whnf (\x -> Astar.findPathNormal x start finish) grid
]
-}
(path, visited) <- return (Astar.findPathNormal grid start finish)
{-
defaultMain [
bench "searchJPS" $ whnf (\x -> JPS.findPathJPS x start finish) grid
]
(path, visited) <- return (JPS.findPathJPS grid start finish)
-}
grid' <- return
$ (markStartFinish start finish)
$ (markPath path)
$ (markVisited visited)
$ grid
case path of
[] -> do
putStrLn("No path found!")
otherwise -> do
putStrLn("Found a path!")
image <- return (gridToImage grid')
outExists <- doesFileExist pathOut
when outExists (removeFile pathOut)
mErr <- save Autodetect pathOut image
case mErr of
Nothing -> return ()
Just err -> do
putStrLn $ "ERROR - could not save image to path: " ++ pathOut
print err
-- Convert coordinates to / from the Shape format used by Vision (we only need 2d coordinates :) )
dim2ToCoord (((d0 :. d1) :: DIM1) :. d2) = (Coord d1 d2)
-- Convert a coordinate back to a Shape
coordToDim2 :: Coord -> DIM2
coordToDim2 (Coord x y) = ix2 x y
-- Convert a colored pixel into the corresponding square type
colorToSquare :: Image.RGBPixel -> Square
colorToSquare (Image.RGBPixel r g b) = if (r < 10) && (b < 10) && (g < 10) then 0 else 1
-- Convert a square to a nice colorful pixel
squareToColor :: Square -> Image.RGBPixel
squareToColor sq = case sq of
0 -> (Image.RGBPixel 0 0 0) -- Blocked
1 -> (Image.RGBPixel 255 255 255) -- Open
2 -> (Image.RGBPixel 255 0 255) -- Start
3 -> (Image.RGBPixel 0 0 255) -- Finish
4 -> (Image.RGBPixel 244 158 66) -- Visited
5 -> (Image.RGBPixel 66 212 244) -- On Path
-- Convert an RGB image into a Grid
imageToGrid :: Image.RGB -> Grid
imageToGrid (Image.Manifest manifestSize manifestVector) =
let
image = Storable.toList manifestVector
(Coord x y) = dim2ToCoord manifestSize
dims = (GridDims x y)
squares = map colorToSquare image
squaresWithIndices = zip [0..] squares
squaresWithCoords = [(dim2ToCoord (fromLinearIndex manifestSize index), sq)
| (index, sq) <- squaresWithIndices]
grid = Unboxed.fromList squares
in Grid dims grid
-- Convert a grid to a corresponding image
gridToImage :: Grid -> Image.RGB
gridToImage (Grid (GridDims dimx dimy) grid) =
let
gridshape = coordToDim2 (Coord dimx dimy)
pixels = map squareToColor (Unboxed.toList grid)
in Image.Manifest gridshape (Storable.fromList pixels)
markVisited :: Map.Map Int Int -> Grid -> Grid
markVisited visited (Grid dims sqs) =
let
sqs' = Unboxed.imap (\ i x -> if Map.member i visited then 4 else x) sqs
in
(Grid dims sqs')
markPath :: [Int] -> Grid -> Grid
markPath path (Grid dims sqs) =
let
zipped = [(i, 1) | i <- path]
pathset = Map.fromList zipped
sqs' = Unboxed.imap (\ i x -> if Map.member i pathset then 5 else x) sqs
in
(Grid dims sqs')
markStartFinish :: Int -> Int -> Grid -> Grid
markStartFinish start finish (Grid dims sqs) =
let
sqs' = Unboxed.imap (markSF start finish) sqs
in
(Grid dims sqs')
markSF :: Int -> Int -> Int -> Square -> Square
markSF start finish i x | i == start = 2
| i == finish = 3
| otherwise = x
| hacoo/haskell-jps | pathfinding/Pathfinding.hs | mit | 5,657 | 1 | 18 | 1,632 | 1,488 | 773 | 715 | 113 | 6 |
-- Module: BaseN
-- Copyright: (c)2014
-- License: Apache
-- Maintainer: Brian Sunter <[email protected]>
-- This module converts between base 10 and base 1114015.
module BaseN
(BaseN) where
import Data.List(elemIndex,all,genericLength)
import Data.Maybe(fromJust,isJust)
base10CharSet = ['0'..'9']
data BaseN = BaseN [Char] Int
instance Num BaseN where
(BaseN c x) + (BaseN c' y) = (BaseN c (x + y))
(BaseN c x) * (BaseN c' y) = (BaseN c (x * y))
abs (BaseN c x) = BaseN c (abs x)
signum (BaseN c x) = BaseN c (signum x)
negate (BaseN c x) = BaseN c (negate x)
instance Show BaseN where
show (BaseN c x) = encodeBaseN c x
readBaseN :: [Char] -> [Char] -> (Maybe Int)
readBaseN c x =
let nums = [elemIndex y c | y <- x ]
  -- positional decoding: each digit multiplies the accumulator by the base (length c)
  in if all isJust nums then Just (foldl (\acc d -> acc * length c + fromJust d) 0 nums) else Nothing
encodeBaseN :: [Char] -> Int-> [Char]
encodeBaseN c i
| i < clen = [c !! i]
| otherwise = encodeBaseN c nextIndex ++ [(c !! index)]
where clen = (length c)
index = (mod i clen)
nextIndex = (div (i - index) clen)
| brsunter/Base1M | src/BaseN/BaseN.hs | mit | 1,115 | 0 | 14 | 282 | 498 | 261 | 237 | 25 | 2 |
module Solution1 where
import Problem
import Math
import Clojure
import Data.List
import Data.Char
eul1 lim = sum [x | x <- [1..(pred lim)] , or [(0 == rem x 3),(0 == rem x 5)]]
eul2 lim = iter 1 1
where
iter a b
| a > lim = 0
| even a = a + (iter b (b + a))
| otherwise = iter b (b + a)
eul3 x = iter 2 x
where
iter p temp
| d == 1 = p
| otherwise = iter (nextPrime p) d
where
d = divUntil temp p
eul4 _ = maximum [p | x <- [100..1000], y <- [100..1000], let p = x*y, isPalin p]
eul5 lim = reduce lcm [2..lim]
eul6 lim = squaresum - sumsquare
where
squaresum = sq (reduce (+) [1..lim])
sumsquare = reduce (+) [sq x | x <- [1..lim]]
eul7 x = nthPrime x
eul9 s = [a*b*c | c <- [1..s], b <- [1..c], a <- [1..b], a+b+c == s, a^2 + b^2 == c^2]
-- works, but really naive and dumb
eul10 lim = iter 2
where
iter p
| p > lim = 0
| otherwise = p + iter (nextPrime p)
| skadinyo/conc | haskell/solution1.hs | epl-1.0 | 951 | 0 | 12 | 295 | 557 | 281 | 276 | 28 | 1 |
module Networkie.Rect where
import Networkie.Coord
data Rect = Rect {
rTopLeft :: Coord
, rDim :: Coord
}
rBottomRight :: Rect -> Coord
rBottomRight (Rect p d) = p `pairPlus` d
mkRectPosDim :: Coord -> Dim -> Rect
mkRectPosDim = Rect
mkRectFromCorners :: Coord -> Coord -> Rect
mkRectFromCorners p1 p2 = Rect p1 (p2 `pairMinus` p1)
insideRect :: Rect -> Coord -> Bool
insideRect (Rect (tlx,tly) (w,h)) (x,y) = x >= tlx && y >= tly && x < (tlx+w) && y < (tly+h)
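-- Example, assuming Coord is an (x, y) pair as pairPlus/pairMinus suggest:
--
-- > insideRect (mkRectPosDim (0, 0) (10, 10)) (3, 4)   ==  True
-- > insideRect (mkRectPosDim (0, 0) (10, 10)) (10, 4)  ==  False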
| pmiddend/networkie | src/Networkie/Rect.hs | gpl-2.0 | 475 | 0 | 11 | 99 | 209 | 117 | 92 | 13 | 1 |
{-| Scans a page of Markdown looking for http links. When it finds them, it submits them
   to webcitation.org / https://secure.wikimedia.org/wikipedia/en/wiki/WebCite
   (It will also submit them to Alexa (the source for the Internet Archive), but Alexa says
   its bots may take weeks to visit and may never do so.)
Limitations:
* Only parses Markdown, not ReST or any other format; this is because 'readMarkdown'
is hardwired into it.
By: Gwern Branwen; placed in the public domain -}
module WebArchiver (plugin) where
import Control.Concurrent (forkIO, ThreadId)
import Control.Monad (when)
import Control.Monad.Trans (MonadIO)
import Data.Maybe (fromJust)
import Network.Browser (browse, formToRequest, request, Form(..))
import Network.HTTP (getRequest, rspBody, simpleHTTP, RequestMethod(POST))
import Network.URI (isURI, parseURI, uriPath)
import Network.Gitit.Interface (askUser, liftIO, processWithM, uEmail, Plugin(PreCommitTransform), Inline(Link))
import Text.Pandoc (defaultParserState, readMarkdown)
plugin :: Plugin
plugin = PreCommitTransform archivePage
-- archivePage :: (MonadIO m) => String -> ReaderT (Config, Maybe User) (StateT IO) String
archivePage x = do mbUser <- askUser
let email = case mbUser of
Nothing -> "[email protected]"
Just u -> uEmail u
let p = readMarkdown defaultParserState x
-- force evaluation and archiving side-effects
_p' <- liftIO $ processWithM (archiveLinks email) p
return x -- note: this is read-only - don't actually change page!
archiveLinks :: String -> Inline -> IO Inline
archiveLinks e x@(Link _ (uln, _)) = checkArchive e uln >> return x
archiveLinks _ x = return x
-- | Error check the URL and then archive it both ways
checkArchive :: (MonadIO m) => String -> String -> m ()
checkArchive email url = when (isURI url) $ liftIO (webciteArchive email url >> alexaArchive url)
webciteArchive :: String -> String -> IO ThreadId
webciteArchive email url = forkIO (ignore $ openURL ("http://www.webcitation.org/archive?url=" ++ url ++ "&email=" ++ email))
where openURL = simpleHTTP . getRequest
ignore = fmap $ const ()
alexaArchive :: String -> IO ()
alexaArchive url = do let archiveform = Form POST (fromJust $ parseURI "http://www.alexa.com/help/crawlrequest")
[("url", url), ("submit", "")]
(uri, resp) <- browse $ request $ formToRequest archiveform
when (uriPath uri /= "/help/crawlthanks") $
error $ "Request failed! Did Alexa change webpages? Response:" ++ rspBody resp
| tphyahoo/gititpt | plugins/WebArchiver.hs | gpl-2.0 | 2,759 | 0 | 13 | 679 | 604 | 323 | 281 | 34 | 2 |
{- |
Module : $Header$
Description : translate VSE to S-Expressions
Copyright : (c) C. Maeder, DFKI 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : portable
translation of VSE to S-Expressions
-}
module VSE.ToSExpr where
import VSE.As
import VSE.Ana
import VSE.Fold
import CASL.AS_Basic_CASL
import CASL.Fold
import CASL.Sign
import CASL.ToSExpr
import Common.AS_Annotation
import Common.Id
import Common.LibName
import Common.ProofUtils
import Common.SExpr
import qualified Common.Lib.MapSet as MapSet
import qualified Common.Lib.Rel as Rel
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Char (toLower)
import Data.List (sortBy)
import Data.Ord (comparing)
addUniformRestr :: Sign f Procs -> [Named Sentence] ->
(Sign f Procs, [Named Sentence])
addUniformRestr sig nsens = let
namedConstr = filter (\ ns -> case sentence ns of
ExtFORMULA
(Ranged
(RestrictedConstraint _ _ _)
_) -> True
_ -> False ) nsens
restrConstr = map sentence namedConstr
restrToSExpr (procs, tSens)
(ExtFORMULA
(Ranged (RestrictedConstraint constrs restr _flag) _r)) =
let
(genSorts, genOps, _maps) = recover_Sort_gen_ax constrs
genUniform sorts ops s = let
hasResSort sn ~(Qual_op_name _ opType _) = res_OP_TYPE opType == sn
argLength ~(Qual_op_name _ (Op_type _ args _ _) _) = length args
ctors = sortBy (comparing argLength) $ filter (hasResSort s) ops
genCodeForCtor ~(Qual_op_name ctor (Op_type _ args sn _) _) prg = let
decls = genVars args
vs = map (\ (i, a) -> Var_decl [i] a nullRange) decls
recCalls = map (\ (i, x) ->
Ranged (Call (Predication
(Qual_pred_name
(gnUniformName s)
(Pred_type [x] nullRange) nullRange)
[Qual_var i x nullRange] nullRange
)) nullRange) $
filter (flip elem sorts . snd) decls
recCallsSeq = if null recCalls then Ranged Skip nullRange else
foldr1 (\ p1 p2 -> Ranged (Seq p1 p2) nullRange) recCalls
in case recCalls of
[] -> Ranged (
Block (Var_decl [yVar] s nullRange : vs)
(Ranged (Seq
(Ranged
(Assign yVar (Qual_var xVar sn nullRange)
) nullRange)
(Ranged (Seq (Ranged
(Assign
yVar
(Application
(Qual_op_name
ctor
(Op_type Partial args sn nullRange)
nullRange)
(map toQualVar vs)
nullRange))
nullRange)
(Ranged (If (Strong_equation
(Application
(
Qual_op_name
(gnEqName s)
(Op_type Partial [s, s]
uBoolean nullRange)
nullRange
) [Qual_var
xVar
s nullRange,
Qual_var
yVar
s nullRange
] nullRange)
aTrue nullRange)
(Ranged Skip nullRange)
prg) nullRange))
nullRange )) nullRange) ) nullRange
_ -> Ranged (
Block (Var_decl [yVar] s nullRange : vs)
(Ranged (Seq (Ranged (Assign yVar
(Qual_var xVar sn nullRange)
) nullRange)
(Ranged
(Seq recCallsSeq
(Ranged (Seq
(Ranged
(Assign
yVar
(Application
(Qual_op_name
ctor
(Op_type Partial args sn nullRange)
nullRange)
(map toQualVar vs)
nullRange))
nullRange)
(Ranged (If (Strong_equation
( Application
( Qual_op_name
(gnEqName s)
(Op_type Partial [s, s]
uBoolean nullRange)
nullRange
) [Qual_var
xVar
s nullRange,
Qual_var
yVar
s nullRange
] nullRange)
aTrue nullRange)
(Ranged Skip nullRange) prg) nullRange))
nullRange )) nullRange)) nullRange) ) nullRange
in
[makeNamed "" $ ExtFORMULA $
Ranged (Defprocs [
Defproc Proc (gnUniformName s) [xVar]
(Ranged (
Block [] ( foldr genCodeForCtor (Ranged Abort nullRange)
ctors)
) nullRange
)
nullRange])
nullRange,
(makeNamed "" $
Quantification Universal [Var_decl [xVar] s nullRange]
(Implication
( ExtFORMULA $ Ranged
(Dlformula Diamond ( Ranged
(Call $ Predication
(Qual_pred_name
(Map.findWithDefault (gnRestrName s) s restr)
(Pred_type [s] nullRange) nullRange)
[Qual_var xVar s nullRange] nullRange) nullRange)
(True_atom nullRange))
nullRange)
( ExtFORMULA $ Ranged
(Dlformula Diamond (Ranged
(Call $ Predication
(Qual_pred_name (gnUniformName s)
(Pred_type [s] nullRange) nullRange)
[Qual_var xVar s nullRange] nullRange) nullRange)
(True_atom nullRange))
nullRange) True nullRange) nullRange) {isAxiom = False}]
procDefs = concatMap (genUniform genSorts genOps) genSorts
procs' = Map.fromList $
map (\ s -> (gnUniformName s,
Profile [Procparam In s] Nothing)) genSorts
in
(Map.union procs procs', tSens ++ procDefs)
restrToSExpr _ _ = error "should not be anything than restricted constraints"
(newProcs, trSens) = foldl restrToSExpr (Map.empty, []) restrConstr
in
(sig {
predMap = addMapSet (predMap sig) $ procsToPredMap $ Procs newProcs,
extendedInfo = Procs $ Map.union newProcs (procsMap $ extendedInfo sig)},
nameAndDisambiguate $
trSens ++ filter (not . flip elem namedConstr) nsens)
namedSenToSExpr :: Sign f Procs -> Named Sentence -> SExpr
namedSenToSExpr sig ns =
SList
[ SSymbol "asentence"
, SSymbol $ transString $ senAttr ns
, SSymbol $ if isAxiom ns then "axiom" else "obligation"
, SSymbol $ if isAxiom ns then "proved" else "open"
, senToSExpr sig $ sentence ns ]
senToSExpr :: Sign f Procs -> Sentence -> SExpr
senToSExpr sig s = let ns = sentenceToSExpr sig s in case s of
ExtFORMULA (Ranged (Defprocs _) _) ->
SList [SSymbol "defprocs-sentence", ns]
Sort_gen_ax _ _ ->
SList [SSymbol "generatedness-sentence", ns]
_ -> SList [SSymbol "formula-sentence", ns]
sentenceToSExpr :: Sign f Procs -> Sentence -> SExpr
sentenceToSExpr sign = let sig = addSig const sign boolSig in
foldFormula $ sRec sig $ dlFormulaToSExpr sig
dlFormulaToSExpr :: Sign f Procs -> Dlformula -> SExpr
dlFormulaToSExpr sig = vseFormsToSExpr sig . unRanged
vseFormsToSExpr :: Sign f Procs -> VSEforms -> SExpr
vseFormsToSExpr sig vf = case vf of
Dlformula b p s ->
SList [SSymbol $ case b of
Box -> "box"
Diamond -> "diamond", progToSExpr sig p, sentenceToSExpr sig s]
Defprocs ds ->
SList $ SSymbol "defprocs" : map (defprocToSExpr sig) ds
RestrictedConstraint _ _ _ ->
error "restricted constraints should be handled separately"
vDeclToSExpr :: Sign f Procs -> VarDecl -> SExpr
vDeclToSExpr sig (VarDecl v s m _) =
let vd@(SList [_, w, ty]) = varDeclToSExpr (v, s) in
case m of
Nothing -> vd
Just trm -> SList [ SSymbol "vardecl", w, ty
, foldTerm (sRec sig $ error "vDeclToSExpr") trm ]
procIdToSSymbol :: Sign f Procs -> Id -> SExpr
procIdToSSymbol sig n = case lookupProc n sig of
Nothing -> error "procIdToSSymbol"
Just pr -> case profileToOpType pr of
Just ot -> opIdToSSymbol sig n ot
_ -> predIdToSSymbol sig n $ profileToPredType pr
progToSExpr :: Sign f Procs -> Program -> SExpr
progToSExpr sig = let
pRec = sRec sig (error "progToSExpr")
termToSExpr = foldTerm pRec
formToSExpr = foldFormula pRec
in foldProg FoldRec
{ foldAbort = const $ SSymbol "abort"
, foldSkip = const $ SSymbol "skip"
, foldAssign = \ _ v t ->
SList [SSymbol "assign", varToSSymbol v, termToSExpr t]
, foldCall = \ (Ranged _ r) f ->
case f of
Predication (Qual_pred_name i _ _) ts _ ->
SList $ SSymbol "call" : procIdToSSymbol sig i : map termToSExpr ts
_ -> sfail "Call" r
, foldReturn = \ _ t -> SList [SSymbol "return", termToSExpr t]
, foldBlock = \ ~(Ranged (Block vs p) _) _ _ ->
let (vds, q) = addInits (toVarDecl vs) p
ps = progToSExpr sig q
nvs = map (vDeclToSExpr sig) vds
in if null nvs then ps else SList [SSymbol "vblock", SList nvs, ps]
, foldSeq = \ _ s1 s2 -> SList [SSymbol "seq", s1, s2]
, foldIf = \ _ c s1 s2 -> SList [SSymbol "if", formToSExpr c, s1, s2]
, foldWhile = \ _ c s -> SList [SSymbol "while", formToSExpr c, s] }
defprocToSExpr :: Sign f Procs -> Defproc -> SExpr
defprocToSExpr sig (Defproc k n vs p _) = SList
[ SSymbol $ case k of
Proc -> "defproc"
Func -> "deffuncproc"
, procIdToSSymbol sig n
, SList $ map varToSSymbol vs
, progToSExpr sig p ]
paramToSExpr :: Procparam -> SExpr
paramToSExpr (Procparam k s) = SList
[ SSymbol $ map toLower $ show k
, sortToSSymbol s ]
procsToSExprs :: (Id -> Bool) -> Sign f Procs -> [SExpr]
procsToSExprs f sig =
map (\ (n, pr@(Profile as _)) -> case profileToOpType pr of
Nothing -> SList
[ SSymbol "procedure"
, predIdToSSymbol sig n $ profileToPredType pr
, SList $ map paramToSExpr as ]
Just ot -> SList
[ SSymbol "funcprocedure"
, opIdToSSymbol sig n ot
, SList $ map sortToSSymbol $ opArgs ot
, sortToSSymbol $ opRes ot ])
$ Map.toList $ Map.filterWithKey (\ i _ -> f i)
$ procsMap $ extendedInfo sig
vseSignToSExpr :: Sign f Procs -> SExpr
vseSignToSExpr sig =
let e = extendedInfo sig in
SList $ SSymbol "signature" : sortSignToSExprs sig
: predMapToSExprs sig (diffMapSet (predMap sig) $ procsToPredMap e)
++ opMapToSExprs sig (diffOpMapSet (opMap sig) $ procsToOpMap e)
++ procsToSExprs (const True) sig
qualVseSignToSExpr :: SIMPLE_ID -> LibId -> Sign f Procs -> SExpr
qualVseSignToSExpr nodeId libId sig =
let e = extendedInfo sig in
SList $ SSymbol "signature" : sortSignToSExprs sig
{ sortRel = Rel.delSet
(Set.filter (not . isQualNameFrom nodeId libId) $ sortSet sig)
$ sortRel sig }
: predMapToSExprs sig
(MapSet.filterWithKey (\ i _ -> isQualNameFrom nodeId libId i)
. MapSet.difference (predMap sig) $ procsToPredMap e)
++ opMapToSExprs sig
(MapSet.filterWithKey (\ i _ -> isQualNameFrom nodeId libId i)
. diffOpMapSet (opMap sig) $ procsToOpMap e)
++ procsToSExprs (isQualNameFrom nodeId libId) sig
| nevrenato/Hets_Fork | VSE/ToSExpr.hs | gpl-2.0 | 12,824 | 155 | 37 | 5,262 | 2,931 | 1,596 | 1,335 | 280 | 5 |
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables, TupleSections,
BangPatterns
#-}
module Tombot.Discourse where
import Control.BoolLike ((>&>), (<&<))
import Control.Applicative ((<$>), (<|>))
import Control.Lens ((^.), Field1(..), over)
import Control.Monad
import Data.Aeson
import Data.Aeson.Types (parse, parseMaybe, Parser)
import Data.Attoparsec.Text (Parser)
import qualified Data.Attoparsec.Text as A
import qualified Data.Attoparsec.Combinator as A
import Data.Either (rights)
import Data.Maybe (catMaybes, fromJust, listToMaybe)
import qualified Data.Map as Map
import Data.Monoid ((<>), mempty)
import Data.String (IsString(..))
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Vector (Vector)
import qualified Data.Vector as V
import Network.Wreq
import Safe
import System.Environment (getArgs)
import System.IO.Unsafe (unsafePerformIO)
board = "https://www.newflood.net/"
user = "Shou"
key = discKey . stripSpace . unsafePerformIO $ readFile "irc/api"
where
discKey = Map.lookup "discourse-newflood" . read
stripSpace = dropWhile (`elem` [' ', '\n']) . takeWhile (/= '\n')
latest = "latest.json"
topic :: (Integral int, Show int, IsString string, Monoid string)
=> int -> string
topic n = "/t/" <> (fromString $ show n) <> ".json"
posts :: (Integral int, Show int, IsString string, Monoid string)
=> int -> string
posts n = "/posts/" <> (fromString $ show n) <> ".json"
url path = board <> path <> "?api_key=" <> key <> "&api_username=" <> user
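-- | Run @p@ repeatedly (strictly) until @end@ succeeds, returning the list of
-- results of @p@ together with the result of @end@; like 'A.manyTill'', but
-- the terminator's result is kept as well.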
till' :: (MonadPlus m) => m a -> m b -> m ([a], b)
till' p end = mapFst ($ []) <$> scan (id, undefined)
where scan (fp, e) = liftM (fp,) end `mplus` do !a <- p; scan (fp . (a:), e)
mapFst f (x, y) = (f x, y)
-- | Strips arbitrary tags surrounded by @o@ and @c@ preserving any
-- text in between the opening tag and closing tag, except for
-- whatever matches @p@, which is then treated like a block element and
-- replaced by newlines, useful for e.g. <code> or [quote].
stripTags :: Char -> Char -> (Text -> Bool) -> A.Parser Text
stripTags o c p = do
(pre, tag) <- over _1 T.pack <$> till' A.anyChar tagOpen
(mins, mmid) <- (,) <$> maybeTags <*> fmap Just (tillTagClose tag)
mend <- maybeTags
let mQuote = if p tag then Nothing else Just ()
return . T.concat $ catMaybes
[ Just pre, mins <* mQuote
, (mmid <* mQuote) <|> Just "\n", mend
]
where
tagOpen = do
A.char o
t <- A.takeWhile (`notElem` ['=', c, ' '])
A.skipWhile (/= c)
A.char c
return t
tagClose t = A.char o >> "/" *> A.string t >> A.char c
tillTagClose t = T.pack <$> A.manyTill' A.anyChar (tagClose t)
maybeTags = fmap Just (stripTags o c p) <|> return Nothing
stripBBCode = liftM2 (<>) (stripTags '[' ']' (== "quote")) A.takeText
-- TODO add HTML block elements to annihilate, e.g. <pre>
-- FIXME remove unary tags like <img>
stripHTML = liftM2 (<>) (stripTags '<' '>' p) A.takeText
where
p = flip elem ["code"]
line = A.takeWhile1 (/= '\n')
-- FIXME TODO lazy implementation
stripMarkdown :: A.Parser (Either Text Text)
stripMarkdown = do
-- Either designates whether we keep the result or discard it, later.
A.choice [h1, h2, hn, link, quote, unorderedList, orderedList]
where
h1 = fmap Right $ line <* "\n" <* A.many1 (A.char '=')
h2 = fmap Right $ line <* "\n" <* A.many1 (A.char '-')
hn = let hashes = A.string `map` reverse (scanl (flip T.cons) "#" "#####")
in fmap Right $ A.choice hashes *> A.skipSpace *> line
link = fmap Right $ "[" *> A.takeWhile1 (/= ']') <* "(" <* A.skipWhile (/= ')') <* ")"
-- TODO parse lazy, 80col quoteblocks, for all above
quote = fmap (Left . T.unlines) . A.many1 $ "\n" *> A.skipSpace *> line
unorderedList = do
let lp = "\n" *> A.skipSpace *> A.choice (map A.char ('+':"*-")) *> line
Left . T.unlines <$> A.many1 lp
-- TODO requires recursion: List has to start with "1." not "N."
orderedList = do
let lp = "\n" *> A.skipSpace *> A.digit *> "." *> A.skipSpace *> line
Left . T.unlines <$> A.many1 lp
-- FIXME remove use of `fromJust`
-- | Get latest Discourse thread IDs
getThreadIds :: IO (Vector Int)
getThreadIds = do
r <- fmap (^. responseBody) . get $ url latest
let mtids = do
json <- decode r
flip parseMaybe json $ \obj -> do
(tobj :: Object) <- obj .: "topic_list"
(tops :: Array) <- tobj .: "topics"
let f :: Value -> Int
f = fromJust . parseMaybe id . withObject "" (.: "id")
return $ V.map f tops
tids = maybe V.empty id mtids
return tids
getPostIds tid = do
r <- fmap (^. responseBody) . get . url $ topic tid
let mpids = do
json <- decode r
flip parseMaybe json $ \obj -> do
(pstream :: Object) <- obj .: "post_stream"
(stream :: Array) <- pstream .: "stream"
let f :: Value -> Int
f = fromJust . parseMaybe id . parseJSON
return $ V.map f stream
pids = maybe V.empty id mpids
return pids
getPost pid = do
r <- fmap (^. responseBody) . get . url $ posts pid
let mpost = do
json <- decode r
flip parseMaybe json $ \obj -> do
(raw :: Text) <- obj .: "raw"
return raw
post = maybe mempty id mpost
return post
getThread tid = do
pids <- getPostIds tid
V.forM pids getPost
run n = do
rawp <- getPost n
let eps = rights $ [A.parseOnly stripBBCode rawp]
forM_ eps T.putStrLn
main :: IO ()
main = do
-- XXX listToMaybe only takes the head
margs <- fmap readMay . listToMaybe <$> getArgs
mint <- maybe (readMay <$> getLine) return margs
maybe warnNoArg run mint
where
warnNoArg = putStrLn "No argument Int provided."
| Shou/Tombot | Tombot/Discourse.hs | gpl-2.0 | 6,019 | 0 | 21 | 1,608 | 2,116 | 1,105 | 1,011 | 127 | 2 |
-- ArbolBin.hs
-- ADT of binary search trees and its implementation.
-- Tables via matrices.
-- José A. Alonso Jiménez https://jaalonso.github.com
-- =====================================================================
-- A binary search tree (BST; the type below is called ABB, after its
-- Spanish acronym) is a binary tree in which the value of each node is
-- greater than the values in its left subtree, smaller than the values
-- in its right subtree and, in addition, both subtrees are themselves
-- binary search trees. For example, storing the values [2,3,4,5,6,8,9]
-- in an ABB can yield the following ABBs:
--
--    5              5
--   / \            / \
--  /   \          /   \
-- 2     6        3     8
--  \     \      / \   / \
--   4     8    2   4 6   9
--  /       \
-- 3         9
--
-- The main purpose of ABBs is to reduce the time needed to access the
-- values.
module Tema_19.ArbolBin
(ABB,
vacio, -- ABB
inserta, -- (Ord a, Show a) => a -> ABB a -> ABB a
elimina, -- (Ord a, Show a) => a -> ABB a -> ABB a
crea, -- (Ord a, Show a) => [a] -> ABB a
crea', -- (Ord a, Show a) => [a] -> ABB a
menor, -- Ord a => ABB a -> a
elementos, -- (Ord a, Show a) => ABB a -> [a]
pertenece, -- (Ord a, Show a) => a -> ABB a -> Bool
valido, -- (Ord a, Show a) => ABB a -> Bool
escribeABB, -- Show a => ABB a -> String
ejemploABB -- Int -> ABB Int
) where
-- ABBs as an algebraic data type.
data ABB a = Vacio
| Nodo a (ABB a) (ABB a)
deriving Eq
-- (escribeABB a) is the string corresponding to the ABB a. For example,
-- λ> escribeABB (crea (reverse [5,2,6,4,8,3,9]))
-- " (5 (2 - (4 (3 - -) -)) (6 - (8 - (9 - -))))"
-- λ> escribeABB (foldr inserta vacio (reverse [5,2,4,3,8,6,7,10,9,11]))
-- " (5 (2 - (4 (3 - -) -)) (8 (6 - (7 - -)) (10 (9 - -) (11 - -))))"
escribeABB :: Show a => ABB a -> String
escribeABB Vacio = " -"
escribeABB (Nodo x i d) = " (" ++ show x ++ escribeABB i ++ escribeABB d ++ ")"
-- Printing procedure for ABBs.
instance Show a => Show (ABB a) where
show = escribeABB
-- Examples of ABBs
-- λ> ejemploABB 1
-- (5 (2 - (4 (3 - -) -)) (6 - (8 - (9 - -))))
-- λ> ejemploABB 2
-- (5 (2 - (4 (3 - -) -)) (8 (6 - (7 - -)) (10 (9 - -) (11 - -))))
ejemploABB :: Int -> ABB Int
ejemploABB 1 = crea (reverse [5,2,6,4,8,3,9])
ejemploABB 2 = foldr inserta vacio (reverse [5,2,4,3,8,6,7,10,9,11])
ejemploABB _ = error "No definido"
-- vacio is the empty ABB. For example,
-- λ> vacio
-- -
vacio :: ABB a
vacio = Vacio
-- (pertenece v' a) holds if v' is the value of some node of the ABB
-- a. For example,
-- pertenece 3 (ejemploABB 1) == True
-- pertenece 7 (ejemploABB 1) == False
pertenece :: (Ord a, Show a) => a -> ABB a -> Bool
pertenece _ Vacio = False
pertenece v' (Nodo v i d) | v == v' = True
| v' < v = pertenece v' i
| otherwise = pertenece v' d
-- (inserta v a) is the tree obtained by adding the value v to the ABB a,
-- if it is not already one of its values. For example,
-- λ> inserta 7 (ejemploABB 1)
-- (5 (2 - (4 (3 - -) -)) (6 - (8 (7 - -) (9 - -))))
inserta :: (Ord a, Show a) => a -> ABB a -> ABB a
inserta v' Vacio = Nodo v' Vacio Vacio
inserta v' (Nodo v i d) | v' == v = Nodo v i d
| v' < v = Nodo v (inserta v' i) d
| otherwise = Nodo v i (inserta v' d)
-- (crea vs) is the ABB whose values are vs. For example,
-- λ> crea [3,7,2]
-- (2 - (7 (3 - -) -))
crea :: (Ord a, Show a) => [a] -> ABB a
crea = foldr inserta Vacio
-- (crea' vs) is the ABB of least depth whose values are those of the
-- sorted list vs. For example,
-- λ> crea' [2,3,7]
-- (3 (2 - -) (7 - -))
crea' :: (Ord a, Show a) => [a] -> ABB a
crea' [] = Vacio
crea' vs = Nodo x (crea' l1) (crea' l2)
where n = length vs `div` 2
l1 = take n vs
(x:l2) = drop n vs
-- (elementos a) is the list of the values of the nodes of the ABB in
-- inorder traversal. For example,
-- elementos (ejemploABB 1) == [2,3,4,5,6,8,9]
-- elementos (ejemploABB 2) == [2,3,4,5,6,7,8,9,10,11]
elementos :: (Ord a, Show a) => ABB a -> [a]
elementos Vacio = []
elementos (Nodo v i d) = elementos i ++ [v] ++ elementos d
-- (elimina v a) is the ABB obtained by deleting the value v from the
-- ABB a. For example,
-- λ> (ejemploABB 1)
-- (5 (2 - (4 (3 - -) -)) (6 - (8 - (9 - -))))
-- λ> elimina 3 (ejemploABB 1)
-- (5 (2 - (4 - -)) (6 - (8 - (9 - -))))
-- λ> elimina 2 (ejemploABB 1)
-- (5 (4 (3 - -) -) (6 - (8 - (9 - -))))
-- λ> elimina 5 (ejemploABB 1)
-- (6 (2 - (4 (3 - -) -)) (8 - (9 - -)))
-- λ> elimina 7 (ejemploABB 1)
-- (5 (2 - (4 (3 - -) -)) (6 - (8 - (9 - -))))
elimina :: (Ord a, Show a) => a -> ABB a -> ABB a
elimina _ Vacio = Vacio
elimina v' (Nodo v i Vacio) | v'==v = i
elimina v' (Nodo v Vacio d) | v'==v = d
elimina v' (Nodo v i d) | v' < v = Nodo v (elimina v' i) d
| v' > v = Nodo v i (elimina v' d)
| otherwise = Nodo k i (elimina k d)
where k = menor d
-- (menor a) is the smallest value of the ABB a. For example,
-- menor (ejemploABB 1) == 2
menor :: Ord a => ABB a -> a
menor (Nodo v Vacio _) = v
menor (Nodo _ i _) = menor i
menor Vacio = error "Imposible"
-- (menorTodos v a) holds if v is smaller than all the elements of
-- the ABB a.
menorTodos :: (Ord a, Show a) => a -> ABB a -> Bool
menorTodos _ Vacio = True
menorTodos v a = v < minimum (elementos a)
-- (mayorTodos v a) holds if v is greater than all the elements of
-- the ABB a.
mayorTodos :: (Ord a, Show a) => a -> ABB a -> Bool
mayorTodos _ Vacio = True
mayorTodos v a = v > maximum (elementos a)
-- (valido a) holds if a is a valid ABB. For example,
-- valido (ejemploABB 1) == True
valido :: (Ord a, Show a) => ABB a -> Bool
valido Vacio = True
valido (Nodo v a b) = mayorTodos v a &&
menorTodos v b &&
valido a &&
valido b
| jaalonso/I1M-Cod-Temas | src/Tema_19/ArbolBin.hs | gpl-2.0 | 6,240 | 0 | 9 | 1,963 | 1,343 | 718 | 625 | 72 | 1 |
-- Sortable Test
-- Copyright (C) 2015 Jonathan Lamothe
-- <[email protected]>
-- This program is free software: you can redistribute it and/or
-- modify it under the terms of the GNU General Public License as
-- published by the Free Software Foundation, either version 3 of the
-- License, or (at your option) any later version.
-- This program is distributed in the hope that it will be useful, but
-- WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- General Public License for more details.
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see
-- <http://www.gnu.org/licenses/>.
module Daily.Tests.Process (tests) where
import Common.Types
import Daily (process)
import Daily.Types
import Data.Map ((!))
import qualified Data.Map as Map
import Data.Time.Calendar (Day, fromGregorian)
import Test.HUnit (Test (..), assertBool, (@=?))
tests :: Test
tests = TestLabel "Daily.process tests" $
TestList [ recordCountTest
, fieldTest
, recordTests
]
recordCountTest :: Test
recordCountTest = TestLabel "record count" $
TestCase $ Map.size (processedRecords expected)
@=? Map.size (processedRecords actual)
fieldTest :: Test
fieldTest = TestLabel "field count" $
TestCase $ fields @=? processedFields actual
recordTests :: Test
recordTests = TestLabel "record tests" $
TestList $ map recordTests' actualRecords
where actualRecords = Map.keys $ processedRecords actual
recordTests' :: Day -> Test
recordTests' day =
TestLabel ("record for " ++ show day) $
TestList [ statCountTest expectedRecord actualRecord
, statTests expectedRecord actualRecord
]
where
expectedRecord = processedRecords expected ! day
actualRecord = processedRecords actual ! day
statCountTest :: ProcessedRecord -> ProcessedRecord -> Test
statCountTest expected actual =
TestLabel "value counts" $
TestCase $ length expected @=? length actual
statTests :: ProcessedRecord -> ProcessedRecord -> Test
statTests expected actual =
TestLabel "stat tests" $
TestList $ map statTests' $ zip3 fields expected actual
statTests' :: (String, Stats, Stats) -> Test
statTests' (label, expected, actual) =
TestLabel label $
TestList $ map (statTest expected actual)
[ ("statSum", statSum)
, ("statMax", statMax)
, ("statMin", statMin)
, ("statAvg", statAvg)
, ("statStdDev", statStdDev)
]
statTest :: Stats -> Stats -> (String, Stats -> Double) -> Test
statTest expected actual (label, f) =
TestLabel label $
TestCase $ assertBool errMsg $
isClose expectedVal actualVal
where
errMsg = "expected: " ++ show expectedVal ++ " got: " ++ show actualVal
expectedVal = f expected
actualVal = f actual
isClose :: Double -> Double -> Bool
isClose x y =
if x == 0 || y == 0
then abs (x - y) < delta
else abs (1 - x / y) < delta
where delta = 0.0001
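-- e.g. isClose 1.0 1.00001 == True, but isClose 1.0 1.1 == False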
input :: InputData
input =
emptyInputData { inputFields = fields
, inputRecords = records
}
fields :: [String]
fields = map (\x -> "column " ++ show x) [1..3]
records :: [InputRecord]
records = [ InputRecord day1 "foo" [1, 2, 3]
, InputRecord day1 "bar" [4, 5, 6]
, InputRecord day2 "foo" [6, 5, 4]
, InputRecord day2 "bar" [3, 2, 1]
]
expected :: ProcessedData
expected = ProcessedData fields $
Map.fromList [ (day1, day1record)
, (day2, day2record)
]
actual :: ProcessedData
actual = process input
day1 :: Day
day1 = fromGregorian 1970 1 1
day2 :: Day
day2 = fromGregorian 1970 1 2
day1record :: ProcessedRecord
day1record = [day1col1, day1col2, day1col3]
day2record :: ProcessedRecord
day2record = [day2col1, day2col2, day2col3]
day1col1 :: Stats
day1col1 = buildStats 1 4
day1col2 :: Stats
day1col2 = buildStats 2 5
day1col3 :: Stats
day1col3 = buildStats 3 6
day2col1 :: Stats
day2col1 = buildStats 6 3
day2col2 :: Stats
day2col2 = buildStats 5 2
day2col3 :: Stats
day2col3 = buildStats 4 1
buildStats :: Double -> Double -> Stats
buildStats x y =
Stats { statSum = x + y
, statMax = max x y
, statMin = min x y
, statAvg = avg x y
, statStdDev = stdDev x y
}
avg :: Double -> Double -> Double
avg x y = (x + y) / 2
stdDev :: Double -> Double -> Double
stdDev x y = sqrt ((x - avg x y) ^ 2 + (y - avg x y) ^ 2) / 2
-- jl
| jlamothe/sortable-test | Daily/Tests/Process.hs | gpl-3.0 | 4,479 | 0 | 13 | 1,011 | 1,261 | 693 | 568 | 110 | 2 |
-- Haskell Practical 3 Common Parsing Code
-- By James Cowgill
module Prac3.Combinators where
type Parse t = (Bool, [t])
type Parser t = [t] -> Parse t
-- Terminal symbol
terminal :: Eq t => t -> Parser t
terminal t (e:es) | t == e = (True, es)
terminal _ es = (False, es)
-- Empty string
empty :: Parser t
empty ts = (True, ts)
-- Sequence
infixr 5 +>
(+>) :: Eq t => Parser t -> Parser t -> Parser t
(f +> g) ts
| qf = g rf
| otherwise = (False, ts)
where
(qf, rf) = f ts
-- Choice
infixl 4 <>
(<>) :: Eq t => Parser t -> Parser t -> Parser t
(f <> g) ts
| b = fts
| otherwise = g ts
where
fts@(b,_) = f ts
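-- Usage examples (GHCi):
--   (terminal 'a' +> terminal 'b') "abc"  ==  (True, "c")
--   (terminal 'a' <> empty) "bc"          ==  (True, "bc")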
| jcowgill/cs-work | syac/compilers/Prac3/Combinators.hs | gpl-3.0 | 684 | 0 | 11 | 217 | 318 | 168 | 150 | 20 | 1 |
module P38Filter where
import Control.Exception (catch, IOException)
main :: IO ()
main = do
xs <- promptXs "Enter a list of numbers separated by spaces: "
let evens = filt evn xs
putStrLn $ "The even numbers are: " ++ show evens
filt :: (a -> Bool) -> [a] -> [a]
filt p = foldr (\x xs -> if p x then x:xs else xs) []
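-- e.g. filt evn [1,2,3,4,5,6] == [2,4,6]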
-- originally did:
-- filt _ [] = []
-- filt p (x:xs) = if p x then x : filt p xs else filt p xs
-- not used, but cool
map' :: (a -> b) -> [a] -> [b]
map' f = foldr ((:) . f) []
evn :: Int -> Bool
evn n = n `mod` 2 == 0
od :: Int -> Bool
od = not . evn
promptXs :: String -> IO [Int]
promptXs m = do
putStr m
x <- getLine
return (map read $ words x) `catch` except
where
except e = do
putStrLn $ "Couldn't parse number. Error was: " ++ show (e::IOException)
promptXs m
| ciderpunx/57-exercises-for-programmers | src/P38Filter.hs | gpl-3.0 | 836 | 0 | 11 | 228 | 332 | 174 | 158 | 23 | 2 |
import Test.HUnit
import ProjectM36.Base
import TutorialD.Interpreter.Import.TutorialD
import System.Exit
import qualified Data.Text as T
import System.IO.Temp
import System.FilePath
import qualified Data.Map as M
import System.IO
import qualified Data.ByteString as BS
import qualified Data.Text.Encoding as TE
import Text.URI hiding (makeAbsolute)
main :: IO ()
main = do
tcounts <- runTestTT $ TestList [testTutdFileImport
,testTutdHTTPSImport
]
if errors tcounts + failures tcounts > 0 then exitFailure else exitSuccess
testTutdFileImport :: Test
testTutdFileImport = TestCase $
withSystemTempFile "m.tutd" $ \tempPath handle -> do
BS.hPut handle (TE.encodeUtf8 "x:=relation{tuple{a 5,b \"spam\"}}; y:=relation{tuple{b \"漢字\"}}")
hClose handle
let expectedExpr = MultipleExpr [
Assign "x" (MakeRelationFromExprs Nothing
$ TupleExprs () [TupleExpr (M.fromList [("a", NakedAtomExpr $ IntegerAtom 5),
("b", NakedAtomExpr $ TextAtom "spam")])]),
Assign "y" (MakeRelationFromExprs Nothing
$ TupleExprs () [TupleExpr (M.fromList [("b", NakedAtomExpr (TextAtom "漢字"))])])]
--on Windows, the file URI should not include the drive letter "/c/Users..." -> "/Users"
let uri = "file://" <> map (\c -> if c == '\\' then '/' else c) ( joinDrive "/" (dropDrive tempPath))
fileURI <- mkURI (T.pack uri)
imported <- importTutorialDFromFile fileURI Nothing
assertEqual "import tutd" (Right expectedExpr) imported
testTutdHTTPSImport :: Test
testTutdHTTPSImport = TestCase $ do
uri <- mkURI "https://raw.githubusercontent.com/agentm/project-m36/master/test/TutorialD/Interpreter/Import/httpimporttest.tutd"
let hash = "effe32b247586dc3ac0079fc241b9618d41d189afcaeb7907edbe5a8b45992a4"
expected = Right (MultipleExpr [Assign "x" (RelationVariable "true" ()),Assign "y" (RelationVariable "false" ())])
actual <- importTutorialDViaHTTP uri (Just hash)
assertEqual "github https" expected actual
| agentm/project-m36 | test/TutorialD/Interpreter/Import/ImportTest.hs | unlicense | 2,127 | 1 | 26 | 459 | 548 | 283 | 265 | 39 | 2 |
module Poset.A064097 (a064097) where
import Data.MemoCombinators (integral)
import Helpers.Primes (primeFactors)
a064097 :: Integer -> Integer
a064097 = integral a064097' where
a064097' 1 = 0
a064097' n = 1 + a064097 (n - (n `div` head (primeFactors n)))
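-- The recurrence counts how many steps of n -> n - n `div` p (p the smallest
-- prime factor, assuming primeFactors returns factors in ascending order)
-- are needed to reach 1; e.g. a064097 4 == 2 because 4 -> 2 -> 1.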
| peterokagey/haskellOEIS | src/Poset/A064097.hs | apache-2.0 | 260 | 0 | 15 | 42 | 98 | 54 | 44 | 7 | 2 |
{- Copyright 2014 David Farrell <[email protected]>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module Core.NoExternal (plugin) where
import IRCD.Types
import IRCD.Plugin
plugin :: Plugin
plugin = defaultPlugin {startup=registerCMode 'n', transformers=[Transformer noExt 50]}
noExt :: TransformerSpec
noExt action@(PrivmsgAction (ClientSrc client) (ChannelDst channel) msg io)
| 'n' `elem` modes channel && channel `notElem` channels client = return (False, [])
| otherwise = return (True, [])
noExt _ = return (True, [])
| shockkolate/lambdircd | plugins/Core/NoExternal.hs | apache-2.0 | 1,044 | 0 | 11 | 175 | 175 | 95 | 80 | 10 | 1 |
{-# LANGUAGE PackageImports #-}
module Propellor.Info where
import Propellor.Types
import Propellor.Types.Info
import "mtl" Control.Monad.Reader
import qualified Data.Set as S
import qualified Data.Map as M
import Data.Maybe
import Data.Monoid
import Control.Applicative
pureInfoProperty :: Desc -> Info -> Property
pureInfoProperty desc = Property ("has " ++ desc) (return NoChange)
askInfo :: (Info -> Val a) -> Propellor (Maybe a)
askInfo f = asks (fromVal . f . hostInfo)
os :: System -> Property
os system = pureInfoProperty ("Operating " ++ show system) $
mempty { _os = Val system }
getOS :: Propellor (Maybe System)
getOS = askInfo _os
-- | Indicate that a host has an A record in the DNS.
--
-- TODO check at run time if the host really has this address.
-- (Can't change the host's address, but as a sanity check.)
ipv4 :: String -> Property
ipv4 = addDNS . Address . IPv4
-- | Indicate that a host has an AAAA record in the DNS.
ipv6 :: String -> Property
ipv6 = addDNS . Address . IPv6
-- | Indicates another name for the host in the DNS.
--
-- When the host's ipv4/ipv6 addresses are known, the alias is set up
-- to use their address, rather than using a CNAME. This avoids various
-- problems with CNAMEs, and also means that when multiple hosts have the
-- same alias, a DNS round-robin is automatically set up.
alias :: Domain -> Property
alias = addDNS . CNAME . AbsDomain
addDNS :: Record -> Property
addDNS r = pureInfoProperty (rdesc r) $
mempty { _dns = S.singleton r }
where
rdesc (CNAME d) = unwords ["alias", ddesc d]
rdesc (Address (IPv4 addr)) = unwords ["ipv4", addr]
rdesc (Address (IPv6 addr)) = unwords ["ipv6", addr]
rdesc (MX n d) = unwords ["MX", show n, ddesc d]
rdesc (NS d) = unwords ["NS", ddesc d]
rdesc (TXT s) = unwords ["TXT", s]
rdesc (SRV x y z d) = unwords ["SRV", show x, show y, show z, ddesc d]
ddesc (AbsDomain domain) = domain
ddesc (RelDomain domain) = domain
ddesc RootDomain = "@"
sshPubKey :: String -> Property
sshPubKey k = pureInfoProperty ("ssh pubkey known") $
mempty { _sshPubKey = Val k }
getSshPubKey :: Propellor (Maybe String)
getSshPubKey = askInfo _sshPubKey
hostMap :: [Host] -> M.Map HostName Host
hostMap l = M.fromList $ zip (map hostName l) l
findHost :: [Host] -> HostName -> Maybe Host
findHost l hn = M.lookup hn (hostMap l)
getAddresses :: Info -> [IPAddr]
getAddresses = mapMaybe getIPAddr . S.toList . _dns
hostAddresses :: HostName -> [Host] -> [IPAddr]
hostAddresses hn hosts = case hostInfo <$> findHost hosts hn of
Nothing -> []
Just info -> mapMaybe getIPAddr $ S.toList $ _dns info
| abailly/propellor-test2 | src/Propellor/Info.hs | bsd-2-clause | 2,602 | 6 | 11 | 489 | 846 | 446 | 400 | 53 | 9 |
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE OverloadedStrings #-}
module Gli.Gitlab where
import Data.Aeson
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Internal as BLI
import qualified Data.Text as T
import Data.Time.Format.Human (humanReadableTime)
import Data.Time.LocalTime
import Gli.Types
import Network.HTTP.Client
import Network.HTTP.Client.TLS
import Network.HTTP.Simple
import Prelude hiding (id)
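-- | Perform a GET request against the account's URL, sending the API key in
-- the PRIVATE-TOKEN header, and return the raw response body.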
apiCall :: AccountConfig -> IO BLI.ByteString
apiCall accountConfig = do
manager <- newManager tlsManagerSettings
request' <- parseRequest (url accountConfig)
let request = setRequestManager manager
$ setRequestHeader "PRIVATE-TOKEN"
[B.pack $ key (accountConfig :: AccountConfig)]
$ request'
response <- httpLBS request
return (getResponseBody response)
getProject :: T.Text -> AccountConfig -> IO Project
getProject repoUrl a = do
projectResponseBody <-
apiCall (AccountConfig (key (a :: AccountConfig)) (url a ++ "/projects"))
case parseProject projectResponseBody of
Just projects ->
return (head $ filter (\p -> ssh_url_to_repo p == repoUrl) projects)
Nothing -> error "Unable to fetch projects"
parseProject :: BLI.ByteString -> Maybe [Project]
parseProject body = decode body :: Maybe [Project]
mergeRequests :: AccountConfig -> IO ()
mergeRequests cfg = do
prResponseBody <- apiCall mcfg
let body = justBody $ parseMergeRequest prResponseBody
mapM_ (modifyAndShow cfg) body
where
mcfg = AccountConfig (key (cfg :: AccountConfig))
(url cfg ++ "/merge_requests?state=opened")
modifyAndShow :: AccountConfig -> MergeRequest -> IO ()
modifyAndShow cfg m = do
c <- humanReadableTime $ created_at (m :: MergeRequest)
u <- humanReadableTime $ updated_at (m :: MergeRequest)
b <- builds bc
putStrLn $
unlines (lines (show m)
++ [ "Created At: " ++ show c
, "Updated At: " ++ show u
, "Builds:"])
mapM_ (printBuild (web_url m) (iid (m :: MergeRequest))) b
where
bc = AccountConfig (key (cfg :: AccountConfig))
(url cfg ++ "/repository/commits/" ++ T.unpack (sha m) ++ "/builds")
parseMergeRequest :: BLI.ByteString -> Maybe [MergeRequest]
parseMergeRequest body = decode body :: Maybe [MergeRequest]
printBuild :: T.Text -> Int -> Build -> IO ()
printBuild u i b = do
c <- utcToLocalZonedTime $ created_at (b :: Build)
f <- finished_time (finished_at b)
putStrLn (unlines (lines (show b)
++ [ " Url: " ++ bUrl
, " Created At: " ++ show c
, " Finished At: " ++ f]))
where
bUrl = T.unpack (T.replace
(T.pack("/merge_requests/" ++ show i)) (T.pack "") u)
++ "/builds/"
++ show (id (b :: Build))
finished_time fa = case fa of
Just fa -> do
lf <- utcToLocalZonedTime fa
return (show $ lf)
Nothing -> return "Pending"
builds :: AccountConfig -> IO [Build]
builds cfg = do
pResponseBody <- apiCall cfg
return (justBody $ parseBuilds pResponseBody)
parseBuilds :: BLI.ByteString -> Maybe [Build]
parseBuilds body = decode body :: Maybe [Build]
justBody :: Maybe [a] -> [a]
justBody Nothing = []
justBody (Just elems) = elems
| goromlagche/gli | src/gli/gitlab.hs | bsd-3-clause | 3,577 | 0 | 17 | 1,040 | 1,092 | 553 | 539 | 82 | 2 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TemplateHaskell #-}
module Ling.Raw (module Ling.Abs, module Ling.Raw) where
import Control.Lens
import Ling.Abs
import Ling.Prelude
type Type = Term
makePrisms ''Program
makePrisms ''Dec
makePrisms ''Assertion
makePrisms ''ConName
makePrisms ''OptSig
makePrisms ''VarDec
makePrisms ''ChanDec
makePrisms ''Branch
makePrisms ''Literal
makePrisms ''ATerm
makePrisms ''Term
makePrisms ''Proc
makePrisms ''Act
makePrisms ''ASession
makePrisms ''TopCPatt
makePrisms ''CPatt
makePrisms ''OptSession
makePrisms ''RSession
makePrisms ''OptRepl
makePrisms ''CSession
aTerm :: ATerm -> Term
aTerm (Paren t NoSig) = t
aTerm t = RawApp t []
paren :: Term -> ATerm
paren (RawApp t []) = t
paren t = Paren t NoSig
annot :: Term -> Type -> Term
annot tm ty = RawApp (Paren tm (SoSig ty)) []
pPrll :: [Proc] -> Proc
pPrll = \case
[p] -> p
ps -> PPrll ps
pNxt :: Op2 Proc
pNxt (PPrll []) proc1 = proc1
pNxt proc0 (PPrll []) = proc0
pNxt proc0 proc1 = proc0 `PNxt` proc1
pDot :: Op2 Proc
pDot (PPrll []) proc1 = proc1
pDot (p00 `PDot` p01) proc1 = p00 `pDot` (p01 `pDot` proc1)
pDot proc0 (PPrll []) = proc0
pDot proc0 proc1 = proc0 `PDot` proc1
pDots :: [Proc] -> Proc
pDots = foldr pDot (PPrll [])
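-- pDot right-associates dotted sequences and drops empty parallels; for plain
-- (non-dotted, non-empty) processes p, q, r:  pDots [p, q, r]  builds
-- p `PDot` (q `PDot` r).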
mkPrimOp :: String -> [Term] -> Term
mkPrimOp x = RawApp (Var (Name x)) . fmap paren
_PrimOp :: Prism' Term (String, [Term])
_PrimOp = prism (uncurry mkPrimOp) $ \case
RawApp (Var (Name x)) ts -> Right (x,aTerm <$> ts)
t -> Left t
| np/ling | Ling/Raw.hs | bsd-3-clause | 1,584 | 0 | 13 | 373 | 678 | 335 | 343 | 56 | 2 |
module Math.Triangulate(
triangulate
, triangulate2D
) where
import Linear
import Control.Lens
import qualified Graphics.Triangulation.Delaunay as D
import qualified Data.Vector.V2 as Vec
import qualified Data.Vector as V
import Data.Vector (Vector)
import Math.Plane
-- | Normalize coords of given vectors to fit [0 .. 1] range and return info
-- how to denormalize them back.
normVecs :: Vector (V2 Float) -> (V2 Float -> V2 Float, Vector (V2 Float))
normVecs vs = (\v -> v * dv + minv, (\v -> (v - minv) / dv) <$> vs)
where
minx = minimum . fmap (^. _x) $ vs
miny = minimum . fmap (^. _y) $ vs
minv = V2 minx miny
maxx = maximum . fmap (^. _x) $ vs
maxy = maximum . fmap (^. _y) $ vs
maxv = V2 maxx maxy
dv = fmap (\v -> if v `approxEq` 0 then 1.0 else v) $ maxv - minv
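-- | Triangulate a (roughly planar) polygon given by its 3D vertices: the
-- points are projected onto the plane through the first three vertices,
-- normalised to [0..1], Delaunay-triangulated in 2D and lifted back to 3D.
-- Fewer than three points, or an all-collinear input, yields an empty result.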
triangulate :: Vector (V3 Float) -> Vector (V3 Float, V3 Float, V3 Float)
triangulate vs
| V.length vs < 3 = V.empty
| colinearAll vs = V.empty
| otherwise =
fmap (\(v1, v2, v3) -> (fromV2 v1, fromV2 v2, fromV2 v3))
. V.fromList . D.triangulate . V.toList . fmap toV2 $ v2snorm
where
plane = planeFromPoints (V.unsafeIndex vs 0) (V.unsafeIndex vs 1) (V.unsafeIndex vs 2)
(denormVec, v2snorm) = normVecs v2s
v2s = planeProject plane <$> vs
toV2 (V2 x y) = Vec.Vector2 (realToFrac x) (realToFrac y)
fromV2 (Vec.Vector2 x y) = planePoint plane . denormVec $ V2 (realToFrac x) (realToFrac y)
triangulate2D :: Vector (V2 Float) -> Vector (V2 Float, V2 Float, V2 Float)
triangulate2D vs
| V.length vs < 3 = V.empty
| colinearAll2D vs = V.empty
| otherwise =
fmap (\(v1, v2, v3) -> (fromV2 v1, fromV2 v2, fromV2 v3))
. V.fromList . D.triangulate . V.toList . fmap toV2 $ v2snorm
where
(denormVec, v2snorm) = normVecs vs
toV2 (V2 x y) = Vec.Vector2 (realToFrac x) (realToFrac y)
fromV2 (Vec.Vector2 x y) = denormVec $ V2 (realToFrac x) (realToFrac y)
| Teaspot-Studio/gore-and-ash-game | src/client/Math/Triangulate.hs | bsd-3-clause | 1,920 | 0 | 15 | 435 | 849 | 441 | 408 | 41 | 2 |
{-# OPTIONS_GHC -Wall #-}
{-# OPTIONS_GHC -Wno-missing-signatures #-}
{-# OPTIONS_GHC -fwarn-incomplete-uni-patterns #-}
module Day20 where
import Test.Hspec
import qualified Text.Megaparsec.String as P
import qualified Text.Megaparsec as P
import Utils
import Data.List.Split
import Data.List
import Data.Ord
-- Parsing
data Range = Range Int Int deriving (Show)
parser s = map parseLine (lines s)
parseLine l = let [a, b] = splitOn "-" l
in Range (read a) (read b)
inRange (Range a b) v = v >= a && v <= b
-- Input DSL
lowBound (Range a _) = a
sortIp ips = sortBy (comparing lowBound) ips
compactIp (r0@(Range a b):((Range b' c):xs))
| inRange r0 b' = compactIp (Range a (max b c) : xs)
| otherwise = r0 : compactIp ((Range b' c) : xs)
compactIp l = l
-- Problem DSL
-- utils
validIp ips x= all (\ip -> not (inRange ip x)) ips
-- FIRST problem
day ips = find (validIp ips) [0 .. ]
-- SECOND problem
day' ips = countAvailable ips 4294967295
countAvailable ((Range _ b):(xs@((Range c _):_))) maxRange = (c - b - 1) + countAvailable xs maxRange
countAvailable [(Range _ d)] maxRange = maxRange - d
countAvailable [] _ = error "WTF"
-- tests and data
-- comment out and add tests
test = hspec $ it "works" $ do
day <$> content `shouldReturn` (Just 4793564)
day' <$> content `shouldReturn` 146
fileContent = readFile "content/day20"
content = regIp . parser <$> fileContent
regIp = compactIp . sortIp
ipTests = regIp [Range 5 8, Range 0 2, Range 4 7]
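-- ipTests evaluates to [Range 0 2,Range 4 8]: the 4-7 and 5-8 ranges merge.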
-- 11h44
-- 11h53 4793564
-- 11h57
| guibou/AdventOfCode2016 | src/Day20.hs | bsd-3-clause | 1,530 | 0 | 13 | 311 | 609 | 322 | 287 | 35 | 1 |
module Network.OpenFlow.Message.OfpPort where
import Data.Word (Word32, Word16, Word8)
import qualified Data.Text as T
type HwAddr = (Word8, Word8, Word8, Word8, Word8, Word8)
data OfpPort
= OfpPort
{ portNo :: Word32
, pad :: (Word8, Word8, Word8, Word8)
, hwAddr :: HwAddr
, name :: T.Text -- ^max 16 characters
, config :: Word32
, state :: Word32
, curr :: Word32
, advertised :: Word32
, supported :: Word32
, peer :: Word32
, currSpeed :: Word32
, maxSpeed :: Word32
}
data OfpPortConfig
= OfpPortConfig
{ portDown :: Bool -- ^ 1 << 0
, noRecv :: Bool -- ^ 1 << 2
, noFwd :: Bool -- ^ 1 << 5
, noPacketIn :: Bool -- ^ 1 << 6
}
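-- A possible decoder from the wire-format bitmask (illustrative sketch, not
-- part of the original module):
--
-- > import Data.Bits (testBit)
-- > decodeConfig :: Word32 -> OfpPortConfig
-- > decodeConfig w =
-- >   OfpPortConfig (testBit w 0) (testBit w 2) (testBit w 5) (testBit w 6)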
data OfpPortState
= OfpPortState
{ linkDown :: Bool -- ^ 1 << 0
, blocked :: Bool -- ^ 1 << 1
, live :: Bool -- ^ 1 << 2
}
data OfpPortNo
= OfppMax -- ^ 0xffffff00
| OfppInPort -- ^ 0xfffffff8
| OfppTable -- ^ 0xfffffff9
| OfppNormal -- ^ 0xfffffffa
| OfppFlood -- ^ 0xfffffffb
| OfppAll -- ^ 0xfffffffc
| OfppController -- ^ 0xfffffffd
| OfppLocal -- ^ 0xfffffffe
| OfppAny -- ^ 0xffffffff
| utky/openflow | src/Network/OpenFlow/Message/OfpPort.hs | bsd-3-clause | 1,220 | 0 | 9 | 398 | 259 | 175 | 84 | 39 | 0 |
module ProjectEuler.Problem006 (solution006) where
import Util
sumOfSqr :: Int -> Int
sumOfSqr n = sum (take n [ sq x | x <- [1..]])
sqrOfSum :: Int -> Int
sqrOfSum n = sq (sum (take n [1..]))
dif :: Int -> Int
dif n = (sqrOfSum n) - (sumOfSqr n)
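-- For the first ten natural numbers: sumOfSqr 10 == 385 and sqrOfSum 10 == 3025,
-- so dif 10 == 2640.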
solution006 :: Int
solution006 = dif 100
| ThermalSpan/haskell-euler | src/ProjectEuler/Problem006.hs | bsd-3-clause | 294 | 0 | 11 | 63 | 142 | 75 | 67 | 10 | 1 |
module Plugin where
import Data.Generics.Schemes
import API
resource = rsrc {
field = id listify
}
| abuiles/turbinado-blog | tmp/dependencies/hs-plugins-1.3.1/testsuite/pdynload/poly/Plugin.hs | bsd-3-clause | 108 | 0 | 7 | 25 | 29 | 18 | 11 | 5 | 1 |
------------------------------------------------------------------------------
-- |
-- Module : GGTD.CLI.Ls
-- Copyright : (C) 2016 Samuli Thomasson
-- License : %% (see the file LICENSE)
-- Maintainer : Samuli Thomasson <[email protected]>
-- Stability : experimental
-- Portability : non-portable
------------------------------------------------------------------------------
module GGTD.CLI.Ls where
import GGTD.Base
import GGTD.CLI.Option
import GGTD.CLI.Render
import GGTD.CLI.Base
import GGTD.Sort
import Control.Lens hiding ((&), Context, Context')
import System.Console.Command
-- | Tree
lsAction :: Action IO
lsAction =
withNonOption (nodeOptType (-1)) $ \nodeP ->
foldingOpts filters $ \fltr ->
withOption sortOpt $ \srt ->
handler $ fromNodeP nodeP >>= \case
Nothing -> return ()
Just node -> do
node' <- if node == -1 then use viewContext else return node
printChildren fltr srt node'
| SimSaladin/ggtd | src/GGTD/CLI/Ls.hs | bsd-3-clause | 1,068 | 0 | 20 | 274 | 200 | 113 | 87 | -1 | -1 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Configure
-- Copyright : (c) David Himmelstrup 2005,
-- Duncan Coutts 2005
-- License : BSD-like
--
-- Maintainer : [email protected]
-- Portability : portable
--
-- High level interface to configuring a package.
-----------------------------------------------------------------------------
module Distribution.Client.Configure (
configure,
) where
import Distribution.Client.Dependency
import qualified Distribution.Client.InstallPlan as InstallPlan
import Distribution.Client.InstallPlan (InstallPlan)
import Distribution.Client.IndexUtils as IndexUtils
( getAvailablePackages, getInstalledPackages )
import Distribution.Client.Setup
( ConfigExFlags(..), configureCommand, filterConfigureFlags )
import Distribution.Client.Types as Available
import Distribution.Client.SetupWrapper
( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions )
import Distribution.Simple.Compiler
( CompilerId(..), Compiler(compilerId)
, PackageDB(..), PackageDBStack )
import Distribution.Simple.Program (ProgramConfiguration )
import Distribution.Simple.Setup
( ConfigFlags(..), toFlag, flagToMaybe, fromFlagOrDefault )
import Distribution.Client.PackageIndex (PackageIndex)
import Distribution.Simple.Utils
( defaultPackageDesc )
import Distribution.Package
( Package(..), packageName, Dependency(..), thisPackageVersion )
import Distribution.PackageDescription.Parse
( readPackageDescription )
import Distribution.PackageDescription.Configuration
( finalizePackageDescription )
import Distribution.Version
( anyVersion, thisVersion )
import Distribution.Simple.Utils as Utils
( notice, info, debug, die )
import Distribution.System
( Platform, buildPlatform )
import Distribution.Verbosity as Verbosity
( Verbosity )
import Data.Monoid (Monoid(..))
-- | Configure the package found in the local directory
configure :: Verbosity
-> PackageDBStack
-> [Repo]
-> Compiler
-> ProgramConfiguration
-> ConfigFlags
-> ConfigExFlags
-> [String]
-> IO ()
configure verbosity packageDBs repos comp conf
configFlags configExFlags extraArgs = do
installed <- getInstalledPackages verbosity comp packageDBs conf
available <- getAvailablePackages verbosity repos
progress <- planLocalPackage verbosity comp configFlags configExFlags
installed available
notice verbosity "Resolving dependencies..."
maybePlan <- foldProgress logMsg (return . Left) (return . Right)
progress
case maybePlan of
Left message -> do
info verbosity message
setupWrapper verbosity (setupScriptOptions installed) Nothing
configureCommand (const configFlags) extraArgs
Right installPlan -> case InstallPlan.ready installPlan of
[pkg@(ConfiguredPackage (AvailablePackage _ _ (LocalUnpackedPackage _)) _ _)] ->
configurePackage verbosity
(InstallPlan.planPlatform installPlan)
(InstallPlan.planCompiler installPlan)
(setupScriptOptions installed)
configFlags pkg extraArgs
_ -> die $ "internal error: configure install plan should have exactly "
++ "one local ready package."
where
setupScriptOptions index = SetupScriptOptions {
useCabalVersion = maybe anyVersion thisVersion
(flagToMaybe (configCabalVersion configExFlags)),
useCompiler = Just comp,
-- Hack: we typically want to allow the UserPackageDB for finding the
-- Cabal lib when compiling any Setup.hs even if we're doing a global
-- install. However we also allow looking in a specific package db.
usePackageDB = if UserPackageDB `elem` packageDBs
then packageDBs
else packageDBs ++ [UserPackageDB],
usePackageIndex = if UserPackageDB `elem` packageDBs
then Just index
else Nothing,
useProgramConfig = conf,
useDistPref = fromFlagOrDefault
(useDistPref defaultSetupScriptOptions)
(configDistPref configFlags),
useLoggingHandle = Nothing,
useWorkingDir = Nothing
}
logMsg message rest = debug verbosity message >> rest
-- | Make an 'InstallPlan' for the unpacked package in the current directory,
-- and all its dependencies.
--
planLocalPackage :: Verbosity -> Compiler
-> ConfigFlags -> ConfigExFlags
-> PackageIndex InstalledPackage
-> AvailablePackageDb
-> IO (Progress String String InstallPlan)
planLocalPackage verbosity comp configFlags configExFlags installed
(AvailablePackageDb _ availablePrefs) = do
pkg <- readPackageDescription verbosity =<< defaultPackageDesc verbosity
let -- We create a local package and ask to resolve a dependency on it
localPkg = AvailablePackage {
packageInfoId = packageId pkg,
Available.packageDescription = pkg,
packageSource = LocalUnpackedPackage "."
}
resolverParams =
addPreferences
-- preferences from the config file or command line
[ PackageVersionPreference name ver
| Dependency name ver <- configPreferences configExFlags ]
. addConstraints
-- version constraints from the config file or command line
[ PackageVersionConstraint name ver
| Dependency name ver <- configConstraints configFlags ]
. addConstraints
-- package flags from the config file or command line
[ PackageFlagsConstraint (packageName pkg)
(configConfigurationsFlags configFlags) ]
$ standardInstallPolicy
installed
(AvailablePackageDb mempty availablePrefs)
[SpecificSourcePackage localPkg]
return (resolveDependencies buildPlatform (compilerId comp) resolverParams)
-- | Call an installer for an 'AvailablePackage' but override the configure
-- flags with the ones given by the 'ConfiguredPackage'. In particular the
-- 'ConfiguredPackage' specifies an exact 'FlagAssignment' and exactly
-- versioned package dependencies. So we ignore any previous partial flag
-- assignment or dependency constraints and use the new ones.
--
configurePackage :: Verbosity
-> Platform -> CompilerId
-> SetupScriptOptions
-> ConfigFlags
-> ConfiguredPackage
-> [String]
-> IO ()
configurePackage verbosity platform comp scriptOptions configFlags
(ConfiguredPackage (AvailablePackage _ gpkg _) flags deps) extraArgs =
setupWrapper verbosity
scriptOptions (Just pkg) configureCommand configureFlags extraArgs
where
configureFlags = filterConfigureFlags configFlags {
configConfigurationsFlags = flags,
configConstraints = map thisPackageVersion deps,
configVerbosity = toFlag verbosity
}
pkg = case finalizePackageDescription flags
(const True)
platform comp [] gpkg of
Left _ -> error "finalizePackageDescription ConfiguredPackage failed"
Right (desc, _) -> desc
| yihuang/cabal-install | Distribution/Client/Configure.hs | bsd-3-clause | 7,579 | 0 | 20 | 1,994 | 1,274 | 704 | 570 | 133 | 5 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
module Data.Map.Syntax.Tests where
------------------------------------------------------------------------------
import qualified Data.List as L
import Data.Function (on)
import qualified Data.Map as M
import Data.Monoid (mempty, mappend)
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.HUnit (assertEqual)
import Data.Map.Syntax
import Data.Map.Syntax.Util (mkMapABC, mkMapDEF,mkMapAEF,
ArbMapSyntax(..))
------------------------------------------------------------------------------
------------------------------------------------------------------------------
-- |Simple tests for not-nested maps
insTests :: Spec
insTests = do
it "Insert overwrite" overDup
it "Insert over fail" failDup
it "Reject duplicate" skipDup
it "Trying dupFunc" dupFunc
prop "Insert overwrite from list" prop_syntaxMatchesNubOver
prop "Insert conditional from list" prop_syntaxMatchesNubCond
prop "Insert error on dup from list" prop_syntaxMatchesNubErr
monoidLaws :: Spec
monoidLaws = do
prop "Left identity" prop_leftId
prop "Right identity" prop_rightId
prop "Associativity" prop_assoc
------------------------------------------------------------------------------
-- |Simple tests of ##, #!, #?
overDup :: IO ()
overDup = assertEqual "Failed to overwrite duplicate entry"
(Right $ M.fromList [("firstName","Egon") :: (String,String)])
(runMap $ mkDupMap (##))
failDup :: IO ()
failDup = assertEqual "Failed to error on duplicate entry"
(Left [("firstName" :: String)])
(runMap $ mkDupMap (#!))
skipDup :: IO ()
skipDup = assertEqual "Failed to reject duplicate entry"
(Right $ M.fromList [("firstName","Peter")])
(runMap $ mkDupMap (#?))
dupFunc :: IO ()
dupFunc = assertEqual "Failed use dupFunc"
(Right $ M.fromList [("firstName","firstNamePeterEgon")
:: (String,String)])
(runMapSyntax' f M.lookup M.insert $ mkDupMap (#!))
where
f k v v1 = Just (k `mappend` v1 `mappend` v)
mkDupMap :: (String -> String -> MapSyntax String String)
-> MapSyntax String String
mkDupMap strat = do
"firstName" `strat` "Peter"
"firstName" `strat` "Egon"
------------------------------------------------------------------------------
prop_syntaxMatchesNubOver :: [(String,Int)] -> Bool
prop_syntaxMatchesNubOver pairs = Right revNubMap == (runMap mSyntax)
where mSyntax = mapM_ (\(k,v) -> (k ## v)) pairs
revNubMap = M.fromList . L.nubBy ((==) `on` fst) . L.reverse $ pairs
-- Nub keeps the first of each unique entry, so reverse list to
-- simulate keeping the last
prop_syntaxMatchesNubCond :: [(String,Int)] -> Bool
prop_syntaxMatchesNubCond pairs = Right nubMap == (runMap mSyntax)
where mSyntax = mapM_ (\(k,v) -> (k #? v)) pairs
nubMap = M.fromList . L.nubBy ((==) `on` fst) $ pairs
prop_syntaxMatchesNubErr :: [(String,Int)] -> Bool
prop_syntaxMatchesNubErr pairs =
let mMap = runMap $ mapM_ (\(k,v) -> (k #! v)) pairs
in if pairs == L.nubBy ((==) `on` fst) pairs
then mMap == (Right . M.fromList $ pairs)
else case mMap of
Right _ -> False -- We expected (Left dupKeys)
Left _ -> True -- Wasn't sure about semantics here
-- runMap ... ("a" #! 1) >> ("a" #! 2) >> ("a" #! 3)
-- should be (Left ["a"]), or (Left ["a","a"])?
------------------------------------------------------------------------------
-- |Tests for #! when do blocks are nested
nestingTests :: Spec
nestingTests = do
it "Nested error dups" nestedErr
it "Nested error dups mapK" nestedErrMapK
it "Nester error dups mapV" nestedErrMapV
it "Nested overwrite dups" nestedOver
it "Nested overwrite dups mapK" nestedOverMapK
it "Nested overwrite dups mapV" nestedOverMapV
it "Nested ignore dups mixed" nestedIgnoreMix
it "Nested complex pass" nestedComplex
it "Nested complex error" nestedComplexErr
nestedErr :: IO ()
nestedErr = assertEqual "Failed to error on duplicates across do blocks"
(Left ['E','F'])
(runMap $ do {mkMapDEF (#!); mkMapAEF (#!)})
nestedErrMapK :: IO ()
nestedErrMapK = assertEqual "Failed to error on mapK'ed dups across blocks"
(Left ['B'])
(runMap $ do
mapK succ $ mkMapABC (#!)
mapK succ $ mkMapAEF (#!)
)
nestedErrMapV :: IO ()
nestedErrMapV = assertEqual "Failed to error on mapV'ed dups across blocks"
(Left ['A'])
(runMap $ do
mapV succ $ mkMapABC (#!)
mapV succ $ mkMapAEF (#!)
)
nestedOver :: IO ()
nestedOver = assertEqual "Failed to overwrite dup entries across blocks"
(Right $ M.fromList
[('A',100),('B',2),('C',3),('E',200),('F',300)])
(runMap $ do
mkMapABC (##)
mkMapAEF (##)
)
nestedOverMapK :: IO ()
nestedOverMapK = assertEqual "Failed to mapK in nested blocks"
(Right $ M.fromList
[('A',100),('E',200),('F',300),('C',10),('D',20),('B',2)])
(runMap $ do
mkMapABC (##)
mapK pred $ mkMapDEF (##)
mkMapAEF (##)
)
nestedOverMapV :: IO ()
nestedOverMapV = assertEqual "Failed to mapV in nested blocks"
(Right $ M.fromList
[('A',99),('B',2),('C',3),('E',199),('F',299)])
(runMap $ do
mkMapABC (##)
mapV pred $ mkMapAEF (##)
)
nestedIgnoreMix :: IO ()
nestedIgnoreMix = assertEqual "Failed to mapK/mapV in 'Ignore' do blocks"
(Right $ M.fromList
[('B',0),('C',1),('D',2),('E',31),('@',101)])
(runMap $ do
mapV pred . mapK succ $ mkMapABC (#?)
mapV succ . mapK pred $ mkMapDEF (#?)
mapK pred . mapV succ $ mkMapAEF (#?)
)
nestedComplex :: IO ()
nestedComplex = assertEqual "Failed a mix of dup strategies in nested block"
(Right $ M.fromList
[('@',1),('A',2),('B',1000),('C',1000),('D',10),('E',20),('F',30),('G',300),('H',199),('I',299)])
(runMap $ do
mapK succ . mapK succ $ mkMapABC (##)
mapK succ . mapK succ . mapK succ . mapV pred $
mkMapAEF (#?)
mapK succ ((mapV (const 1000) $ mkMapABC (##)) >>
mkMapAEF (#?))
mkMapDEF (##)
mapK pred $ mkMapABC (#?)
)
nestedComplexErr :: IO ()
nestedComplexErr = assertEqual
"Failed to detect dup in complex nested block"
(Left ['B'])
(runMap $ do
mapK succ . mapK succ $ mkMapABC (##)
mapK succ . mapK succ . mapK succ . mapV pred $
mkMapAEF (#?)
mapK succ ((mapV (const 1000) $ mkMapABC (##)) >>
mkMapAEF (#?))
mapK pred $ mkMapABC (#!)
mkMapDEF (##)
mapK pred $ mkMapABC (#?)
)
------------------------------------------------------------------------------
-- |Monoid Laws
prop_leftId :: ArbMapSyntax String Int -> Bool
prop_leftId a = runMap (mempty `mappend` m) == runMap m
where m = unArbSyntax a
prop_rightId :: ArbMapSyntax String Int -> Bool
prop_rightId a = runMap (m `mappend` mempty) == runMap m
where m = unArbSyntax a
prop_assoc :: ArbMapSyntax String Int
-> ArbMapSyntax String Int
-> ArbMapSyntax String Int
-> Bool
prop_assoc a' b' c' =
runMap ((a `mappend` b) `mappend` c) ==
runMap (a `mappend` (b `mappend` c))
where a = unArbSyntax a'
b = unArbSyntax b'
c = unArbSyntax c'
| mightybyte/map-syntax | test/Data/Map/Syntax/Tests.hs | bsd-3-clause | 8,274 | 0 | 17 | 2,600 | 2,310 | 1,247 | 1,063 | 164 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where
import Control.Error.Util (hush)
import Control.Monad (unless, void, (>=>), when, forever)
import qualified Control.Monad.RWS as RWS
import qualified Data.Aeson as JSON
import qualified Data.ByteString as B
import qualified Data.Configurator as Conf
import Data.Maybe (fromJust, fromMaybe, isNothing)
import qualified Data.Random as R
import qualified Data.Random.Distribution.Bernoulli as R
import Data.Semigroup ((<>))
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Paths_Dikunt
import Pipes ((>->))
import qualified Pipes as P
import qualified Pipes.Prelude as P
import qualified Pipes.Prelude.Text as PT
import Prelude hiding (lines)
import qualified System.Environment as S
import qualified System.IO as S
import qualified Text.Parsec as Parse
import qualified Text.Parsec.Number as Parse
import qualified Types.BotTypes as BT
type BotNick = String
type Probability = Double
data Request
= HelpRequest
| ChangeProbabilityRequest Probability
| OtherMessage
deriving (Show, Eq)
main :: IO ()
main = do
(botnick:_) <- S.getArgs
S.hSetBuffering S.stdout S.LineBuffering
S.hSetBuffering S.stdin S.LineBuffering
-- Load configuration.
configName <- getDataFileName "data/dikunt.config"
config <- Conf.load [ Conf.Required configName ]
initialProb <- Conf.require config "asked-probability"
let pipe = PT.stdinLn >-> P.map T.encodeUtf8 >-> parseRequest >->
handleRequest >-> PT.stdoutLn
runAsked botnick initialProb $ P.runEffect pipe
type Asked = RWS.RWST BotNick () (R.RVar Bool) (R.RVarT IO)
runAsked :: BotNick -> Probability -> Asked a -> IO a
runAsked botnick initialProb a =
R.runRVarT (fst <$> RWS.evalRWST a botnick (R.bernoulli initialProb))
R.StdRandom
parseRequest :: P.Pipe B.ByteString Request Asked ()
parseRequest = forever $ do
botnick <- RWS.ask
req <- (JSON.decodeStrict >=> parse botnick) <$> P.await
unless (isNothing req) $ P.yield (fromJust req)
where
parse botnick (BT.ServerPrivMsg _ _ msg)
= Just
. fromMaybe OtherMessage
. hush
. Parse.parse (request botnick) ""
. BT.getMessage
$ msg
parse _ _ = Nothing
handleRequest :: P.Pipe Request T.Text Asked ()
handleRequest = forever $ do
req <- P.await
case req of
HelpRequest -> giveHelp
ChangeProbabilityRequest newProb -> updateProb newProb
OtherMessage -> handleOther
giveHelp :: P.Pipe Request T.Text Asked ()
giveHelp = do
botnick <- RWS.ask
P.yield $ T.pack botnick <> ": asked help - Display this message"
P.yield $ T.pack botnick <> ": asked set probability <0-1> - Set " <>
"probability to a number between 0 and 1"
P.yield $ T.pack "Otherwise prints \"Spurgt\" with the current " <>
"probability to each message"
updateProb :: Probability -> P.Pipe Request T.Text Asked ()
updateProb = RWS.put . R.bernoulli >=> const (P.yield "Updated probability")
handleOther :: P.Pipe Request T.Text Asked ()
handleOther = do
b <- RWS.get
shouldAsk <- P.lift . RWS.lift $ R.sample b
when shouldAsk $ P.yield "Spurgt!"
type RequestParser a = Parse.Parsec String () a
request :: BotNick -> RequestParser Request
request botnick = do
void $ stringToken (botnick ++ ": ")
void $ stringToken "asked "
Parse.choice [helpRequest, changeRequest] <* Parse.eof
helpRequest :: RequestParser Request
helpRequest = stringToken "help" *> return HelpRequest
changeRequest :: RequestParser Request
changeRequest = do
void $ stringToken "set "
void $ stringToken "probability "
newprob <- Parse.floating
if newprob >= 0.0 && newprob <= 1.0
then return $ ChangeProbabilityRequest newprob
else Parse.unexpected "Probability should be between 0 and 1"
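-- A quick sanity check of the grammar (the nick "dikunt" is illustrative):
--
-- > Parse.parse (request "dikunt") "" "dikunt: asked set probability 0.5"
-- > -- Right (ChangeProbabilityRequest 0.5)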
token :: RequestParser a -> RequestParser a
token tok = Parse.spaces *> tok <* Parse.spaces
stringToken :: String -> RequestParser String
stringToken = token . Parse.string
| bus000/Dikunt | plugins/Asked/Main.hs | bsd-3-clause | 4,050 | 0 | 15 | 817 | 1,216 | 642 | 574 | -1 | -1 |
module Lib
(
module CmdLineProcessing
) where
import CmdLineProcessing
| ajjaic/dokuwiki-template | src/Lib.hs | bsd-3-clause | 85 | 0 | 4 | 23 | 13 | 9 | 4 | 4 | 0 |
{-# LANGUAGE TemplateHaskell, ScopedTypeVariables #-}
module UUTReaderUtilities where
import Control.Monad
import Language.Haskell.TH
import Sized
import Arbitrary
import Data.String.Utils
import System.IO.Unsafe
import UUT
import UUTReaderUtilitiesDeep
----------Recognize user defined types --------------------------------
notDefTypesQMonad :: Q [String] -> Q [String]
notDefTypesQMonad xs = do list <- xs
return (notDefTypes list)
notDefTypes :: [String] -> [String]
notDefTypes xs = removeDuplicates notDefinedTypes
where notDefinedTypes = concat (map defTypes separated)
separated = map (\x -> split_str x "") xs
split_str :: String -> String -> [String]
split_str [] saved = [saved]
split_str (x:xs) saved
| x == ' ' = saved:(split_str xs "")
         | otherwise = split_str xs (saved ++ [x])
defTypes :: [String] -> [String]
defTypes [] = []
defTypes (x:[])
| (definedBaseTypes x) = []
| otherwise = [x]
defTypes (x:xs) = (keepUndefinedComplex x (length xs))++(keepUndefinedSimples xs)
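-- For example, defTypes (split_str "Maybe Int" "") evaluates to ["Maybe x1"]:
-- the unknown head constructor is kept (with placeholder variables appended)
-- while the built-in type Int is dropped.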
keepUndefinedComplex :: String -> Int -> [String]
keepUndefinedComplex x n
| (definedComplexTypes x) = []
| otherwise = [x ++ " " ++(listOfVariables n)]
listOfVariables :: Int -> String
listOfVariables 0 = ""
listOfVariables 1 = "x1"
listOfVariables n = ("x"++(show n)) ++ " " ++ (listOfVariables (n-1))
keepUndefinedSimples :: [String] -> [String]
keepUndefinedSimples [] = []
keepUndefinedSimples (x:xs)
| (definedBaseTypes x) = keepUndefinedSimples xs
| otherwise = x:(keepUndefinedSimples xs)
definedBaseTypes :: String -> Bool
definedBaseTypes x = (x == "Int" || x == "Char" || x == "Bool")
definedComplexTypes :: String -> Bool
definedComplexTypes x = (x == "[]" || x == "(,)" || x == "(,,)" || x == "(,,,)" || x == "Array")
removeDuplicates :: Eq a => [a] -> [a]
removeDuplicates = rdHelper []
where rdHelper seen [] = seen
rdHelper seen (x:xs)
| x `elem` seen = rdHelper seen xs
| otherwise = rdHelper (seen ++ [x]) xs
--------------------Prueba function------------------------------------
prueba listArgs = ((pos_f filtered_pre output), (listArgs), (filtered_pre))
where
filtered_pre = pre_f listArgs
output = fun_f filtered_pre
pre_f listArgs = filter (prec_f_aux) listArgs
fun_f filtered_list = map fun_f_aux filtered_list
pos_f inputs outputs = zipWith pos_f_aux inputs outputs
--------------generators for auxiliar prueba functions-------------------------
prec_f_aux $(tupleP uutNargs) = $(appsE ((varE 'uutPrec):(map varE (listVar uutNargs))))
fun_f_aux $(tupleP uutNargs) = $(appsE ((varE 'uutMethod):(map varE (listVar uutNargs))))
pos_f_aux $(tupleP uutNargs) $(varP $ mkName "o") = $(appsE ((varE 'uutPost):((map varE (listVar uutNargs))++[varE $ mkName "o"]))) | pegartillo95/CaseGenerator | src/UUTReaderUtilities.hs | bsd-3-clause | 3,496 | 0 | 15 | 1,194 | 1,045 | 540 | 505 | 60 | 7 |
module Mental.Tree.Typed
( TypedTree
, getType
) where
import Control.Comonad.Cofree (Cofree(..))
import Mental.Tree
import Mental.Type
type TypedTree = AnnTree Ty
getType :: TypedTree -> Ty
getType (ty :< _) = ty
| romac/mental | src/Mental/Tree/Typed.hs | bsd-3-clause | 255 | 0 | 7 | 72 | 73 | 44 | 29 | 9 | 1 |
{-# LANGUAGE TemplateHaskell #-}
import Options.Applicative
import TH
data Command = Create String | Delete Arg
deriving (Show, Read)
data Arg = Arg Int String
deriving (Show, Read)
$(derive ''Command)
data S1
= SA
| SB [Int] Char
| SC P1
| SD S2
deriving (Show, Read)
data P1 = P1 Int Char
deriving (Show, Read)
data S3 = S3A Int Int | S3B Int Char
deriving (Show, Read)
data S2 = R | G | B
deriving (Show, Read)
$(derive ''S1)
main :: IO ()
main = do
x <- execParser p_S1_toplevel
print x
| tranma/optparse-th | test.hs | bsd-3-clause | 528 | 15 | 9 | 132 | 226 | 120 | 106 | 25 | 1 |
module Common.Aeson where
import Data.Aeson.Types (Options)
import Data.Aeson.TH (constructorTagModifier, defaultOptions,
fieldLabelModifier)
import Data.String.Additional (camelToUnderscore)
jsonOptions :: Int -> Options
jsonOptions n = defaultOptions { fieldLabelModifier = camelToUnderscore . drop n
, constructorTagModifier = camelToUnderscore }
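-- A usage sketch (the record type is hypothetical; 'deriveJSON' comes from
-- Data.Aeson.TH):
--
-- > data Person = Person { personFirstName :: Text, personAge :: Int }
-- > $(deriveJSON (jsonOptions 6) ''Person)
--
-- Here @jsonOptions 6@ strips the @person@ prefix before applying the
-- camel-to-underscore conversion to the remaining field name.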
| k-bx/redis-tlz | src/Common/Aeson.hs | bsd-3-clause | 453 | 0 | 8 | 138 | 83 | 50 | 33 | 8 | 1 |
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecursiveDo #-}
import Reflex.Dom
import Data.Map (Map)
import Data.Monoid
import Data.Foldable
import Data.Bifunctor
import Data.Word
import Data.Function
import Control.Monad
import Data.List
import Text.Read
import qualified Data.Map as Map
import Control.Monad.IO.Class
import Marbles.Eval
import qualified GHCJS.DOM.Types as Dom
import qualified GHCJS.DOM.Event as Dom
import qualified GHCJS.DOM.EventM as Dom
import qualified GHCJS.DOM.UIEvent as Dom
import qualified GHCJS.DOM.Element as Dom
import qualified GHCJS.DOM.Document as Dom
mouseLocal :: (Dom.IsElement e, Dom.IsUIEvent uiE) => e -> uiE -> IO (Int, Int)
mouseLocal e event = do
x <- Dom.uiEventGetLayerX event
y <- Dom.uiEventGetLayerY event
ex <- Dom.elementGetOffsetLeft e
ey <- Dom.elementGetOffsetTop e
return (x - round ex, y - round ey)
mouseMove_ :: (Dom.IsElement e, MonadWidget t m) => e -> m (Event t (Int, Int))
mouseMove_ e = wrapDomEvent e Dom.elementOnmousemove (liftIO . mouseLocal e =<< Dom.event)
mouseDown_ :: (Dom.IsElement e, MonadWidget t m) => e -> m (Event t (Int, Int))
mouseDown_ e = wrapDomEvent e Dom.elementOnmousedown (liftIO . mouseLocal e =<< Dom.event)
mouseUp_ :: (Dom.IsElement e, MonadWidget t m) => e -> m (Event t (Int, Int))
mouseUp_ e = wrapDomEvent e Dom.elementOnmouseup (liftIO . mouseLocal e =<< Dom.event)
isMouseDown :: (Dom.IsElement e, MonadWidget t m) => e -> m (Behavior t Bool)
isMouseDown e = do
mouseDown <- mouseDown_ e
mouseUp <- mouseUp_ e
hold False $ leftmost [const False <$> mouseUp, const True <$> mouseDown]
draggableDiv :: MonadWidget t m
=> String -- ^ class
-> Dynamic t Int
-> m (Dynamic t Int)
draggableDiv = undefined
main :: IO ()
main = mainWidget $ do
divClass "container" $ do
holdDemo
return ()
functionHeader name firstParam = do
elClass "form" "form-inline functionHeader" $ do
elClass "p" "form-control-static" $ text (name ++ " :: ")
firstParam
functionArgument content = do
divClass "argument" $ do
elClass "span" "glyphicon glyphicon-arrow-right" $ pure ()
content
holdDemo = do
divClass "demo panel panel-default" $ do
rec
inMarble <- divClass "row" $ do
divClass "col-md-1 functionName" $ text "hold ::"
divClass "col-md-11" $ marbleInput
(parent, inEvent) <- elAttr' "div" ("class" =: "row") $ do
divClass "col-md-1 functionName" $ text "→"
divClass "col-md-11" $ inputMarbleEvent parent $ Map.fromList [(20, Marble 'a'), (60, Marble 'b')]
divClass "row" $ do
divClass "col-md-1 functionName" $ text "→"
divClass "col-md-11" $ do
outEvent <- $(qDyn [| evalBehaviour [1..100] $ marbleHold' $(unqDyn [| inMarble |])
(MarbleEvent $(unqDyn [| inEvent |]))
|])
renderMarbleBehaviour =<< mapDyn snd outEvent
return outEvent
return ()
marbleInput :: (MonadWidget t m, Reflex t) => m (Dynamic t Marble)
marbleInput = divClass "marbleInput" $ do
mapDyn (Marble . maybe '⊥' fst . uncons) =<< (_textInput_value <$> textInput config)
where config = def { _textInputConfig_initialValue = "⊥"
, _textInputConfig_attributes = constDyn
( "maxlength" =: "1"
<> "size" =: "1"
<> "class" =: "form-control")
}
unMarble (Marble x) = x
inputMarbleEvent :: (MonadWidget t m)
=> El t
-> Map Time Marble
-> m (Dynamic t (Map Time Marble))
inputMarbleEvent parent event0 = do
divClass "marbleEvent" $ do
divClass "timeline" $ pure ()
r <- forM (Map.toList event0) $ \(t, Marble c) -> do
rec mouseDown <- mouseDown_ $ _el_element slider
mouseUp <- mouseUp_ $ _el_element parent
isDown <- hold False $ leftmost [const False <$> mouseUp, const True <$> mouseDown]
evMove <- mouseMove_ $ _el_element parent
dynLeft <- mapDyn fst =<< (holdDyn (t * 10,0) $ gate isDown evMove)
sliderAttrs <- forDyn dynLeft $ \left -> ("class" =: "marble" <> "style" =: ("left: " ++ show left ++ "px"))
(slider, _) <- elDynAttr' "div" sliderAttrs $ text [c]
(,) <$> mapDyn (`div` 10) dynLeft <*> pure (constDyn $ Marble c)
mconcatDyn =<< mapM (mapDyn (uncurry Map.singleton)) =<< mapM (uncurry (combineDyn (,))) r
test :: MonadWidget t m => m ()
test = do
rec
(parent, _) <- elAttr' "div" ("id" =: "parent") $ do
rec
mouseDown <- mouseDown_ $ _el_element slider
mouseUp <- mouseUp_ $ _el_element parent
isDown <- hold False $ leftmost [const False <$> mouseUp, const True <$> mouseDown]
evMove <- mouseMove_ $ _el_element parent
(slider, _) <- elDynAttr' "div" sliderAttrs $ pure ()
sliderAttrs <- forDyn dynLeft $ \left -> ("id" =: "slider" <> "style" =: ("left: " ++ show (left * 10) ++ "px"))
dynLeft <- mapDyn fst =<< (holdDyn (0,0) $ gate isDown evMove)
return ()
return ()
renderMarbleBehaviour :: (MonadWidget t m)
=> (Dynamic t (Map Time Marble))
-> m (Dynamic t (Map Time Marble))
renderMarbleBehaviour marblesDyn = do
divClass "marbleBehaviour" $ do
divClass "timeline" $ pure ()
marblesList <- mapDyn (map ((\(ts, m:_) -> ((head ts, length ts) , m)) . unzip) . groupBy ((==) `on` snd) . Map.toList) marblesDyn
simpleList marblesList $ \msDyn -> do
dyn =<< (forDyn msDyn $ \((t, l), Marble c) -> do
elAttr "div" ("class" =: "marble" <> "style" =: ("left: " ++ show (t * 10) ++ "px;" ++ "width: " ++ show (l * 10) ++ "px;")) $ do
text $ [c])
return marblesDyn
| zudov/reflex-marbles | src/Main.hs | bsd-3-clause | 6,021 | 2 | 32 | 1,625 | 2,143 | 1,062 | 1,081 | 129 | 1 |
{-# language CPP #-}
{-# language QuasiQuotes #-}
{-# language TemplateHaskell #-}
#ifndef ENABLE_INTERNAL_DOCUMENTATION
{-# OPTIONS_HADDOCK hide #-}
#endif
-- | Interface between OpenCV and inline-c(pp) (Haskell)
module OpenCV.Internal.C.Inline ( openCvCtx ) where
import "base" Foreign.Ptr ( FunPtr )
import "base" Data.Monoid ( (<>) )
import qualified "containers" Data.Map as M
import qualified "inline-c" Language.C.Inline as C
import qualified "inline-c" Language.C.Types as C
import qualified "inline-c" Language.C.Inline.Context as C
import qualified "inline-c-cpp" Language.C.Inline.Cpp as C
import "this" OpenCV.Internal.C.Types
-- | Context useful to work with the OpenCV library
--
-- Based on 'C.cppCtx', 'C.bsCtx' and 'C.vecCtx'.
--
-- 'C.ctxTypesTable': converts OpenCV basic types to their counterparts in
-- "OpenCV.Internal.C.Inline".
--
-- No 'C.ctxAntiQuoters'.
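--
-- A minimal usage sketch (the included header is illustrative, not taken
-- from this package):
--
-- > C.context openCvCtx
-- > C.include "opencv2/core.hpp"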
openCvCtx :: C.Context
openCvCtx = C.cppCtx <> C.bsCtx <> C.vecCtx <> ctx
where
ctx = mempty { C.ctxTypesTable = openCvTypesTable }
openCvTypesTable :: C.TypesTable
openCvTypesTable = M.fromList
[ ( C.TypeName "bool" , [t| C.CInt |] )
, ( C.TypeName "Exception" , [t| C'CvCppException |] )
, ( C.TypeName "Matx12f" , [t| C'Matx12f |] )
, ( C.TypeName "Matx12d" , [t| C'Matx12d |] )
, ( C.TypeName "Matx13f" , [t| C'Matx13f |] )
, ( C.TypeName "Matx13d" , [t| C'Matx13d |] )
, ( C.TypeName "Matx14f" , [t| C'Matx14f |] )
, ( C.TypeName "Matx14d" , [t| C'Matx14d |] )
, ( C.TypeName "Matx16f" , [t| C'Matx16f |] )
, ( C.TypeName "Matx16d" , [t| C'Matx16d |] )
, ( C.TypeName "Matx21f" , [t| C'Matx21f |] )
, ( C.TypeName "Matx21d" , [t| C'Matx21d |] )
, ( C.TypeName "Matx22f" , [t| C'Matx22f |] )
, ( C.TypeName "Matx22d" , [t| C'Matx22d |] )
, ( C.TypeName "Matx23f" , [t| C'Matx23f |] )
, ( C.TypeName "Matx23d" , [t| C'Matx23d |] )
, ( C.TypeName "Matx31f" , [t| C'Matx31f |] )
, ( C.TypeName "Matx31d" , [t| C'Matx31d |] )
, ( C.TypeName "Matx32f" , [t| C'Matx32f |] )
, ( C.TypeName "Matx32d" , [t| C'Matx32d |] )
, ( C.TypeName "Matx33f" , [t| C'Matx33f |] )
, ( C.TypeName "Matx33d" , [t| C'Matx33d |] )
, ( C.TypeName "Matx34f" , [t| C'Matx34f |] )
, ( C.TypeName "Matx34d" , [t| C'Matx34d |] )
, ( C.TypeName "Matx41f" , [t| C'Matx41f |] )
, ( C.TypeName "Matx41d" , [t| C'Matx41d |] )
, ( C.TypeName "Matx43f" , [t| C'Matx43f |] )
, ( C.TypeName "Matx43d" , [t| C'Matx43d |] )
, ( C.TypeName "Matx44f" , [t| C'Matx44f |] )
, ( C.TypeName "Matx44d" , [t| C'Matx44d |] )
, ( C.TypeName "Matx51f" , [t| C'Matx51f |] )
, ( C.TypeName "Matx51d" , [t| C'Matx51d |] )
, ( C.TypeName "Matx61f" , [t| C'Matx61f |] )
, ( C.TypeName "Matx61d" , [t| C'Matx61d |] )
, ( C.TypeName "Matx66f" , [t| C'Matx66f |] )
, ( C.TypeName "Matx66d" , [t| C'Matx66d |] )
, ( C.TypeName "Vec2i" , [t| C'Vec2i |] )
, ( C.TypeName "Vec2f" , [t| C'Vec2f |] )
, ( C.TypeName "Vec2d" , [t| C'Vec2d |] )
, ( C.TypeName "Vec3i" , [t| C'Vec3i |] )
, ( C.TypeName "Vec3f" , [t| C'Vec3f |] )
, ( C.TypeName "Vec3d" , [t| C'Vec3d |] )
, ( C.TypeName "Vec4i" , [t| C'Vec4i |] )
, ( C.TypeName "Vec4f" , [t| C'Vec4f |] )
, ( C.TypeName "Vec4d" , [t| C'Vec4d |] )
, ( C.TypeName "Point2i" , [t| C'Point2i |] )
, ( C.TypeName "Point2f" , [t| C'Point2f |] )
, ( C.TypeName "Point2d" , [t| C'Point2d |] )
, ( C.TypeName "Point3i" , [t| C'Point3i |] )
, ( C.TypeName "Point3f" , [t| C'Point3f |] )
, ( C.TypeName "Point3d" , [t| C'Point3d |] )
, ( C.TypeName "Size2i" , [t| C'Size2i |] )
, ( C.TypeName "Size2f" , [t| C'Size2f |] )
, ( C.TypeName "Size2d" , [t| C'Size2d |] )
, ( C.TypeName "Rect2i" , [t| C'Rect2i |] )
, ( C.TypeName "Rect2f" , [t| C'Rect2f |] )
, ( C.TypeName "Rect2d" , [t| C'Rect2d |] )
, ( C.TypeName "RotatedRect" , [t| C'RotatedRect |] )
, ( C.TypeName "TermCriteria", [t| C'TermCriteria|] )
, ( C.TypeName "Scalar" , [t| C'Scalar |] )
, ( C.TypeName "Mat" , [t| C'Mat |] )
, ( C.TypeName "Range" , [t| C'Range |] )
, ( C.TypeName "KeyPoint" , [t| C'KeyPoint |] )
, ( C.TypeName "DMatch" , [t| C'DMatch |] )
--, ( C.TypeName "MSER" , [t| C'MSER |] )
, ( C.TypeName "Ptr_ORB" , [t| C'Ptr_ORB |] )
--, ( C.TypeName "BRISK" , [t| C'BRISK |] )
--, ( C.TypeName "KAZE" , [t| C'KAZE |] )
--, ( C.TypeName "AKAZE" , [t| C'AKAZE |] )
, ( C.TypeName "Ptr_SimpleBlobDetector", [t| C'Ptr_SimpleBlobDetector |] )
, ( C.TypeName "BFMatcher" , [t| C'BFMatcher |] )
, ( C.TypeName "Ptr_BackgroundSubtractorKNN" , [t| C'Ptr_BackgroundSubtractorKNN |] )
, ( C.TypeName "Ptr_BackgroundSubtractorMOG2", [t| C'Ptr_BackgroundSubtractorMOG2 |] )
, ( C.TypeName "VideoCapture", [t| C'VideoCapture |] )
, ( C.TypeName "VideoWriter" , [t| C'VideoWriter |] )
, ( C.TypeName "CascadeClassifier", [t| C'CascadeClassifier |] )
, ( C.TypeName "Ptr_GrayCodePattern", [t| C'Ptr_GrayCodePattern |] )
, ( C.TypeName "MouseCallback" , [t| FunPtr C'MouseCallback |] )
, ( C.TypeName "TrackbarCallback", [t| FunPtr C'TrackbarCallback |] )
]
| lukexi/haskell-opencv | src/OpenCV/Internal/C/Inline.hs | bsd-3-clause | 5,789 | 0 | 9 | 1,761 | 1,529 | 1,026 | 503 | -1 | -1 |
-- Example: Single-Lane Traffic Analysis
--
-- It is described in several sources [1, 2]: chapter 15 of [2] and section 6.18 of [1].
--
-- The system to be modeled in this example consists of the traffic flow from
-- two directions along a two-lane road, one lane of which has been closed for
-- 500 meters for repairs. Traffic lights have been placed at each end of
-- the closed lane to control the flow of traffic through the repair section.
-- The lights allow traffic to flow for a specified time interval from only
-- one direction. When a light turns green, the waiting cars start and pass
-- the light every two seconds. If a car arrives at a green light when there
-- are no waiting cars, the car passes through the light without delay. The car
-- arrival pattern is exponentially distributed, with an average of 9 seconds
-- between cars from direction 1 and 12 seconds between cars from direction 2.
-- A light cycle consists of green in direction 1, both red, green in direction 2,
-- both red, and then the cycle is repeated. Both lights remain red for 55 seconds
-- to allow the cars in transit to leave the repair section before traffic from
-- the other direction can be initiated.
--
-- The objective is to simulate the above system to determine values for
-- the green time for direction 1 and the green time for direction 2 which
-- yield a low average waiting time for all cars.
--
-- [1] A. Alan B. Pritsker, Simulation with Visual SLAM and AweSim, 2nd ed.
-- [2] I.I. Trub, Object-Oriented Modeling in C++: A Training Course. St. Petersburg: Piter, 2006
module Model (model1, model2, model3) where
import Control.Monad
import Control.Monad.Trans
import Control.Arrow
import Data.Array
import Simulation.Aivika
import qualified Simulation.Aivika.Resource as R
data LightTime =
LightTime { greenLightTime1 :: Double,
greenLightTime2 :: Double }
model :: LightTime -> Simulation Results
model lightTime = do
let greenLightTime =
array (1, 2)
[(1, return $ greenLightTime1 lightTime :: Event Double),
(2, return $ greenLightTime2 lightTime :: Event Double)]
waitTime1 <- newRef emptySamplingStats
waitTime2 <- newRef emptySamplingStats
let waitTime =
array (1, 2) [(1, waitTime1), (2, waitTime2)]
start1 <-
runEventInStartTime $
R.newFCFSResource 1
start2 <-
runEventInStartTime $
R.newFCFSResource 1
let start =
array (1, 2) [(1, start1), (2, start2)]
light1 <- newGateClosed
light2 <- newGateClosed
let stream1 = randomExponentialStream 9
stream2 = randomExponentialStream 12
runProcessInStartTime $
flip consumeStream stream1 $ \x ->
liftEvent $
runProcess $
do R.requestResource start1
awaitGateOpened light1
t <- liftDynamics time
liftEvent $
modifyRef waitTime1 $
addSamplingStats (t - arrivalTime x)
when (t > arrivalTime x) $
holdProcess 2
R.releaseResource start1
runProcessInStartTime $
flip consumeStream stream2 $ \x ->
liftEvent $
runProcess $
do R.requestResource start2
awaitGateOpened light2
t <- liftDynamics time
liftEvent $
modifyRef waitTime2 $
addSamplingStats (t - arrivalTime x)
when (t > arrivalTime x) $
holdProcess 2
R.releaseResource start2
let lighting =
do holdProcess 55
liftEvent $
openGate light1
holdProcess $
greenLightTime1 lightTime
liftEvent $
closeGate light1
holdProcess 55
liftEvent $
openGate light2
holdProcess $
greenLightTime2 lightTime
liftEvent $
closeGate light2
lighting
runProcessInStartTime lighting
return $
results
[resultSource
"start" "Start Resource"
start,
--
resultSource
"waitTime" "Wait Time"
waitTime,
--
resultSource
"greenLightTime" "Green Light Time"
greenLightTime]
modelSummary :: LightTime -> Simulation Results
modelSummary lightTime =
fmap resultSummary $ model lightTime
lightTime1 = LightTime 60 45
lightTime2 = LightTime 80 60
lightTime3 = LightTime 40 30
model1 = model lightTime1
model2 = model lightTime2
model3 = model lightTime3
modelSummary1 = fmap resultSummary model1
modelSummary2 = fmap resultSummary model2
modelSummary3 = fmap resultSummary model3
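-- A sketch of running one of the models; the 'Specs' fields and the printing
-- helper below follow the usual Aivika API and are assumptions, not part of
-- this module:
--
-- > specs = Specs { spcStartTime = 0, spcStopTime = 3600, spcDT = 1,
-- >                 spcMethod = RungeKutta4,
-- >                 spcGeneratorType = SimpleGenerator }
-- >
-- > main = printSimulationResultsInStopTime printResultSourceInEnglish
-- >          model1 specs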
| dsorokin/aivika-experiment-chart | examples/SingleLaneTraffic/Model.hs | bsd-3-clause | 4,540 | 0 | 16 | 1,119 | 810 | 404 | 406 | 98 | 1 |
helloTo "Yoshikuni" = "Good morning, sir."
helloTo n = "Hello, " ++ n ++ "!"
| YoshikuniJujo/funpaala | samples/05_function/helloTo.hs | bsd-3-clause | 77 | 0 | 6 | 15 | 25 | 12 | 13 | 2 | 1 |
module Intel.SGX.Types where
import Text.Printf
import qualified Data.ByteString.Lazy as L
import Data.Word (Word64, Word32, Word16, Word8)
data SECS = SECS {
secsSize :: Word64 -- Size of enclave in bytes; must be power of 2
, secsBaseAddr :: Word64 -- Enclave Base Linear Address must be naturally aligned to size
, secsSSAFrameSize :: Word32 -- Size of one SSA frame in pages (including XSAVE, pad, GPR, and conditionally MISC).
  , secsMiscSelect :: MiscSelect -- See MiscSelect data type
, secsReserved_byte24_47 :: L.ByteString -- Reserved bytes. Set to zero
, secsAttr :: Attributes -- See Attributes data type
, secsMrEnclave :: L.ByteString -- 256-bit SHA256 hash
, secsReserved_byte96_127 :: L.ByteString -- Reserved bytes. Set to zero
, secsMrSigner :: L.ByteString -- 256-bit SHA256 hash of signer public key *after* enclave sig was verified
, secsReserved_byte160_255 :: L.ByteString -- Reserved bytes. Set to zero
, secsISVProdId :: Word16 -- Product ID of enclave
, secsISVSVN :: Word16 -- Security version number (SVN) of the enclave
, secsReserved_byte260_4095 :: L.ByteString -- 3836 bytes of wasted space
}
data MiscSelect = MiscSelect {
miscExInfo :: Bool -- Report information about page fault and general protection exception that occurred inside an enclave
, miscReserved_bit1_32 :: Word32 -- Wasted space
} deriving (Eq)
data Attributes = Attributes {
attrInit :: Bool -- if the enclave has been initialized by EINIT
  , attrDebug :: Bool -- If 1, the enclave permits a debugger to read and write enclave data
, attrMode64Bit :: Bool -- Enclave runs in 64-bit mode
, attrReserved_bit3 :: Bool -- Must be zero
, attrProvisionKey :: Bool -- Provisioning Key is available from EGETKEY
, attrEinitTokenKey :: Bool -- EINIT token key is available from EGETKEY
, attrReserved_bit6_63 :: L.ByteString -- Reserved. Set to zero.
, attrXFRM :: XFRM -- See XFRM data type
}deriving(Eq, Show)
data XFRM = XFRM {
xfrmEnabled :: Bool
, xfrmXCR0 :: Word64 -- Valid value of XCR0
, xfrmHasXSave :: Bool -- Does the CPU has XSAVE instruction
} deriving(Eq, Show)
data PageInfo = PageInfo {
pgEnclaveLinAddr :: Word64 -- Enclave linear address.
, pgSourceAddr :: Word64 -- Effective address of the page where contents are located.
, pgSecInfo :: Word64 -- Effective address of the SECINFO or PCMD
, pgSecs :: Word64 -- Effective address of EPC slot that currently contains the SECS
}
data SecInfo = SecInfo {
siFlags :: SecInfoFlags
}
data SecInfoFlags = SecInfoFlags {
sifIsRead :: Bool
, sifIsWrite :: Bool
, sifIsExecute :: Bool
, sifIsPending :: Bool
, sifIsModified :: Bool
, sifHasPermRestriction :: Bool
, sifReserved_bit6_7 :: Word8
, sifPageType :: PageType
, sifReserved_bit16_64 :: Word64
}
data PageType = PT_SECS
| PT_TCS
| PT_REG
| PT_VA
| PT_TRIM
deriving (Show, Eq)
instance Enum PageType where
toEnum 0 = PT_SECS
toEnum 1 = PT_TCS
toEnum 2 = PT_REG
toEnum 3 = PT_VA
toEnum 4 = PT_TRIM
toEnum _ = undefined
fromEnum PT_SECS = 0
fromEnum PT_TCS = 1
fromEnum PT_REG = 2
fromEnum PT_VA = 3
fromEnum PT_TRIM = 4
data PCMD = PCMD {
pcmdSecInfo :: SecInfo
, pcmdEnclaveId :: Word64
, pcmdReserved_byte72_111 :: L.ByteString
, pcmdMac :: L.ByteString -- 16-Bytes
}
data SigStruct = SigStruct{
ssHeader1 :: SigStructHeader -- 16 bytes. Signed
, ssVendor :: SigStructVendor -- 4 bytes. Signed
, ssBuildDate :: SigStructDate -- 4 bytes. Signed
, ssHeader2 :: SigStructHeader -- 16 bytes. Signed
, ssSwDefined :: Word32 -- 4 bytes. Signed
, ssReserved_byte44_127 :: L.ByteString -- 84 bytes of zero. Signed
, ssModulus :: Integer -- 384 bytes. Not signed
, ssExponent :: Word32 -- 4 bytes. Not signed
, ssSignature :: Integer -- 384 bytes. Not signed
, ssMiscSelect :: MiscSelect -- 4 bytes. Signed
, ssMiscMask :: MiscSelect -- 4 bytes. Signed
, ssReserved_byte908_927 :: L.ByteString -- 20 bytes of zero. Signed
, ssAttributes :: Attributes -- 16 bytes. Signed
, ssAttributesMask :: Attributes -- 16 bytes. Signed
, ssEnclaveHash :: L.ByteString -- 32 bytes of SHA256 of enclave. Signed
, ssReserved_byte992_1023 :: L.ByteString -- 32 bytes of zero. Signed
, ssIsvProdId :: Word16 -- 2 bytes. Signed
, ssIsvSvn :: Word16 -- 2 bytes. Signed
, ssReserved_byte1028_1039 :: L.ByteString -- 12 bytes of zero. Not signed
, ssQ1 :: Integer -- 384 bytes of Q1
  , ssQ2 :: Integer -- 384 bytes of Q2
}
data EInitToken = EInitToken {
eitDebug :: Bool -- 4 Bytes. MACed
, eitReserved_byte4_47 :: L.ByteString -- 44 Bytes of Zero. MACed
, eitAttributes :: Attributes -- 16 Bytes. MACed
, eitMrEnclave :: L.ByteString -- 32 Bytes. MACed
, eitReserved_byte96_127 :: L.ByteString -- 32 Bytes of Zero. MACed
, eitMtSigner :: L.ByteString -- 32 Bytes. MACed
, eitReserved_byte160_191 :: L.ByteString -- 32 Bytes. MACed.
, eitCpuSvnLe :: CPUSVN -- 16 Bytes. Not MACed
, eitIsvProdIdLe :: Word16 -- 2 Bytes. Not MACed
, eitIsvSvnLe :: Word16 -- 2 Bytes. Not MACed
, eitReserved_byte212_235 :: L.ByteString -- Reserved. Not MACed
, eitMaskedMiscSelectLe :: Word32 -- Returned by the Launch Enclave
, eitMaskedAttributes :: Attributes -- Returned by the Launch Enclave
, eitKeyId :: L.ByteString -- 32-bytes of KeyID protection. Not MACed
, eitMAC :: L.ByteString -- 16 bytes of final MAC.
}
data SigStructHeader = SSHeader{
ssHeaderValue :: Integer
}
ssHeaderVal1 :: SigStructHeader
ssHeaderVal1 = SSHeader 0x06000000E10000000000010000000000
ssHeaderVal2 :: SigStructHeader
ssHeaderVal2 = SSHeader 0x01010000600000006000000001000000
data SigStructVendor = SSVendorIntel
| SSVendorOther
deriving (Show, Eq)
instance Enum SigStructVendor where
toEnum 0x8086 = SSVendorIntel
toEnum 0x0 = SSVendorOther
toEnum _ = undefined
fromEnum SSVendorIntel = 0x8086
fromEnum SSVendorOther = 0x0
data SigStructDate = SSDate {
ssYear :: Year
, ssMonth :: Month
, ssDay :: Day
} deriving (Show, Eq)
data Year = Year {
y1 :: Word8
, y2 :: Word8
, y3 :: Word8
, y4 :: Word8
} deriving (Eq)
instance Show Year where
show (Year a1 a2 a3 a4) =
printf "%.1x%.1x%.1x%.1x" a1 a2 a3 a4
data Month = Jan | Feb | Mar | Apr
| May | Jun | Jul | Aug
| Sep | Oct | Nov | Dec
deriving( Show, Eq)
data Day = Day Word8 deriving (Eq)
instance Enum Day where
succ (Day x) = Day $ if x == 31
then 0
else succ x
pred (Day x) = Day $ if x == 0
then 31
else pred x
toEnum = Day . fromIntegral
fromEnum (Day x) = fromIntegral x
instance Show Day where
show (Day d) = show d
data TCS = TCS {
tcsReserved_byte0_7 :: Word64
, tcsFlags :: TCSFlags
, tcsOSSA :: Word64
, tcsCSSA :: Word32
, tcsNSSA :: Word32
, tcsOentry :: Word64
, tcsAep :: Word64
, tcsOFSBasSgx :: Word64
, tcsOGSBasSgx :: Word64
, tcsFSLimit :: Word32
, tcsGSLimit :: Word32
, tcsReserved_byte72_4095 :: L.ByteString
}
data TCSFlags = TCSFlags {
tcsFlagsDebugOptIn :: Bool
, tcsFlagsReserved_bit1_63 :: Word64
}
data SSAFrame = SSAFrame {
ssaXsave :: L.ByteString
, ssaPad :: L.ByteString
, ssaMisc :: L.ByteString
, ssaGprSgx :: L.ByteString
}
data GPRSGX = GPRSGX {
gprRAX :: Word64
, gprRCX :: Word64
, gprRDX :: Word64
, gprRBX :: Word64
, gprRSP :: Word64
, gprRBP :: Word64
, gprRSI :: Word64
, gprRDI :: Word64
, gprR8 :: Word64
, gprR9 :: Word64
, gprR10 :: Word64
, gprR11 :: Word64
, gprR12 :: Word64
, gprR13 :: Word64
, gprR14 :: Word64
, gprR15 :: Word64
, gprRFLAGS :: Word64
, gprRIP :: Word64
, gprURSP :: Word64
, gprURBP :: Word64
, gprExitInfo :: ExitInfo
, gprReserved_byte164_167 :: Word32
, gprFsBase :: Word64
, gprGsBase :: Word64
}
data ExitInfo = ExitInfo {
eiVector :: ExcptVector
, eiType :: ExitInfoType
, eiReserved_bit11_30 :: Word32
, eiValid :: Bool
}
data ExitInfoType = ExitTypeHwExcept |
ExitTypeSwExcept
instance Enum ExitInfoType where
fromEnum ExitTypeHwExcept = 0x3
fromEnum ExitTypeSwExcept = 0x6
toEnum 0x3 = ExitTypeHwExcept
toEnum 0x6 = ExitTypeSwExcept
toEnum _ = undefined
data ExcptVector = DividerExcpt
| DebugExcpt
| BreakpointExcpt
| BoundRangeExceedExcpt
| InvalidOpCodeExcpt
| GeneralProtectionExcpt
| PageFaultExcpt
| FPUErrorExcept
| AlignmentCheckExcept
| SIMDException
deriving (Show, Eq)
instance Enum ExcptVector where
fromEnum DividerExcpt = 0
fromEnum DebugExcpt = 1
fromEnum BreakpointExcpt = 3
fromEnum BoundRangeExceedExcpt = 5
fromEnum InvalidOpCodeExcpt = 6
fromEnum GeneralProtectionExcpt = 13
fromEnum PageFaultExcpt = 14
fromEnum FPUErrorExcept = 16
fromEnum AlignmentCheckExcept = 17
fromEnum SIMDException = 19
toEnum 0 = DividerExcpt
toEnum 1 = DebugExcpt
toEnum 3 = BreakpointExcpt
toEnum 5 = BoundRangeExceedExcpt
toEnum 6 = InvalidOpCodeExcpt
toEnum 13 = GeneralProtectionExcpt
toEnum 14 = PageFaultExcpt
toEnum 16 = FPUErrorExcept
toEnum 17 = AlignmentCheckExcept
toEnum 19 = SIMDException
toEnum _ = undefined
data Report = Report {
repCpuSvn :: CPUSVN -- 16 Bytes
, repMiscSelect :: MiscSelect -- 4 Bytes
  , repReserved_byte20_47 :: L.ByteString -- reserved 28 bytes of Zero
, repAttributes :: Attributes -- 16 bytes
, repMrEnclave :: L.ByteString -- 32 bytes
, repReserved_byte96_127 :: L.ByteString -- 32 bytes of zero
  , repMrSigner :: L.ByteString -- 32 bytes of signer
  , repReserved_byte160_255 :: L.ByteString -- 96 bytes of zero
, repIsvProdId :: Word16 -- 2 bytes
, repIsvSvn :: Word16 -- 2 bytes
, repReserved_byte260_319 :: L.ByteString -- 60 bytes of zero
, repReportData :: L.ByteString -- 64 bytes of report data
, repKeyId :: L.ByteString -- 32-bytes of key id
, repMAC :: L.ByteString -- 16-bytes of Mac
}
data CPUSVN = CPUSVN {
cpuSvnValue :: L.ByteString
}
data TargetInfo = TargetInfo {
tiTargetMrEnclave :: L.ByteString -- 32 bytes of MrEnclave
, tiAttributes :: Attributes -- 16 bytes of attributes
, tiReserved_byte48_51 :: Word32 -- 4 bytes reserved
, tiMiscSelect :: MiscSelect -- 4 bytes of MiscSelect
, tiReserved_byte56_511 :: L.ByteString -- Reserved to zero
}
data KeyRequest = KeyRequest {
krKeyName :: KeyName -- 2 bytes
, krKeyPolicy :: KeyPolicy -- 2 bytes
, krIsvSvn :: Word16 -- 2 bytes
, krReserved_byte6_7 :: Word16 -- 2 bytes
, krCpuSvn :: CPUSVN -- 16 bytes
, krAttributeMask :: Attributes -- 16 bytes
, krKeyId :: L.ByteString -- 32 bytes
, krMiscMask :: MiscSelect -- 4 bytes
, krReserved_byte76_511 :: L.ByteString -- 436 bytes of zero
}
data KeyName = EINIT_TOKEN_KEY
| PROVISION_KEY
| PROVISION_SEAL_KEY
| REPORT_KEY
| SEAL_KEY
deriving (Show, Eq)
instance Enum KeyName where
toEnum 0 = EINIT_TOKEN_KEY
toEnum 1 = PROVISION_KEY
toEnum 2 = PROVISION_SEAL_KEY
toEnum 3 = REPORT_KEY
toEnum 4 = SEAL_KEY
toEnum _ = undefined
fromEnum EINIT_TOKEN_KEY = 0
fromEnum PROVISION_KEY = 1
fromEnum PROVISION_SEAL_KEY = 2
fromEnum REPORT_KEY = 3
fromEnum SEAL_KEY = 4
data KeyPolicy = KeyPolicy {
kpIsMrEnclave :: Bool
, kpIsMrSigner :: Bool
, kpReserved_bit2_15 :: Word16
}
| axelexic/sgx-bunny | src/Intel/SGX/Types.hs | bsd-3-clause | 13,570 | 0 | 9 | 4,720 | 2,323 | 1,418 | 905 | 307 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : TestSuite.Existentials.CRCPolynomial
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : experimental
--
-- Test suite for Data.SBV.Examples.Existentials.CRCPolynomial
-----------------------------------------------------------------------------
module TestSuite.Existentials.CRCPolynomial(testSuite) where
import Data.SBV
import Data.SBV.Internals
import Data.SBV.Examples.Existentials.CRCPolynomial
import SBVTest
-- Test suite
testSuite :: SBVTestSuite
testSuite = mkTestSuite $ \goldCheck -> test [
"crcPolyExist" ~: pgm `goldCheck` "crcPolyExist.gold"
]
where pgm = runSymbolic (True, Nothing) $ do
p <- exists "poly"
s <- do sh <- forall "sh"
sl <- forall "sl"
return (sh, sl)
r <- do rh <- forall "rh"
rl <- forall "rl"
return (rh, rl)
output $ sbvTestBit p 0 &&& crcGood 4 p s r
| Copilot-Language/sbv-for-copilot | SBVUnitTest/TestSuite/Existentials/CRCPolynomial.hs | bsd-3-clause | 1,115 | 0 | 14 | 301 | 213 | 114 | 99 | 17 | 1 |
{-# LANGUAGE RecordWildCards, ConstraintKinds, FlexibleContexts #-}
module Erisbot.Plugins where
import Erisbot.Types
import Control.Monad
import Control.Monad.IO.Class
import Control.Lens
import Control.Concurrent.Lifted
import Control.Exception.Lifted
import Data.HashMap.Strict as HashMap
import System.Plugins.Load
import System.Mem.Weak
import System.Timeout
import System.FilePath
withCurrentPlugin :: BotMonad s bot => Plugin -> bot a -> bot a
withCurrentPlugin p b = do
prevPlugin <- use currentPlugin
bracket_ (currentPlugin .= Just p) (currentPlugin .= prevPlugin) b
getPlugin :: FilePath -> Bot s (LoadStatus Plugin)
getPlugin path = do
conf <- readMVar =<< use botConf
let pluginD = pluginDir conf
path'
| isRelative path = pluginD </> path
| otherwise = path
includes = pluginDir conf : pluginIncludeDirs conf
  liftIO $ load_ path' includes "plugin"
loadPlugin :: String -> FilePath -> Bot s (LoadStatus Plugin)
loadPlugin pluginName filePath = do
debugMsg $ "Loading " ++ pluginName
loadResult <- getPlugin filePath
case loadResult of
LoadFailure errs -> do
debugMsg $ "The following errors occured when loading " ++ filePath
mapM_ debugMsg errs
LoadSuccess mod plugin -> do
debugMsg $ "Loading " ++ pluginName ++ " successful"
let pluginState = PluginState plugin mod []
plugsVar <- use pluginMap
modifyMVar_ plugsVar $ \plugMap -> do
plugMap' <- unloadPlugin_ pluginName plugMap
return $ HashMap.insert pluginName pluginState plugMap'
withCurrentPlugin plugin $ forkBot_ (onLoad plugin)
return loadResult
unloadPlugin_ :: String -> HashMap PluginName PluginState
-> Bot s (HashMap PluginName PluginState)
unloadPlugin_ pluginName pluginMap = do
case HashMap.lookup pluginName pluginMap of
Just PluginState{..} -> do
forkBot_ $ do
currentPlugin .= Just pluginData
s <- copyBotState ()
liftIO $ do
void . timeout 10000000 . runBot s . onUnload $ pluginData
forM_ pluginThreads $
maybe (return ()) killThread <=< deRefWeak
return $ HashMap.delete pluginName pluginMap
Nothing -> return pluginMap
unloadPlugin :: String -> Bot s ()
unloadPlugin pluginName = do
plugsVar <- use pluginMap
modifyMVar_ plugsVar (unloadPlugin_ pluginName)
| kallisti-dev/erisbot | src/Erisbot/Plugins.hs | bsd-3-clause | 2,395 | 0 | 23 | 552 | 699 | 330 | 369 | 61 | 2 |
{-# LANGUAGE FlexibleContexts, GeneralizedNewtypeDeriving, OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | Module : Network.MPD.Core
-- Copyright : (c) Ben Sinclair 2005-2009, Joachim Fasting 2010
-- License : MIT (see LICENSE)
-- Maintainer : Joachim Fasting <[email protected]>
-- Stability : alpha
--
-- The core datatypes and operations are defined here, including the
-- primary instance of the 'MonadMPD' class, 'MPD'.
module Network.MPD.Core (
-- * Classes
MonadMPD(..),
-- * Data types
MPD, MPDError(..), ACKType(..), Response, Host, Port, Password,
-- * Running
withMPDEx,
-- * Interacting
getResponse, kill,
) where
import Network.MPD.Util
import Network.MPD.Core.Class
import Network.MPD.Core.Error
import Data.Char (isDigit)
import Control.Applicative (Applicative(..), (<$>), (<*))
import qualified Control.Exception as E
import Control.Monad (ap, unless)
import Control.Monad.Error (ErrorT(..), MonadError(..))
import Control.Monad.Reader (ReaderT(..), ask)
import Control.Monad.State (StateT, MonadIO(..), modify, gets, evalStateT)
import qualified Data.Foldable as F
import Network (PortID(..), withSocketsDo, connectTo)
import System.IO (Handle, hPutStrLn, hReady, hClose, hFlush)
import System.IO.Error (isEOFError, tryIOError)
import qualified System.IO.UTF8 as U
import Text.Printf (printf)
import qualified Prelude
import Prelude hiding (break, drop, dropWhile, read)
import Data.ByteString.Char8 (ByteString, isPrefixOf, break, drop, dropWhile)
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.UTF8 as UTF8
--
-- Data types.
--
type Host = String
type Port = Integer
--
-- IO based MPD client implementation.
--
-- | The main implementation of an MPD client. It actually connects
-- to a server and interacts with it.
--
-- To use the error throwing\/catching capabilities:
--
-- > import Control.Monad.Error (throwError, catchError)
--
-- To run IO actions within the MPD monad:
--
-- > import Control.Monad.Trans (liftIO)
newtype MPD a =
MPD { runMPD :: ErrorT MPDError
(StateT MPDState
(ReaderT (Host, Port) IO)) a
} deriving (Functor, Monad, MonadIO, MonadError MPDError)
instance Applicative MPD where
(<*>) = ap
pure = return
instance MonadMPD MPD where
open = mpdOpen
close = mpdClose
send = mpdSend
getPassword = MPD $ gets stPassword
setPassword pw = MPD $ modify (\st -> st { stPassword = pw })
getVersion = MPD $ gets stVersion
-- | Inner state for MPD
data MPDState =
MPDState { stHandle :: Maybe Handle
, stPassword :: String
, stVersion :: (Int, Int, Int)
}
-- | A response is either an 'MPDError' or some result.
type Response = Either MPDError
-- | The most configurable API for running an MPD action.
withMPDEx :: Host -> Port -> Password -> MPD a -> IO (Response a)
withMPDEx host port pw x = withSocketsDo $
runReaderT (evalStateT (runErrorT . runMPD $ open >> (x <* close)) initState)
(host, port)
where initState = MPDState Nothing pw (0, 0, 0)
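-- A minimal usage sketch, relying only on functions exported from this
-- module (6600 is the default MPD port):
--
-- > withMPDEx "localhost" 6600 "" (getResponse "status")
-- > -- :: IO (Response [ByteString])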
mpdOpen :: MPD ()
mpdOpen = MPD $ do
(host, port) <- ask
runMPD close
mHandle <- liftIO (safeConnectTo host port)
modify (\st -> st { stHandle = mHandle })
F.forM_ mHandle $ \_ -> runMPD checkConn >>= (`unless` runMPD close)
where
safeConnectTo host@('/':_) _ =
(Just <$> connectTo "" (UnixSocket host))
`E.catch` (\(_ :: E.SomeException) -> return Nothing)
safeConnectTo host port =
(Just <$> connectTo host (PortNumber $ fromInteger port))
`E.catch` (\(_ :: E.SomeException) -> return Nothing)
checkConn = do
[msg] <- send ""
if "OK MPD" `isPrefixOf` msg
then MPD $ checkVersion $ parseVersion msg
else return False
checkVersion Nothing = throwError $ Custom "Couldn't determine MPD version"
checkVersion (Just version)
| version < requiredVersion =
throwError $ Custom $ printf
"MPD %s is not supported, upgrade to MPD %s or above!"
(formatVersion version) (formatVersion requiredVersion)
| otherwise = do
modify (\st -> st { stVersion = version })
return True
where
requiredVersion = (0, 15, 0)
parseVersion = parseTriple '.' parseNum . dropWhile (not . isDigit)
formatVersion :: (Int, Int, Int) -> String
formatVersion (x, y, z) = printf "%d.%d.%d" x y z
mpdClose :: MPD ()
mpdClose =
MPD $ do
mHandle <- gets stHandle
F.forM_ mHandle $ \h -> do
modify $ \st -> st{stHandle = Nothing}
r <- liftIO $ sendClose h
F.forM_ r throwError
where
sendClose handle =
(hPutStrLn handle "close" >> hReady handle >> hClose handle >> return Nothing)
`E.catch` handler
handler err
| isEOFError err = return Nothing
| otherwise = (return . Just . ConnectionError) err
mpdSend :: String -> MPD [ByteString]
mpdSend str = send' `catchError` handler
where
handler err
| ConnectionError e <- err, isRetryable e = mpdOpen >> send'
| otherwise = throwError err
send' :: MPD [ByteString]
send' = MPD $ gets stHandle >>= maybe (throwError NoMPD) go
go handle = (liftIO . tryIOError $ do
unless (null str) $ U.hPutStrLn handle str >> hFlush handle
getLines handle [])
>>= either (\err -> modify (\st -> st { stHandle = Nothing })
>> throwError (ConnectionError err)) return
getLines :: Handle -> [ByteString] -> IO [ByteString]
getLines handle acc = do
l <- B.hGetLine handle
if "OK" `isPrefixOf` l || "ACK" `isPrefixOf` l
then (return . reverse) (l:acc)
else getLines handle (l:acc)
-- | Re-connect and retry for these Exceptions.
isRetryable :: E.IOException -> Bool
isRetryable e = or [ isEOFError e ]
--
-- Other operations.
--
-- | Kill the server. Obviously, the connection is then invalid.
kill :: (MonadMPD m) => m ()
kill = send "kill" >> return ()
-- | Send a command to the MPD server and return the result.
getResponse :: (MonadMPD m) => String -> m [ByteString]
getResponse cmd = (send cmd >>= parseResponse) `catchError` sendpw
where
sendpw e@(ACK Auth _) = do
pw <- getPassword
if null pw then throwError e
else send ("password " ++ pw) >>= parseResponse
>> send cmd >>= parseResponse
sendpw e =
throwError e
-- Consume response and return a Response.
parseResponse :: (MonadError MPDError m) => [ByteString] -> m [ByteString]
parseResponse xs
| null xs = throwError $ NoMPD
| "ACK" `isPrefixOf` x = throwError $ parseAck x
| otherwise = return $ Prelude.takeWhile ("OK" /=) xs
where
x = head xs
-- Turn MPD ACK into the corresponding 'MPDError'
parseAck :: ByteString -> MPDError
parseAck s = ACK ack (UTF8.toString msg)
where
ack = case code of
2 -> InvalidArgument
3 -> InvalidPassword
4 -> Auth
5 -> UnknownCommand
50 -> FileNotFound
51 -> PlaylistMax
52 -> System
53 -> PlaylistLoad
54 -> Busy
55 -> NotPlaying
56 -> FileExists
_ -> UnknownACK
(code, _, msg) = splitAck s
-- Break an ACK into (error code, current command, message).
-- ACKs are of the form:
-- ACK [error@command_listNum] {current_command} message_text\n
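-- e.g. "ACK [50@0] {play} No such song" is split into error code 50,
-- command "play" and the trailing message text.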
splitAck :: ByteString -> (Int, ByteString, ByteString)
splitAck s = (read code, cmd, msg)
where
(code, notCode) = between '[' '@' s
(cmd, notCmd) = between '{' '}' notCode
msg = drop 1 $ dropWhile (' ' ==) notCmd
-- take whatever is between 'f' and 'g'.
between a b xs = let (_, y) = break (== a) xs
in break (== b) (drop 1 y)
| beni55/libmpd-haskell | src/Network/MPD/Core.hs | mit | 8,503 | 0 | 16 | 2,636 | 2,284 | 1,256 | 1,028 | 160 | 12 |
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
-- |
-- Module : Aura.Commands.B
-- Copyright : (c) Colin Woodbury, 2012 - 2020
-- License : GPL3
-- Maintainer: Colin Woodbury <[email protected]>
--
-- Handle all @-B@ flags - those which involve saved package states.
module Aura.Commands.B
( saveState
, restoreState
, cleanStates
, listStates
) where
import Aura.Core (warn)
import Aura.IO
import Aura.Languages
import Aura.Settings
import Aura.State
import RIO
import RIO.Directory
import RIO.FilePath
import qualified RIO.List as L
import qualified RIO.NonEmpty as NEL
import qualified RIO.Text as T
---
-- | Remove all but the newest @n@ package states. Any "pinned" states will also remain.
cleanStates :: Settings -> Word -> IO ()
cleanStates ss (fromIntegral -> n) = do
stfs <- reverse <$> getStateFiles
(pinned, others) <- L.partition p <$> traverse (\sf -> (sf,) <$> readState sf) stfs
warn ss $ cleanStates_4 (length stfs)
unless (null pinned) . warn ss $ cleanStates_6 (length pinned)
forM_ (NEL.nonEmpty stfs) $ \stfs' -> do
let mostRecent = T.pack . takeFileName $ NEL.head stfs'
warn ss $ cleanStates_5 mostRecent
okay <- optionalPrompt ss $ cleanStates_2 n
if not okay
then warn ss cleanStates_3
else traverse_ (removeFile . fst) . drop n $ others
where
p :: (a, Maybe PkgState) -> Bool
p = maybe False pinnedOf . snd
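-- For example, with states s1 (newest) .. s6 on disk where s5 is pinned and
-- @n = 3@, the unpinned s1..s3 and the pinned s5 are kept, while s4 and s6
-- are removed.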
-- | The result of @-Bl@.
listStates :: IO ()
listStates = getStateFiles >>= traverse_ (putTextLn . T.pack)
| aurapm/aura | aura/exec/Aura/Commands/B.hs | gpl-3.0 | 1,597 | 0 | 16 | 376 | 425 | 226 | 199 | 35 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.ECS.ListContainerInstances
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Returns a list of container instances in a specified cluster.
--
-- <http://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_ListContainerInstances.html>
module Network.AWS.ECS.ListContainerInstances
(
-- * Request
ListContainerInstances
-- ** Request constructor
, listContainerInstances
-- ** Request lenses
, lciCluster
, lciMaxResults
, lciNextToken
-- * Response
, ListContainerInstancesResponse
-- ** Response constructor
, listContainerInstancesResponse
-- ** Response lenses
, lcirContainerInstanceArns
, lcirNextToken
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.ECS.Types
import qualified GHC.Exts
data ListContainerInstances = ListContainerInstances
{ _lciCluster :: Maybe Text
, _lciMaxResults :: Maybe Int
, _lciNextToken :: Maybe Text
} deriving (Eq, Ord, Read, Show)
-- | 'ListContainerInstances' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'lciCluster' @::@ 'Maybe' 'Text'
--
-- * 'lciMaxResults' @::@ 'Maybe' 'Int'
--
-- * 'lciNextToken' @::@ 'Maybe' 'Text'
--
listContainerInstances :: ListContainerInstances
listContainerInstances = ListContainerInstances
{ _lciCluster = Nothing
, _lciNextToken = Nothing
, _lciMaxResults = Nothing
}
-- | The short name or full Amazon Resource Name (ARN) of the cluster that hosts
-- the container instances you want to list. If you do not specify a cluster,
-- the default cluster is assumed.
lciCluster :: Lens' ListContainerInstances (Maybe Text)
lciCluster = lens _lciCluster (\s a -> s { _lciCluster = a })
-- | The maximum number of container instance results returned by 'ListContainerInstances' in paginated output. When this parameter is used, 'ListContainerInstances'
-- only returns 'maxResults' results in a single page along with a 'nextToken'
-- response element. The remaining results of the initial request can be seen by
-- sending another 'ListContainerInstances' request with the returned 'nextToken'
-- value. This value can be between 1 and 100. If this parameter is not used,
-- then 'ListContainerInstances' returns up to 100 results and a 'nextToken' value
-- if applicable.
lciMaxResults :: Lens' ListContainerInstances (Maybe Int)
lciMaxResults = lens _lciMaxResults (\s a -> s { _lciMaxResults = a })
-- | The 'nextToken' value returned from a previous paginated 'ListContainerInstances'
-- request where 'maxResults' was used and the results exceeded the value of that
-- parameter. Pagination continues from the end of the previous results that
-- returned the 'nextToken' value. This value is 'null' when there are no more
-- results to return.
lciNextToken :: Lens' ListContainerInstances (Maybe Text)
lciNextToken = lens _lciNextToken (\s a -> s { _lciNextToken = a })
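-- A request-building sketch using the lenses above ('&' and '?~' are the
-- usual lens operators; the cluster name is illustrative):
--
-- > listContainerInstances & lciCluster ?~ "default"
-- >                        & lciMaxResults ?~ 50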
data ListContainerInstancesResponse = ListContainerInstancesResponse
{ _lcirContainerInstanceArns :: List "containerInstanceArns" Text
, _lcirNextToken :: Maybe Text
} deriving (Eq, Ord, Read, Show)
-- | 'ListContainerInstancesResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'lcirContainerInstanceArns' @::@ ['Text']
--
-- * 'lcirNextToken' @::@ 'Maybe' 'Text'
--
listContainerInstancesResponse :: ListContainerInstancesResponse
listContainerInstancesResponse = ListContainerInstancesResponse
{ _lcirContainerInstanceArns = mempty
, _lcirNextToken = Nothing
}
-- | The list of container instance full Amazon Resource Name (ARN) entries for
-- each container instance associated with the specified cluster.
lcirContainerInstanceArns :: Lens' ListContainerInstancesResponse [Text]
lcirContainerInstanceArns =
lens _lcirContainerInstanceArns
(\s a -> s { _lcirContainerInstanceArns = a })
. _List
-- | The 'nextToken' value to include in a future 'ListContainerInstances' request.
-- When the results of a 'ListContainerInstances' request exceed 'maxResults', this
-- value can be used to retrieve the next page of results. This value is 'null'
-- when there are no more results to return.
lcirNextToken :: Lens' ListContainerInstancesResponse (Maybe Text)
lcirNextToken = lens _lcirNextToken (\s a -> s { _lcirNextToken = a })
instance ToPath ListContainerInstances where
toPath = const "/"
instance ToQuery ListContainerInstances where
toQuery = const mempty
instance ToHeaders ListContainerInstances
instance ToJSON ListContainerInstances where
toJSON ListContainerInstances{..} = object
[ "cluster" .= _lciCluster
, "nextToken" .= _lciNextToken
, "maxResults" .= _lciMaxResults
]
instance AWSRequest ListContainerInstances where
type Sv ListContainerInstances = ECS
type Rs ListContainerInstances = ListContainerInstancesResponse
request = post "ListContainerInstances"
response = jsonResponse
instance FromJSON ListContainerInstancesResponse where
parseJSON = withObject "ListContainerInstancesResponse" $ \o -> ListContainerInstancesResponse
<$> o .:? "containerInstanceArns" .!= mempty
<*> o .:? "nextToken"
| kim/amazonka | amazonka-ecs/gen/Network/AWS/ECS/ListContainerInstances.hs | mpl-2.0 | 6,260 | 0 | 12 | 1,214 | 690 | 417 | 273 | 76 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.CloudFront.GetDistributionConfig
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Get the configuration information about a distribution.
--
-- /See:/ <http://docs.aws.amazon.com/AmazonCloudFront/latest/APIReference/GetDistributionConfig.html AWS API Reference> for GetDistributionConfig.
module Network.AWS.CloudFront.GetDistributionConfig
(
-- * Creating a Request
getDistributionConfig
, GetDistributionConfig
-- * Request Lenses
, gdcId
-- * Destructuring the Response
, getDistributionConfigResponse
, GetDistributionConfigResponse
-- * Response Lenses
, gdcrsETag
, gdcrsDistributionConfig
, gdcrsResponseStatus
) where
import Network.AWS.CloudFront.Types
import Network.AWS.CloudFront.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | The request to get a distribution configuration.
--
-- /See:/ 'getDistributionConfig' smart constructor.
newtype GetDistributionConfig = GetDistributionConfig'
{ _gdcId :: Text
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'GetDistributionConfig' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gdcId'
getDistributionConfig
:: Text -- ^ 'gdcId'
-> GetDistributionConfig
getDistributionConfig pId_ =
GetDistributionConfig'
{ _gdcId = pId_
}
-- | The distribution\'s id.
gdcId :: Lens' GetDistributionConfig Text
gdcId = lens _gdcId (\ s a -> s{_gdcId = a});
instance AWSRequest GetDistributionConfig where
type Rs GetDistributionConfig =
GetDistributionConfigResponse
request = get cloudFront
response
= receiveXML
(\ s h x ->
GetDistributionConfigResponse' <$>
(h .#? "ETag") <*> (parseXML x) <*>
(pure (fromEnum s)))
instance ToHeaders GetDistributionConfig where
toHeaders = const mempty
instance ToPath GetDistributionConfig where
toPath GetDistributionConfig'{..}
= mconcat
["/2015-04-17/distribution/", toBS _gdcId, "/config"]
instance ToQuery GetDistributionConfig where
toQuery = const mempty
-- | The returned result of the corresponding request.
--
-- /See:/ 'getDistributionConfigResponse' smart constructor.
data GetDistributionConfigResponse = GetDistributionConfigResponse'
{ _gdcrsETag :: !(Maybe Text)
, _gdcrsDistributionConfig :: !(Maybe DistributionConfig)
, _gdcrsResponseStatus :: !Int
} deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'GetDistributionConfigResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gdcrsETag'
--
-- * 'gdcrsDistributionConfig'
--
-- * 'gdcrsResponseStatus'
getDistributionConfigResponse
:: Int -- ^ 'gdcrsResponseStatus'
-> GetDistributionConfigResponse
getDistributionConfigResponse pResponseStatus_ =
GetDistributionConfigResponse'
{ _gdcrsETag = Nothing
, _gdcrsDistributionConfig = Nothing
, _gdcrsResponseStatus = pResponseStatus_
}
-- | The current version of the configuration. For example: E2QWRUHAPOMQZL.
gdcrsETag :: Lens' GetDistributionConfigResponse (Maybe Text)
gdcrsETag = lens _gdcrsETag (\ s a -> s{_gdcrsETag = a});
-- | The distribution\'s configuration information.
gdcrsDistributionConfig :: Lens' GetDistributionConfigResponse (Maybe DistributionConfig)
gdcrsDistributionConfig = lens _gdcrsDistributionConfig (\ s a -> s{_gdcrsDistributionConfig = a});
-- | The response status code.
gdcrsResponseStatus :: Lens' GetDistributionConfigResponse Int
gdcrsResponseStatus = lens _gdcrsResponseStatus (\ s a -> s{_gdcrsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-cloudfront/gen/Network/AWS/CloudFront/GetDistributionConfig.hs | mpl-2.0 | 4,507 | 0 | 13 | 892 | 607 | 364 | 243 | 77 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Test.AWS.IAM.Internal
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
module Test.AWS.IAM.Internal where
import Test.AWS.Prelude
| olorin/amazonka | amazonka-iam/test/Test/AWS/IAM/Internal.hs | mpl-2.0 | 613 | 0 | 4 | 140 | 25 | 21 | 4 | 4 | 0 |
module Model.Comment
-- Types
( MaxDepth(..)
, NoCommentReason(..)
, addMaxDepth
-- Utility functions
, buildCommentForest
, buildCommentTree
, commentIsApproved
, commentIsEvenDepth
, commentIsFlagged
, commentIsOddDepth
, commentIsPrivate
, commentIsTopLevel
, makeCommentUsersSet
, makeApprovedComment
-- Database actions
, approveCommentDB
, deleteCommentDB
, deleteTagDB
, deleteTagsDB
, deleteTicketDB
, deleteTicketsDB
, editCommentDB
, flagCommentDB
, fetchCommentAncestorClosuresDB
, fetchCommentAncestorRetractsDB
, fetchCommentAncestorClosuresDB'
, fetchCommentAncestorRetractsDB'
, fetchCommentsAncestorClosuresDB
, fetchCommentsAncestorRetractsDB
, fetchCommentDB
, fetchCommentAllCurrentDescendantsDB
, fetchCommentAllDescendantsDB
, fetchCommentAncestorsDB
, fetchCommentCommentTagsDB
, fetchCommentCommentTagsInDB
, fetchCommentCountDB
, fetchCommentDepthDB
, fetchCommentDepthFromMaybeParentIdDB
, fetchCommentDepth404DB
, fetchCommentDescendantsDB
, fetchCommentDestinationDB
, fetchCommentFlaggingDB
, fetchCommentRethreadDB
, fetchCommentRethreadLastDB
, fetchCommentTagsDB
, fetchCommentTagCommentTagsDB
, fetchCommentsDescendantsDB
, fetchCommentsInDB
, fetchCommentsWithChildrenInDB
, fetchCommentTicketsDB
, fetchTagIdsDB
, fetchTagNamesDB
, filterCommentsDB
, insertTagsDB
, makeClaimedTicketMapDB
, makeCommentClosingMapDB
, makeCommentRetractingMapDB
, makeCommentRouteDB
, makeFlagMapDB
, makeTicketMapDB
, makeWatchMapDB
, postApprovedCommentDB
, postUnapprovedCommentDB
, rethreadCommentDB
, subFetchCommentAncestorsDB
, userClaimCommentDB
, userUnclaimCommentDB
) where
import Import
import qualified Model.Comment.Internal as Internal
import Model.Comment.Sql
import Model.Discussion
import Model.Notification
import Model.User.Internal
( sendPreferredUserNotificationDB, NotificationSender (..)
, NotificationReceiver (..) )
import qualified Control.Monad.State as State
import Control.Monad.Writer.Strict (tell)
import qualified Data.Foldable as F
import Data.List ((\\), nub)
import qualified Data.Map as M
import Data.Maybe (fromJust)
import qualified Data.Set as S
import qualified Data.Text as T
import Data.Tree
import Database.Esqueleto.Internal.Language (Insertion)
import qualified Database.Persist as P
import qualified Prelude
--------------------------------------------------------------------------------
-- Types
-- | A root comment (with its own URL) might not be displayed. Why?
data NoCommentReason
= CommentNotFound
| CommentPermissionDenied
data MaxDepth
= NoMaxDepth
| MaxDepth Int
instance Eq MaxDepth where
NoMaxDepth == NoMaxDepth = True
MaxDepth x == MaxDepth y = x == y
_ == _ = False
instance Ord MaxDepth where
compare NoMaxDepth (MaxDepth _) = GT
compare NoMaxDepth NoMaxDepth = EQ
compare (MaxDepth _) NoMaxDepth = LT
compare (MaxDepth x) (MaxDepth y) = compare x y
addMaxDepth :: MaxDepth -> Int -> MaxDepth
addMaxDepth NoMaxDepth _ = NoMaxDepth
addMaxDepth (MaxDepth x) y = MaxDepth (x + y)
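-- Note (illustrative examples, not present in the original module):
-- 'NoMaxDepth' compares greater than any finite depth, and adding to it is a
-- no-op:
--
-- > compare NoMaxDepth (MaxDepth 100)  -- GT
-- > addMaxDepth (MaxDepth 3) 2         -- MaxDepth 5
-- > addMaxDepth NoMaxDepth 2           -- NoMaxDepth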
--------------------------------------------------------------------------------
-- Utility functions
commentIsApproved :: Comment -> Bool
commentIsApproved = isJust . commentApprovedTs
commentIsTopLevel :: Comment -> Bool
commentIsTopLevel = (== 0) . commentDepth
commentIsEvenDepth :: Comment -> Bool
commentIsEvenDepth comment =
not (commentIsTopLevel comment) && commentDepth comment `mod` 2 == 1
commentIsOddDepth :: Comment -> Bool
commentIsOddDepth comment =
not (commentIsTopLevel comment) && not (commentIsEvenDepth comment)
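-- Note: the parity in the two predicates above is presumably display parity
-- rather than numeric parity; a reply at numeric depth 1 counts as "even
-- depth", and top-level comments (depth 0) are neither even nor odd.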
commentIsFlagged :: CommentId -> DB Bool
commentIsFlagged =
fmap (maybe False (const True)) . getBy . UniqueCommentFlagging
commentIsPrivate :: Comment -> Bool
commentIsPrivate comment = commentVisibility comment == VisPrivate
-- | Build a tree of comments, given the root and replies. The replies are
-- not necessarily direct or indirect descendants of the root, but rather
-- may be siblings, nephews, etc. This is done to greatly simplify the
-- calling code of this function.
--
-- THIS FUNCTION RELIES ON THE SORT ORDER OF THE REPLIES! Specifically,
-- they must be sorted in ascending-parent-id major, ascending-timestamp
-- minor order.
buildCommentTree :: Entity Comment -> [Entity Comment] -> Tree (Entity Comment)
buildCommentTree r rs = unfoldTree step (r,rs)
where
step :: (Entity Comment, [Entity Comment])
-> (Entity Comment, [(Entity Comment, [Entity Comment])])
step (root, replies) = (root, children_and_their_descendants)
where
descendants :: [Entity Comment]
descendants = dropWhile (not . isParentOf root) replies
-- children :: [Entity Comment]
-- children_descendants :: [Entity Comment]
(children, children_descendants) = span (isParentOf root) descendants
children_and_their_descendants :: [(Entity Comment, [Entity Comment])]
children_and_their_descendants = map (, children_descendants) children
isParentOf :: Entity Comment -> Entity Comment -> Bool
isParentOf (Entity parent_key _) (Entity _ child) =
Just parent_key == commentParent child
buildCommentForest :: [Entity Comment] -- root comments
-> [Entity Comment] -- replies comments
-> Forest (Entity Comment)
buildCommentForest roots replies = map (flip buildCommentTree replies) roots
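-- Illustrative example (hypothetical comments): with a root @R@ and replies
-- @[A, B, C]@, where @A@ and @B@ are children of @R@, @C@ is a child of @A@,
-- and the list is already in the required (parent id, created time) order,
-- 'buildCommentTree' produces
--
-- > Node R [Node A [Node C []], Node B []]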
-- | Construct a comment, auto-approved by 'this' User (because they are
-- established).
makeApprovedComment
:: UserId
-> DiscussionId
-> Maybe CommentId
-> Markdown
-> Int
-> Visibility
-> Language
-> YDB Comment
makeApprovedComment
user_id
discussion_id
parent_comment
comment_text
depth
visibility
language = do
now <- liftIO getCurrentTime
maybe_parent_visibility <- case parent_comment of
Nothing -> return Nothing
Just parent_comment_id ->
fmap (fmap commentVisibility) $ get parent_comment_id
let parent_visibility = fromMaybe VisPublic maybe_parent_visibility
return $ Comment
now
(Just now)
(Just user_id)
discussion_id
parent_comment
user_id
comment_text
depth
(min visibility parent_visibility)
language
-- | Get the set of Users that have posted the given Foldable of comments.
makeCommentUsersSet :: Foldable f => f (Entity Comment) -> Set UserId
makeCommentUsersSet = F.foldMap (S.singleton . commentUser . entityVal)
--------------------------------------------------------------------------------
-- Database actions
approveCommentDB :: UserId -> CommentId -> Comment -> SDB ()
approveCommentDB user_id comment_id comment = do
now <- liftIO getCurrentTime
-- Do an in-memory adjustment of the comment with exactly the same changes
    -- as 'updateComment' below (so we can avoid hitting the database).
let updated_comment = comment
{ commentApprovedTs = Just now
, commentApprovedBy = Just user_id
}
lift $ do
updateComment now
deleteUnapprovedCommentNotifications
tell [ ECommentPosted comment_id updated_comment
, ECommentApproved comment_id updated_comment
]
where
updateComment now =
update $ \c -> do
set c [ CommentApprovedTs =. val (Just now)
, CommentApprovedBy =. val (Just user_id)
]
where_ (c ^. CommentId ==. val comment_id)
-- Delete all notifications sent about this pending comment, as they no
-- longer apply. Also deletes the UnapprovedCommentNotification
-- entities, the EventNotificationSent, and any other rows with a
-- foreign key on NotificationId.
deleteUnapprovedCommentNotifications = do
notif_ids <- fmap (map unValue) $
select $
from $ \unc -> do
where_ $
unc ^. UnapprovedCommentNotificationComment ==. val comment_id
return (unc ^. UnapprovedCommentNotificationNotification)
deleteCascadeWhere [UserNotificationId P.<-. notif_ids]
insertApprovedCommentDB
:: UTCTime
-> DiscussionId
-> Maybe CommentId
-> UserId
-> Markdown
-> Int
-> Visibility
-> Language
-> SDB CommentId
insertApprovedCommentDB
created_ts
discussion_id
mparent_id
user_id =
insertCommentDB
(Just created_ts)
(Just user_id)
ECommentPosted
created_ts
discussion_id
mparent_id
user_id
insertUnapprovedCommentDB
:: UTCTime
-> DiscussionId
-> Maybe CommentId
-> UserId
-> Markdown
-> Int
-> Visibility
-> Language
-> SDB CommentId
insertUnapprovedCommentDB = insertCommentDB Nothing Nothing ECommentPending
insertCommentDB :: Maybe UTCTime
-> Maybe UserId
-> (CommentId -> Comment -> SnowdriftEvent)
-> UTCTime
-> DiscussionId
-> Maybe CommentId
-> UserId
-> Markdown
-> Int
-> Visibility
-> Language
-> SDB CommentId
insertCommentDB
mapproved_ts
mapproved_by
mk_event
created_ts
discussion_id
mparent_id
user_id
text
depth
visibility
language = do
mparent <- case mparent_id of
Nothing -> return Nothing
Just parent_id -> lift $ get parent_id
let parent_visibility = maybe VisPublic commentVisibility mparent
comment = Comment
created_ts
mapproved_ts
mapproved_by
discussion_id
mparent_id
user_id
text
depth
(min visibility parent_visibility)
language
comment_id <- lift $ insert comment
tell [mk_event comment_id comment]
return comment_id
userClaimCommentDB :: UserId -> CommentId -> Maybe Text -> SDB ()
userClaimCommentDB user_id comment_id mnote = do
now <- liftIO getCurrentTime
let ticket_claiming = TicketClaiming now user_id comment_id mnote
ticket_claiming_id <- lift $ insert ticket_claiming
tell [ETicketClaimed (Left (ticket_claiming_id, ticket_claiming))]
userUnclaimCommentDB :: CommentId -> Maybe Text -> SDB ()
userUnclaimCommentDB comment_id release_note = do
maybe_ticket_claiming_entity <-
lift $ getBy $ UniqueTicketClaiming comment_id
case maybe_ticket_claiming_entity of
Nothing -> return ()
Just (Entity ticket_claiming_id TicketClaiming{..}) -> do
now <- liftIO getCurrentTime
let ticket_old_claiming = TicketOldClaiming
ticketClaimingTs
ticketClaimingUser
ticketClaimingTicket
ticketClaimingNote
release_note
now
ticket_old_claiming_id <- lift $ insert ticket_old_claiming
lift $
update $ \etc -> do
set etc
[ EventTicketClaimedClaim =. val Nothing
, EventTicketClaimedOldClaim =.
val (Just ticket_old_claiming_id) ]
where_ $
etc ^. EventTicketClaimedClaim
==. val (Just ticket_claiming_id)
lift $
delete $
from $ \tc ->
where_ $ tc ^. TicketClaimingId ==. val ticket_claiming_id
tell [ ETicketUnclaimed ticket_old_claiming_id ticket_old_claiming ]
-- | Fetch a comment from the DB, subject to viewing permissions.
fetchCommentDB
:: CommentId
-> ExprCommentCond
-> DB (Either NoCommentReason Comment)
fetchCommentDB comment_id has_permission = get comment_id >>= \case
Nothing -> do
liftIO $
appendFile "log" $ "comment not found: " ++ show comment_id ++ "\n"
return (Left CommentNotFound)
-- Hooray, the comment exists, now toss it and re-query the database
-- with the provided permission conditions. How else would we be able
-- to differentiate a non-existent comment and a comment the user
-- doesn't have permission to view?
Just _ -> fmap
(maybe
(Left CommentPermissionDenied)
(Right . entityVal) . listToMaybe) $
select $
from $ \c -> do
where_ $
c ^. CommentId ==. val comment_id &&.
has_permission c
return c
-- | Count the visible comments in a given discussion.
--
-- Visibility depends on who the user is.
fetchCommentCountDB
:: Maybe (Key User)
-> Key Project
-> Key Discussion
-> DB Int
fetchCommentCountDB muser_id project_id discussion_id = do
let has_permission =
exprCommentProjectPermissionFilter muser_id (val project_id)
roots_ids <-
map entityKey <$>
fetchDiscussionRootCommentsDB discussion_id has_permission
num_children <-
length <$> fetchCommentsDescendantsDB roots_ids has_permission
return $ length roots_ids + num_children
fetchCommentsInDB :: [CommentId] -> ExprCommentCond -> DB [Entity Comment]
fetchCommentsInDB comment_ids has_permission =
select $
from $ \c -> do
where_ $
c ^. CommentId `in_` valList comment_ids &&.
has_permission c
return c
-- | Delete-cascade a comment from the database.
deleteCommentDB :: CommentId -> DB ()
deleteCommentDB = deleteCascade
fetchTagNamesDB :: CommentId -> DB [Internal.TagName]
fetchTagNamesDB comment_id =
fmap (map Internal.TagName . unwrapValues) $
select $ from $ \(t `InnerJoin` ct) -> do
on_ $ t ^. TagId ==. ct ^. CommentTagTag
where_ $ ct ^. CommentTagComment ==. val comment_id
return $ t ^. TagName
fetchTagIdsDB :: Internal.TagName -> DB [TagId]
fetchTagIdsDB (Internal.TagName tag_name) =
fmap unwrapValues $
select $ from $ \t -> do
where_ $ t ^. TagName ==. val tag_name
return $ t ^. TagId
deleteTagDB :: CommentId -> TagId -> DB ()
deleteTagDB comment_id tag_id = do
delete $ from $ \ct ->
where_ $ ct ^. CommentTagComment ==. val comment_id
&&. ct ^. CommentTagTag ==. val tag_id
exists_ct <- selectExists $ from $ \ct ->
where_ $ ct ^. CommentTagTag ==. val tag_id
exists_pt <- selectExists $ from $ \pt ->
where_ $ pt ^. ProjectTagTag ==. val tag_id
exists_tc <- selectExists $ from $ \tc ->
where_ $ tc ^. TagColorTag ==. val tag_id
exists_dtc <- selectExists $ from $ \dtc ->
where_ $ dtc ^. DefaultTagColorTag ==. val tag_id
unless (exists_ct || exists_pt || exists_tc || exists_dtc) $
delete $ from $ \t ->
where_ $ t ^. TagId ==. val tag_id
deleteTagsDB :: CommentId -> [Internal.TagName] -> DB ()
deleteTagsDB comment_id tags =
forM_ tags $ \tag -> do
tag_ids <- fetchTagIdsDB tag
forM_ tag_ids $ deleteTagDB comment_id
deleteTicketDB :: CommentId -> Internal.TicketName -> SDB ()
deleteTicketDB comment_id (Internal.TicketName ticket_name) = do
lift $ delete $ from $ \t ->
where_ $ t ^. TicketName ==. val ticket_name
&&. t ^. TicketComment ==. val comment_id
userUnclaimCommentDB comment_id Nothing
deleteTicketsDB :: CommentId -> [Internal.TicketName] -> SDB ()
deleteTicketsDB comment_id = mapM_ $ deleteTicketDB comment_id
-- | Edit a comment's text. If the comment was flagged, unflag it and send a
-- notification to the flagger.
editCommentDB :: UserId -> CommentId -> Markdown -> Language
-> SYDB (Either Text ())
editCommentDB user_id comment_id text language =
updateComment >>= \case
Left err -> return $ Left err
Right () -> do
lift (fetchCommentFlaggingDB comment_id) >>= \case
Nothing -> return ()
Just (Entity comment_flagging_id CommentFlagging{..}) -> do
langs <- lift $ lift getLanguages
render <- getUrlRender
rendered_route <-
lift $
makeCommentRouteDB langs comment_id
>>= return . render . fromJust
let notif_text = Markdown $
"A comment you flagged has been edited and reposted to "
<> "the site. You can view it [here]("
<> rendered_route <> ")."
-- delete flagging and all flagging reasons with it.
lift $ deleteCascade comment_flagging_id
sendPreferredUserNotificationDB
(Just $ NotificationSender user_id)
(NotificationReceiver commentFlaggingFlagger)
NotifFlagRepost
Nothing
notif_text
return $ Right ()
where
updateComment = do
mexistent_ticket <- lift $ getBy $ UniqueTicket comment_id
existent_tags <- lift $ fetchTagNamesDB comment_id
let content_with_tags = unMarkdown text
content_without_tags = Markdown $ stripTags content_with_tags
content_tags = parseTags content_with_tags
new_tags = content_tags \\ existent_tags
case parseTicket content_with_tags of
Left err -> return $ Left err
Right mnew_ticket-> do
lift $ do
now <- liftIO getCurrentTime
insertTagsDB user_id comment_id new_tags
case (mexistent_ticket, mnew_ticket) of
(Just existing_ticket, Just new_ticket) ->
update $ \t -> do
set t [ TicketUpdatedTs =. val now
, TicketName =. val (unTicketName new_ticket) ]
where_ $ t ^. TicketId ==. val (entityKey existing_ticket)
(Nothing, Just new_ticket) ->
insert_ $ Ticket now now (unTicketName new_ticket) comment_id
(Just _, Nothing) -> deleteBy $ UniqueTicket comment_id
(Nothing, Nothing) -> return ()
update $ \c -> do
set c [ CommentText =. val content_without_tags
, CommentLanguage =. val language
]
where_ (c ^. CommentId ==. val comment_id)
return $ Right ()
-- | Flag a comment. Send a notification to the poster about the flagging.
-- Return whether or not the flag was successful (fails if the comment was
-- already flagged.)
flagCommentDB
:: CommentId
-> Text
-> UserId
-> [FlagReason]
-> Maybe Markdown
-> SYDB Bool
flagCommentDB comment_id permalink_route flagger_id reasons message = do
poster_id <- lift (commentUser <$> get404 comment_id)
now <- liftIO getCurrentTime
mFlaggingId <-
lift (insertUnique (CommentFlagging now flagger_id comment_id message))
flip (maybe (return False)) mFlaggingId $ \flagging_id -> do
lift $ void $
insertMany (map (CommentFlaggingReason flagging_id) reasons)
sendPreferredUserNotificationDB
(Just $ NotificationSender flagger_id)
(NotificationReceiver poster_id)
NotifFlag
Nothing
notif_text
return True
where
notif_text = Markdown $
"Another user flagged your comment as not meeting the standards "
<> "of the Code of Conduct. We *want* your involvement as long as "
<> "it remains respectful and friendly, so please don’t feel "
<> "discouraged."
<> "\n"
<> "Please follow the link below for clarification and "
<> "suggestions the flagger may have offered, and take this "
<> "chance to improve your tone and clarify any misunderstanding. "
<> "Your newly edited comment will then be publicly visible "
<> "again."
<> "\n"
<> "Please alert a moderator if you believe that this flagging "
<> "is inappropriate, if the flagger violated the Code of "
<> "Conduct in their feedback, or if you want other "
<> "assistance."
<> "\n"
<> "[link to flagged comment](" <> permalink_route <> ")"
-- | Post an new (approved) Comment.
postApprovedCommentDB
:: UserId
-> Maybe CommentId
-> DiscussionId
-> Markdown
-> Visibility
-> Language
-> SDB (Either Text CommentId)
postApprovedCommentDB = postComment insertApprovedCommentDB
postUnapprovedCommentDB
:: UserId
-> Maybe CommentId
-> DiscussionId
-> Markdown
-> Visibility
-> Language
-> SDB (Either Text CommentId)
postUnapprovedCommentDB = postComment insertUnapprovedCommentDB
parseTicket :: Text -> Either Text (Maybe Internal.TicketName)
parseTicket t = case res of
[] -> Right Nothing
[x] -> Right $ Just x
xs -> Left $ "expected 1 ticket, but got " <> T.pack (show $ length xs)
where res = filter (\(Internal.TicketName ticket) -> not $ T.null ticket) $
map (Internal.TicketName . T.strip) $
mapMaybe (T.stripPrefix "ticket:") $ T.lines t
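-- For example (illustrative input):
--
-- > parseTicket "ticket: fix login\nsome body text"
-- >   == Right (Just (Internal.TicketName "fix login"))
--
-- More than one @ticket:@ line yields a 'Left' error.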
parseTags :: Text -> [Internal.TagName]
parseTags =
nub . filter (\(Internal.TagName tag) -> not $ T.null tag) .
map (Internal.TagName . T.strip) . mconcat . map (T.splitOn ",") .
mapMaybe (T.stripPrefix "tags:") . T.lines
-- | Remove all lines starting with the "tags:" prefix.
stripTags :: Text -> Text
stripTags = T.unlines . filter (not . ("tags:" `T.isPrefixOf`)) . T.lines
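-- For example (illustrative input):
--
-- > parseTags "tags: ui, bug\nbody"  == [Internal.TagName "ui", Internal.TagName "bug"]
-- > stripTags "tags: ui, bug\nbody"  == "body\n"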
insertTagsDB :: UserId -> CommentId -> [Internal.TagName] -> DB ()
insertTagsDB user_id comment_id tags =
forM_ tags $ \(Internal.TagName tag) -> do
tag_id <- either entityKey id <$> insertBy (Tag tag)
insert_ $ CommentTag comment_id tag_id user_id 1
postComment
:: (UTCTime
-> DiscussionId
-> Maybe CommentId
-> UserId
-> Markdown
-> Int
-> Visibility
-> Language
-> SDB CommentId)
-> UserId
-> Maybe CommentId
-> DiscussionId
-> Markdown
-> Visibility
-> Language
-> SDB (Either Text CommentId)
postComment
insert_comment
user_id
mparent_id
discussion_id
contents
visibility
language = do
case parseTicket $ unMarkdown contents of
Left err -> return $ Left err
Right mnew_ticket -> do
(now, depth) <- lift $ (,)
<$> liftIO getCurrentTime
<*> fetchCommentDepthFromMaybeParentIdDB mparent_id
let content_with_tags = unMarkdown contents
content_without_tags = Markdown $ stripTags content_with_tags
comment_id <- insert_comment now discussion_id mparent_id user_id
content_without_tags depth visibility language
lift $ do
F.forM_ mnew_ticket $ \new_ticket ->
insert_ $ Ticket now now (unTicketName new_ticket) comment_id
insertTagsDB user_id comment_id $ parseTags content_with_tags
case mparent_id of
Nothing -> return ()
Just parent_id ->
mapM_ (insert_ . CommentAncestor comment_id)
=<< (parent_id:) <$> fetchCommentAncestorsDB parent_id
update $ \t -> do
set t [TicketUpdatedTs =. val now]
where_ (t ^. TicketComment `in_` subFetchCommentAncestorsDB comment_id)
return $ Right comment_id
-- | Filter a list of comments per the provided permission filter.
filterCommentsDB :: [CommentId] -> ExprCommentCond -> DB [CommentId]
filterCommentsDB comment_ids has_permission = fmap (map unValue) $
select $
from $ \c -> do
where_ $
c ^. CommentId `in_` valList comment_ids &&.
has_permission c
return (c ^. CommentId)
-- | Get all ancestors that have been closed/retracted.
fetchCommentAncestorClosuresDB :: CommentId -> DB [CommentClosing]
fetchCommentAncestorRetractsDB :: CommentId -> DB [CommentRetracting]
fetchCommentAncestorClosuresDB =
commentClosuresOrRetracts CommentClosingComment
fetchCommentAncestorRetractsDB =
commentClosuresOrRetracts CommentRetractingComment
commentClosuresOrRetracts
:: (PersistEntity val, PersistEntityBackend val ~ SqlBackend)
=> EntityField val CommentId -> CommentId -> DB [val]
commentClosuresOrRetracts comment_field comment_id = fmap (map entityVal) $
select $
from $ \(ca `InnerJoin` table) -> do
on_ (ca ^. CommentAncestorAncestor ==. table ^. comment_field)
orderBy [asc (table ^. comment_field)]
where_ (ca ^. CommentAncestorComment ==. val comment_id)
return table
-- | Get all ancestors, including this comment, that have been closed/retracted.
fetchCommentAncestorClosuresDB' :: CommentId -> DB [CommentClosing]
fetchCommentAncestorRetractsDB' :: CommentId -> DB [CommentRetracting]
fetchCommentAncestorClosuresDB' =
commentClosuresOrRetracts' CommentClosingComment
fetchCommentAncestorRetractsDB' =
commentClosuresOrRetracts' CommentRetractingComment
commentClosuresOrRetracts'
:: (PersistEntity val, PersistEntityBackend val ~ SqlBackend)
=> EntityField val CommentId -> CommentId -> DB [val]
commentClosuresOrRetracts' comment_field comment_id = do
all_comment_ids <- (comment_id :) <$> fetchCommentAncestorsDB comment_id
fmap (map entityVal) $
select $
from $ \table -> do
where_ (table ^. comment_field `in_` valList all_comment_ids)
return table
-- | Get all CommentClosings/CommentRetracts of any of the given Comments'
-- ancestors, grouped by the given Comments.
fetchCommentsAncestorClosuresDB
:: [CommentId]
-> DB (Map CommentId [CommentClosing])
fetchCommentsAncestorRetractsDB
:: [CommentId]
-> DB (Map CommentId [CommentRetracting])
fetchCommentsAncestorClosuresDB =
commentsClosuresOrRetracts CommentClosingComment
fetchCommentsAncestorRetractsDB =
commentsClosuresOrRetracts CommentRetractingComment
commentsClosuresOrRetracts
:: (PersistEntity val, PersistEntityBackend val ~ SqlBackend)
=> EntityField val CommentId -> [CommentId] -> DB (Map CommentId [val])
commentsClosuresOrRetracts comment_field comment_ids =
fmap (foldr step mempty) $
select $
from $ \(ca `InnerJoin` table) -> do
on_ (ca ^. CommentAncestorAncestor ==. table ^. comment_field)
orderBy [asc (table ^. comment_field)]
where_ (ca ^. CommentAncestorComment `in_` valList comment_ids)
return (ca ^. CommentAncestorComment, table)
where
step (Value c, Entity _ v) = M.insertWith (++) c [v]
-- | Get a comment's ancestors' ids.
fetchCommentAncestorsDB :: CommentId -> DB [CommentId]
fetchCommentAncestorsDB = fmap (map unValue) . select . querCommentAncestors
subFetchCommentAncestorsDB :: CommentId -> SqlExpr (ValueList CommentId)
subFetchCommentAncestorsDB = subList_select . querCommentAncestors
fetchCommentDepthDB :: CommentId -> DB Int
fetchCommentDepthDB = fmap commentDepth . getJust
-- | Get the depth of a comment, given (maybe) its parent's CommentId.
fetchCommentDepthFromMaybeParentIdDB :: Maybe CommentId -> DB Int
fetchCommentDepthFromMaybeParentIdDB =
maybe (return 0) (fmap (+1) . fetchCommentDepthDB)
fetchCommentDepth404DB :: CommentId -> Handler Int
fetchCommentDepth404DB = fmap commentDepth . runYDB . get404
-- | Get the CommentFlagging entity for this Comment, if there is one.
fetchCommentFlaggingDB :: CommentId -> DB (Maybe (Entity CommentFlagging))
fetchCommentFlaggingDB = getBy . UniqueCommentFlagging
-- | Get the CommentId this CommentId was rethreaded to, if it was.
fetchCommentRethreadDB :: CommentId -> DB (Maybe CommentId)
fetchCommentRethreadDB comment_id = fmap unValue . listToMaybe <$> (
select $
from $ \cr -> do
where_ $ cr ^. CommentRethreadOldComment ==. val comment_id
return $ cr ^. CommentRethreadNewComment)
-- | Get the last CommentId this CommentId was rethreaded to, if it was.
fetchCommentRethreadLastDB :: CommentId -> DB (Maybe CommentId)
fetchCommentRethreadLastDB comment_id = go Nothing comment_id
where
go mlast_comment comment = do
mnew_comment <- fetchCommentRethreadDB comment
case mnew_comment of
Nothing -> return mlast_comment
Just new_comment -> go mnew_comment new_comment
-- | Get a Comment's CommentTags.
fetchCommentCommentTagsDB :: CommentId -> DB [CommentTag]
fetchCommentCommentTagsDB comment_id = fmap (map entityVal) $
select $
from $ \ct -> do
where_ (ct ^. CommentTagComment ==. val comment_id)
return ct
fetchCommentCommentTagsInDB :: [CommentId] -> DB [CommentTag]
fetchCommentCommentTagsInDB comment_ids = fmap (map entityVal) $
select $
from $ \ct -> do
where_ (ct ^. CommentTagComment `in_` valList comment_ids)
return ct
-- | Get a Comment's current (i.e. not-rethreaded) descendants' ids (don't
-- filter hidden or unapproved comments).
fetchCommentAllCurrentDescendantsDB :: CommentId -> DB [CommentId]
fetchCommentAllCurrentDescendantsDB comment_id = fmap (map unValue) $
select $
from $ \ca -> do
where_ $
ca ^. CommentAncestorAncestor ==. val comment_id
&&. ca ^. CommentAncestorComment `notIn`
(subList_select $ from $ return . (^. CommentRethreadOldComment))
orderBy [asc (ca ^. CommentAncestorComment)]
return (ca ^. CommentAncestorComment)
-- | Get a Comment's descendants' ids (don't filter hidden or unapproved
-- comments).
fetchCommentAllDescendantsDB :: CommentId -> DB [CommentId]
fetchCommentAllDescendantsDB comment_id = fmap (map unValue) $
select $
from $ \ca -> do
where_ (ca ^. CommentAncestorAncestor ==. val comment_id)
orderBy [asc (ca ^. CommentAncestorComment)]
return (ca ^. CommentAncestorComment)
-- | Get all descendants of the given root comment.
fetchCommentDescendantsDB :: CommentId -> ExprCommentCond -> DB [Entity Comment]
fetchCommentDescendantsDB comment_id has_permission =
select $
from $ \c -> do
where_ $
has_permission c
&&. c ^. CommentId `in_` (
subList_select $
from $ \ca -> do
where_ (ca ^. CommentAncestorAncestor ==. val comment_id)
return (ca ^. CommentAncestorComment))
-- DO NOT change ordering here! buildCommentTree relies on it.
orderBy [asc (c ^. CommentParent), asc (c ^. CommentCreatedTs)]
return c
-- | Get all descendants of all given root comments.
fetchCommentsDescendantsDB
:: [CommentId]
-> ExprCommentCond
-> DB [Entity Comment]
fetchCommentsDescendantsDB comment_ids has_permission =
select $
from $ \c -> do
where_ $
c ^. CommentId `in_`
subList_select (querCommentsDescendants comment_ids)
&&. has_permission c
-- DO NOT change ordering here! buildCommentTree relies on it.
orderBy [asc (c ^. CommentParent), asc (c ^. CommentCreatedTs)]
return c
-- | Given a list of candidate CommentIds, return only those that have any
-- (possibly not visible) children.
fetchCommentsWithChildrenInDB :: [CommentId] -> DB [CommentId]
fetchCommentsWithChildrenInDB comment_ids = fmap (map unValue) $
selectDistinct $
from $ \ca -> do
where_ (ca ^. CommentAncestorAncestor `in_` valList comment_ids)
return (ca ^. CommentAncestorAncestor)
-- | Get the "true" target of this CommentId (which may be itself, if not
-- rethreaded - otherwise, ride the rethread train to the end)
fetchCommentDestinationDB :: CommentId -> YDB CommentId
fetchCommentDestinationDB comment_id = do
-- make sure the comment even exists, so this function terminates.
void $ get404 comment_id
mRethreaded <- fetchCommentRethreadDB comment_id
maybe (return comment_id) fetchCommentDestinationDB mRethreaded
-- | Get a Comment's Tags.
fetchCommentTagsDB :: CommentId -> DB [Entity Tag]
fetchCommentTagsDB comment_id =
select $
from $ \(ct `InnerJoin` t) -> do
on_ (ct ^. CommentTagTag ==. t ^. TagId)
where_ (ct ^. CommentTagComment ==. val comment_id)
return t
-- | Get a Comment's CommentTags for a specific Tag.
fetchCommentTagCommentTagsDB :: CommentId -> TagId -> DB [CommentTag]
fetchCommentTagCommentTagsDB comment_id tag_id = fmap (map entityVal) $
select $
from $ \ct -> do
where_ $
ct ^. CommentTagComment ==. val comment_id &&.
ct ^. CommentTagTag ==. val tag_id
return ct
makeCommentClosingMapDB
:: (Foldable c) => c CommentId
-> DB (Map CommentId CommentClosing)
makeCommentRetractingMapDB
:: (Foldable c) => c CommentId
-> DB (Map CommentId CommentRetracting)
makeCommentClosingMapDB =
closeOrRetractMap CommentClosingComment commentClosingComment
makeCommentRetractingMapDB =
closeOrRetractMap CommentRetractingComment commentRetractingComment
closeOrRetractMap
:: (Foldable c, PersistEntity val, PersistEntityBackend val ~ SqlBackend)
=> EntityField val CommentId
-> (val -> CommentId)
-> c CommentId
-> DB (Map CommentId val)
closeOrRetractMap comment_field comment_projection comment_ids =
fmap (foldr step mempty) $
select $
from $ \c -> do
where_ (c ^. comment_field `in_` valList comment_ids)
return c
where
-- step :: Entity val -> Map CommentId val -> Map CommentId val
step (Entity _ c) = M.insert (comment_projection c) c
-- | Given a collection of CommentId, make a map from CommentId to Entity
-- Ticket. Comments that are not tickets will simply not be in the map.
makeTicketMapDB
:: (Foldable c)
=> c CommentId -> DB (Map CommentId (Entity Ticket))
makeTicketMapDB comment_ids = fmap (foldr step mempty) $
select $
from $ \t -> do
where_ (t ^. TicketComment `in_` valList comment_ids)
return t
where
step t = M.insert (ticketComment (entityVal t)) t
makeClaimedTicketMapDB :: [CommentId] -> DB (Map CommentId TicketClaiming)
makeClaimedTicketMapDB comment_ids =
fmap (M.fromList . map (\(Value x, Entity _ y) -> (x, y))) $
select $
from $ \tc -> do
where_ (tc ^. TicketClaimingTicket `in_` valList comment_ids)
return (tc ^. TicketClaimingTicket, tc)
-- | Given a collection of CommentId, make a flag map. Comments that are
-- not flagged will simply not be in the map.
makeFlagMapDB
:: (Foldable c)
=> c CommentId -> DB (Map CommentId (CommentFlagging, [FlagReason]))
makeFlagMapDB comment_ids = fmap (go . map (\(Entity _ x, Value y) -> (x, y))) $
select $
from $ \(cf `InnerJoin` cfr) -> do
on_ (cf ^. CommentFlaggingId ==. cfr ^. CommentFlaggingReasonFlagging)
where_ (cf ^. CommentFlaggingComment `in_` valList comment_ids)
return (cf, cfr ^. CommentFlaggingReasonReason)
where
go :: [(CommentFlagging, FlagReason)]
-> Map CommentId (CommentFlagging, [FlagReason])
go = foldr
(\(cf@(CommentFlagging _ _ comment_id _), reason) ->
M.insertWith combine comment_id (cf, [reason]))
mempty
where
combine :: (CommentFlagging, [FlagReason])
-> (CommentFlagging, [FlagReason])
-> (CommentFlagging, [FlagReason])
combine (cf, reasons1) (_, reasons2) = (cf, reasons1 <> reasons2)
-- | Given a collection of CommentId, make a map from comment ids to sets
-- of watches. Comments that are not watched will simply not be in the map.
--
-- TODO: Return enough info to link to the root of the watch
makeWatchMapDB
:: (Foldable c)
=> c CommentId -> DB (Map CommentId (Set WatchedSubthread))
makeWatchMapDB comment_ids = fmap unwrapTheThings $ do
ancestral_watches <- select $ from $ \(ws `InnerJoin` ca) -> do
on_ $ ws ^. WatchedSubthreadRoot ==. ca ^. CommentAncestorAncestor
where_ $ ca ^. CommentAncestorComment `in_` valList comment_ids
return (ca ^. CommentAncestorComment, ws)
current_watches <- select $ from $ \ws -> do
where_ $ ws ^. WatchedSubthreadRoot `in_` valList comment_ids
return (ws ^. WatchedSubthreadRoot, ws)
return $ ancestral_watches <> current_watches
where
unwrapTheThings =
M.fromListWith mappend
. map (\(Value x, Entity _ y) -> (x, S.singleton y))
rethreadCommentDB
:: Maybe CommentId
-> DiscussionId
-> CommentId
-> UserId
-> Text
-> Int
-> SDB ()
rethreadCommentDB
mnew_parent_id
new_discussion_id
root_comment_id
user_id
reason
depth_offset = do
(old_comment_ids, new_comment_ids) <- lift $ do
descendants_ids <- fetchCommentAllCurrentDescendantsDB root_comment_id
let old_comment_ids = root_comment_id : descendants_ids
new_comment_ids <- flip State.evalStateT mempty $ forM old_comment_ids $ \comment_id -> do
rethread_map <- State.get
Just comment <- lift $ get comment_id
let new_parent_id = maybe mnew_parent_id Just $ M.lookup (commentParent comment) rethread_map
new_comment_id <- lift $ insert $ comment
{ commentDepth = commentDepth comment - depth_offset
, commentParent = new_parent_id
, commentDiscussion = new_discussion_id
}
State.put $ M.insert (Just comment_id) new_comment_id rethread_map
return new_comment_id
return (old_comment_ids, new_comment_ids)
now <- liftIO getCurrentTime
let new_root_comment_id = Prelude.head new_comment_ids -- This is kind of ugly, but it should be safe.
rethread = Rethread now user_id root_comment_id new_root_comment_id reason
rethread_id <- lift (insert rethread)
tell [ECommentRethreaded rethread_id rethread]
let updateForRethread :: (PersistEntity val, PersistEntityBackend val ~ SqlBackend)
=> EntityField val CommentId
-> (SqlExpr (Entity val) -> SqlExpr (Entity CommentRethread) -> SqlExpr (Insertion val))
-> DB ()
updateForRethread comment_field constructor =
insertSelect $
from $ \(table `InnerJoin` cr) -> do
on_ (table ^. comment_field ==. cr ^. CommentRethreadOldComment)
where_ (table ^. comment_field `in_` valList old_comment_ids)
return (constructor table cr)
lift $ do
forM_ (zip old_comment_ids new_comment_ids) $ \(old_comment_id, new_comment_id) -> do
insert_ (CommentRethread rethread_id old_comment_id new_comment_id)
-- TODO(mitchell, david): pull the stuff below out of the for-loop
insertSelect $
from $ \(c `InnerJoin` ca) -> do
on_ (c ^. CommentParent ==. just (ca ^. CommentAncestorComment))
where_ (c ^. CommentId ==. val new_comment_id)
return (CommentAncestor <# val new_comment_id <&> (ca ^. CommentAncestorAncestor))
[Value maybe_new_parent_id] <-
select $
from $ \c -> do
where_ (c ^. CommentId ==. val new_comment_id)
return (c ^. CommentParent)
maybe (return ()) (insert_ . CommentAncestor new_comment_id) maybe_new_parent_id
update $ \t -> do
set t [ TicketComment =. val new_comment_id ]
where_ $ t ^. TicketComment ==. val old_comment_id
-- EVERYTHING with a foreign key on CommentId needs to be added here, for the
-- new comments. We don't want to update in-place because we *do* show the
-- rethreaded comments on Project feeds (for one thing).
--
-- The only table that's updated in-place is Ticket (see
-- above) because we don't want to change the ticket number
-- when we rethread a comment.
updateForRethread CommentClosingComment
(\cc cr -> CommentClosing
<# (cc ^. CommentClosingTs)
<&> (cc ^. CommentClosingClosedBy)
<&> (cc ^. CommentClosingReason)
<&> (cr ^. CommentRethreadNewComment))
updateForRethread CommentFlaggingComment
(\cf cr -> CommentFlagging
<# (cf ^. CommentFlaggingTs)
<&> (cf ^. CommentFlaggingFlagger)
<&> (cr ^. CommentRethreadNewComment)
<&> (cf ^. CommentFlaggingMessage))
updateForRethread CommentRetractingComment
(\r cr -> CommentRetracting
<# (r ^. CommentRetractingTs)
<&> (r ^. CommentRetractingReason)
<&> (cr ^. CommentRethreadNewComment))
updateForRethread CommentTagComment
(\ct cr -> CommentTag
<# (cr ^. CommentRethreadNewComment)
<&> (ct ^. CommentTagTag)
<&> (ct ^. CommentTagUser)
<&> (ct ^. CommentTagCount))
updateForRethread TicketClaimingTicket
(\tc cr -> TicketClaiming
<# (tc ^. TicketClaimingTs)
<&> (tc ^. TicketClaimingUser)
<&> (cr ^. CommentRethreadNewComment)
<&> (tc ^. TicketClaimingNote))
updateForRethread TicketOldClaimingTicket
(\toc cr -> TicketOldClaiming
<# (toc ^. TicketOldClaimingClaimTs)
<&> (toc ^. TicketOldClaimingUser)
<&> (cr ^. CommentRethreadNewComment)
<&> (toc ^. TicketOldClaimingNote)
<&> (toc ^. TicketOldClaimingReleaseNote)
<&> (toc ^. TicketOldClaimingReleasedTs))
updateForRethread UnapprovedCommentNotificationComment
(\ucn cr -> UnapprovedCommentNotification
<# (cr ^. CommentRethreadNewComment)
<&> (ucn ^. UnapprovedCommentNotificationNotification))
updateForRethread ViewCommentComment
(\vc cr -> ViewComment
<# (vc ^. ViewCommentUser)
<&> (cr ^. CommentRethreadNewComment))
updateForRethread WatchedSubthreadRoot
(\ws cr -> WatchedSubthread
<# (ws ^. WatchedSubthreadTs)
<&> (ws ^. WatchedSubthreadUser)
<&> (cr ^. CommentRethreadNewComment))
fetchCommentTicketsDB :: Set CommentId -> DB (Map CommentId (Entity Ticket))
fetchCommentTicketsDB comment_ids = do
ticket_entities <- select $ from $ \t -> do
where_ $ t ^. TicketComment `in_` valList (S.toList comment_ids)
return t
return $ M.fromList $ map (ticketComment . entityVal &&& id) ticket_entities
makeCommentRouteDB :: [Language] -> CommentId -> DB (Maybe (Route App))
makeCommentRouteDB langs comment_id = get comment_id >>= \case
Nothing -> return Nothing
Just comment ->
fetchDiscussionDB langs (commentDiscussion comment) >>= \case
DiscussionOnProject (Entity _ project) ->
return $ Just $
ProjectCommentR (projectHandle project) comment_id
DiscussionOnWikiPage (Entity _ wiki_target) -> do
project <- getJust $ wikiTargetProject wiki_target
return $ Just $
WikiCommentR
(projectHandle project)
(wikiTargetLanguage wiki_target)
(wikiTargetTarget wiki_target)
comment_id
DiscussionOnUser (Entity user_id _) ->
return $ Just $ UserCommentR user_id comment_id
DiscussionOnBlogPost (Entity _ blog_post) -> do
project <- getJust $ blogPostProject blog_post
return $ Just $
BlogPostCommentR
(projectHandle project)
(blogPostHandle blog_post)
comment_id
| chreekat/snowdrift | Model/Comment.hs | agpl-3.0 | 45,534 | 0 | 29 | 13,367 | 10,241 | 5,146 | 5,095 | -1 | -1 |
module Acquire.Trace(trace) where
import Control.Monad.Trans
import Data.Time.Clock
import Data.Time.Format
trace :: (MonadIO m) => String -> m ()
trace msg = liftIO $ do
t <- getCurrentTime
putStrLn $ "[" ++ formatTime defaultTimeLocale "%F %T.%q %z" t ++ "] " ++ msg
| abailly/hsgames | acquire/src/Acquire/Trace.hs | apache-2.0 | 306 | 0 | 11 | 79 | 97 | 52 | 45 | 8 | 1 |
<?xml version='1.0' encoding='UTF-8'?>
<!DOCTYPE helpset
PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN"
"http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="en-GB">
<title>WAFP Extension</title>
<maps>
<homeID>cmss</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
| veggiespam/zap-extensions | addOns/cmss/src/main/javahelp/help/helpset.hs | apache-2.0 | 973 | 85 | 50 | 169 | 401 | 213 | 188 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}
module Main (main) where
import Control.Monad
import Data.Bits
import qualified Data.Vector.Primitive as P
import Succinct.Internal.PopCount
import Test.Framework.Providers.HUnit
import Test.Framework.Providers.QuickCheck2
import Test.Framework.TH
import Test.HUnit hiding (Test, assert)
case_vector_slice :: IO ()
case_vector_slice = do
let v = P.fromList [1,3,7,15,31,63]
12 @=? popCountSlice (vectorToInternal v) 2 3
case_all_one_bits :: IO ()
case_all_one_bits = do
let v = P.replicate 16 (-1)
forM_ [0..1024] $ \i -> do
assertEqual ("for index " ++ show i) i
(popCountBitSlice (vectorToInternal v) 0 i)
prop_pop_count_word64 w =
popCount w == popCountWord64 w
main :: IO ()
main = $defaultMainGenerator
| Gabriel439/succinct | tests/popCountTest.hs | bsd-2-clause | 816 | 0 | 15 | 126 | 263 | 144 | 119 | 25 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Examples.EnvVarUpdate(envvarupdate) where
import Development.NSIS
import Development.NSIS.Plugins.EnvVarUpdate
envvarupdate = do
name "envvarupdate"
outFile "envvarupdate.exe"
requestExecutionLevel User
section "" [] $ do
let assert x = iff_ (getEnvVar HKCU "NSIS_TEST" %/= x) $ alert $ "FAILED!"
deleteEnvVar HKCU "NSIS_TEST"
setEnvVar HKCU "NSIS_TEST" "This is a;test"
assert "This is a;test"
setEnvVarAppend HKCU "NSIS_TEST" "foo bar"
assert "This is a;test;foo bar"
setEnvVarPrepend HKCU "NSIS_TEST" "test"
assert "test;This is a;foo bar"
setEnvVarRemove HKCU "NSIS_TEST" "test"
assert "This is a;foo bar"
setEnvVarRemove HKCU "NSIS_TEST" "foo bar"
assert "This is a"
setEnvVarPrepend HKCU "NSIS_TEST" "extra"
assert "extra;This is a"
setEnvVarRemove HKCU "NSIS_TEST" "bob"
assert "extra;This is a"
setEnvVar HKCU "NSIS_TEST" "bob;bob;bob;x;bob"
setEnvVarRemove HKCU "NSIS_TEST" "bob"
assert "x"
setEnvVarRemove HKCU "NSIS_TEST" "x"
assert ""
setEnvVar HKCU "NSIS_TEST" "y"
deleteEnvVar HKCU "NSIS_TEST"
assert ""
alert $ "USER $$PATH = " & getEnvVar HKCU "PATH"
alert $ "MACHINE $$PATH = " & getEnvVar HKLM "PATH"
alert "Expecting to abort with a String limit error"
deleteEnvVar HKCU "NSIS_TEST"
val <- mutable_ "This is a test that will overflow at some point"
i <- mutable_ 0
while (i %< 100) $ do
i @= i + 1
val @= val & val
setEnvVar HKCU "NSIS_TEST" val
alert "Failed test, should have got a string limit error"
| ndmitchell/nsis | test/Examples/EnvVarUpdate.hs | bsd-3-clause | 1,777 | 0 | 18 | 521 | 403 | 168 | 235 | 44 | 1 |
-- |
-- Module : Crypto.MAC.CMAC
-- License : BSD-style
-- Maintainer : Kei Hibino <[email protected]>
-- Stability : experimental
-- Portability : unknown
--
-- Provide the CMAC (Cipher-based Message Authentication Code) base algorithm.
-- <http://en.wikipedia.org/wiki/CMAC>
-- <http://csrc.nist.gov/publications/nistpubs/800-38B/SP_800-38B.pdf>
--
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Crypto.MAC.CMAC
( cmac
, CMAC
, subKeys
) where
import Data.Word
import Data.Bits (setBit, testBit, shiftL)
import Data.List (foldl')
import Crypto.Cipher.Types
import Crypto.Internal.ByteArray (ByteArrayAccess, ByteArray, Bytes)
import qualified Crypto.Internal.ByteArray as B
-- | Authentication code
newtype CMAC a = CMAC Bytes
deriving (ByteArrayAccess)
instance Eq (CMAC a) where
CMAC b1 == CMAC b2 = B.constEq b1 b2
-- | compute a MAC using the supplied cipher
cmac :: (ByteArrayAccess bin, BlockCipher cipher)
=> cipher -- ^ key to compute CMAC with
-> bin -- ^ input message
-> CMAC cipher -- ^ output tag
cmac k msg =
CMAC $ foldl' (\c m -> ecbEncrypt k $ bxor c m) zeroV ms
where
bytes = blockSize k
zeroV = B.replicate bytes 0 :: Bytes
(k1, k2) = subKeys k
ms = cmacChunks k k1 k2 $ B.convert msg
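-- Illustrative usage sketch (assumes the AES128 cipher from
-- "Crypto.Cipher.AES" plus 'cipherInit' and 'throwCryptoError' from this
-- package; the names below are only an example, not part of this module):
--
-- > let aes = throwCryptoError (cipherInit key) :: AES128
-- > in  cmac aes ("message" :: ByteString)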
cmacChunks :: (BlockCipher k, ByteArray ba) => k -> ba -> ba -> ba -> [ba]
cmacChunks k k1 k2 = rec' where
rec' msg
| B.null tl = if lack == 0
then [bxor k1 hd]
else [bxor k2 $ hd `B.append` B.pack (0x80 : replicate (lack - 1) 0)]
| otherwise = hd : rec' tl
where
bytes = blockSize k
(hd, tl) = B.splitAt bytes msg
lack = bytes - B.length hd
-- | make sub-keys used in CMAC
subKeys :: (BlockCipher k, ByteArray ba)
=> k -- ^ key to compute CMAC with
-> (ba, ba) -- ^ sub-keys to compute CMAC
subKeys k = (k1, k2) where
ipt = cipherIPT k
k0 = ecbEncrypt k $ B.replicate (blockSize k) 0
k1 = subKey ipt k0
k2 = subKey ipt k1
-- polynomial multiply operation to calculate a subkey
subKey :: (ByteArray ba) => [Word8] -> ba -> ba
subKey ipt ws = case B.unpack ws of
[] -> B.empty
w:_ | testBit w 7 -> B.pack ipt `bxor` shiftL1 ws
| otherwise -> shiftL1 ws
shiftL1 :: (ByteArray ba) => ba -> ba
shiftL1 = B.pack . shiftL1W . B.unpack
shiftL1W :: [Word8] -> [Word8]
shiftL1W [] = []
shiftL1W ws@(_:ns) = rec' $ zip ws (ns ++ [0]) where
rec' [] = []
rec' ((x,y):ps) = w : rec' ps
where
w | testBit y 7 = setBit sl1 0
| otherwise = sl1
where sl1 = shiftL x 1
bxor :: ByteArray ba => ba -> ba -> ba
bxor = B.xor
-----
cipherIPT :: BlockCipher k => k -> [Word8]
cipherIPT = expandIPT . blockSize
-- Data type which represents the smallest irreducible binary polynomial
-- against a specified degree.
--
-- Maximum degree bit and degree 0 bit are omitted.
-- For example, The value /Q 7 2 1/ corresponds to the degree /128/.
-- It represents that the smallest irreducible binary polynomial of degree 128
-- is x^128 + x^7 + x^2 + x^1 + 1.
data IPolynomial
= Q Int Int Int
--- | T Int
iPolynomial :: Int -> Maybe IPolynomial
iPolynomial = d where
d 64 = Just $ Q 4 3 1
d 128 = Just $ Q 7 2 1
d _ = Nothing
-- Expand a tail bit pattern of irreducible binary polynomial
expandIPT :: Int -> [Word8]
expandIPT bytes = expandIPT' bytes ipt where
ipt = maybe (error $ "Irreducible binary polynomial not defined against " ++ show nb ++ " bit") id
$ iPolynomial nb
nb = bytes * 8
-- Expand a tail bit pattern of irreducible binary polynomial
expandIPT' :: Int -- ^ width in byte
-> IPolynomial -- ^ irreducible binary polynomial definition
-> [Word8] -- ^ result bit pattern
expandIPT' bytes (Q x y z) =
reverse . setB x . setB y . setB z . setB 0 $ replicate bytes 0
where
setB i ws = hd ++ setBit (head tl) r : tail tl where
(q, r) = i `quotRem` 8
(hd, tl) = splitAt q ws
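-- For example, with a 16-byte (128-bit) block the polynomial /Q 7 2 1/
-- expands to fifteen zero bytes followed by 0x87 (bits 7, 2, 1 and 0 of the
-- last byte), which is the familiar CMAC doubling constant used with AES.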
| vincenthz/cryptonite | Crypto/MAC/CMAC.hs | bsd-3-clause | 4,212 | 0 | 16 | 1,239 | 1,231 | 654 | 577 | 82 | 3 |
module Control.Monad.CompatSpec (main, spec) where
import Test.Hspec
import Control.Monad.Compat
import Prelude ()
import Prelude.Compat
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "(<$!>)" $ do
it "is a strict version of (<$>)" $ do
not <$!> [True, False] `shouldBe` not <$> [True, False]
| beni55/base-compat | test/Control/Monad/CompatSpec.hs | mit | 366 | 0 | 16 | 106 | 114 | 64 | 50 | 12 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Constants used throughout the project.
module Stack.Constants
(builtConfigFileFromDir
,builtFileFromDir
,configuredFileFromDir
,defaultShakeThreads
,distDirFromDir
,distRelativeDir
,haskellFileExts
,projectDockerSandboxDir
,rawGithubUrl
,stackDotYaml
,stackRootEnvVar
,userDocsDir
,configCacheFile
,configCabalMod
,buildCacheFile
,testSuccessFile
,testBuiltFile
,benchBuiltFile
,stackProgName
,wiredInPackages
,cabalPackageName
,implicitGlobalDir
,hpcDirFromDir
,dotHpc)
where
import Control.Monad.Catch (MonadThrow)
import Control.Monad.Reader
import Data.HashSet (HashSet)
import qualified Data.HashSet as HashSet
import Data.Text (Text)
import qualified Data.Text as T
import Path as FL
import Prelude
import Stack.Types.Config
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
-- | Extensions used for Haskell files.
haskellFileExts :: [Text]
haskellFileExts = ["hs","hsc","lhs"]
-- | The filename used for completed build indicators.
builtFileFromDir :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir
-> m (Path Abs File)
builtFileFromDir fp = do
dist <- distDirFromDir fp
return (dist </> $(mkRelFile "stack.gen"))
-- | The filename used for completed configure indicators.
configuredFileFromDir :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir
-> m (Path Abs File)
configuredFileFromDir fp = do
dist <- distDirFromDir fp
return (dist </> $(mkRelFile "setup-config"))
-- | The filename used for completed build indicators.
builtConfigFileFromDir :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir
-> m (Path Abs File)
builtConfigFileFromDir fp =
liftM (fp </>) builtConfigRelativeFile
-- | Relative location of completed build indicators.
builtConfigRelativeFile :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> m (Path Rel File)
builtConfigRelativeFile = do
dist <- distRelativeDir
return (dist </> $(mkRelFile "stack.config"))
-- | Default shake thread count for parallel builds.
defaultShakeThreads :: Int
defaultShakeThreads = 4
-- -- | Hoogle database file.
-- hoogleDatabaseFile :: Path Abs Dir -> Path Abs File
-- hoogleDatabaseFile docLoc =
-- docLoc </>
-- $(mkRelFile "default.hoo")
-- -- | Extension for hoogle databases.
-- hoogleDbExtension :: String
-- hoogleDbExtension = "hoo"
-- -- | Extension of haddock files
-- haddockExtension :: String
-- haddockExtension = "haddock"
-- | User documentation directory.
userDocsDir :: Config -> Path Abs Dir
userDocsDir config = configStackRoot config </> $(mkRelDir "doc/")
-- | The filename used for dirtiness check of source files.
buildCacheFile :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory.
-> m (Path Abs File)
buildCacheFile dir = do
liftM
(</> $(mkRelFile "stack-build-cache"))
(distDirFromDir dir)
-- | The filename used to mark tests as having succeeded
testSuccessFile :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory
-> m (Path Abs File)
testSuccessFile dir =
liftM
(</> $(mkRelFile "stack-test-success"))
(distDirFromDir dir)
-- | The filename used to mark tests as having built
testBuiltFile :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory
-> m (Path Abs File)
testBuiltFile dir =
liftM
(</> $(mkRelFile "stack-test-built"))
(distDirFromDir dir)
-- | The filename used to mark benchmarks as having built
benchBuiltFile :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory
-> m (Path Abs File)
benchBuiltFile dir =
liftM
(</> $(mkRelFile "stack-bench-built"))
(distDirFromDir dir)
-- | The filename used for dirtiness check of config.
configCacheFile :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory.
-> m (Path Abs File)
configCacheFile dir = do
liftM
(</> $(mkRelFile "stack-config-cache"))
(distDirFromDir dir)
-- | The filename used for modification check of .cabal
configCabalMod :: (MonadThrow m, MonadReader env m, HasPlatform env,HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory.
-> m (Path Abs File)
configCabalMod dir = do
liftM
(</> $(mkRelFile "stack-cabal-mod"))
(distDirFromDir dir)
-- | Directory for HPC work.
hpcDirFromDir
:: (MonadThrow m, MonadReader env m, HasPlatform env, HasEnvConfig env)
=> Path Abs Dir -- ^ Package directory.
-> m (Path Abs Dir)
hpcDirFromDir dir = do
liftM (</> $(mkRelDir "hpc")) (distDirFromDir dir)
-- | Package's build artifacts directory.
distDirFromDir :: (MonadThrow m, MonadReader env m, HasPlatform env, HasEnvConfig env)
=> Path Abs Dir
-> m (Path Abs Dir)
distDirFromDir fp =
liftM (fp </>) distRelativeDir
-- | Relative location of build artifacts.
distRelativeDir :: (MonadThrow m, MonadReader env m, HasPlatform env, HasEnvConfig env)
=> m (Path Rel Dir)
distRelativeDir = do
cabalPkgVer <- asks (envConfigCabalVersion . getEnvConfig)
platform <- platformRelDir
cabal <-
parseRelDir $
packageIdentifierString
(PackageIdentifier cabalPackageName cabalPkgVer)
return $
workDirRel </>
$(mkRelDir "dist") </>
platform </>
cabal
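-- For example (illustrative; assumes the work dir resolves to @.stack-work@
-- on an x86-64 Linux platform with Cabal-1.22.4.0), the result renders
-- roughly as:
--
-- > .stack-work/dist/x86_64-linux/Cabal-1.22.4.0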
-- | Get a URL for a raw file on Github
rawGithubUrl :: Text -- ^ user/org name
-> Text -- ^ repo name
-> Text -- ^ branch name
-> Text -- ^ filename
-> Text
rawGithubUrl org repo branch file = T.concat
[ "https://raw.githubusercontent.com/"
, org
, "/"
, repo
, "/"
, branch
, "/"
, file
]
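-- For example (illustrative arguments):
--
-- > rawGithubUrl "commercialhaskell" "stack" "master" "doc/GUIDE.md"
-- >   == "https://raw.githubusercontent.com/commercialhaskell/stack/master/doc/GUIDE.md"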
-- | Docker sandbox from project root.
projectDockerSandboxDir :: Path Abs Dir -> Path Abs Dir
projectDockerSandboxDir projectRoot = projectRoot </> workDirRel </> $(mkRelDir "docker/")
-- | Name of the 'stack' program.
stackProgName :: String
stackProgName = "stack"
-- | The filename used for the stack config file.
stackDotYaml :: Path Rel File
stackDotYaml = $(mkRelFile "stack.yaml")
-- | Environment variable used to override the '~/.stack' location.
stackRootEnvVar :: String
stackRootEnvVar = "STACK_ROOT"
-- See https://downloads.haskell.org/~ghc/7.10.1/docs/html/libraries/ghc/src/Module.html#integerPackageKey
wiredInPackages :: HashSet PackageName
wiredInPackages =
maybe (error "Parse error in wiredInPackages") HashSet.fromList mparsed
where
mparsed = sequence $ map parsePackageName
[ "ghc-prim"
, "integer-gmp"
, "integer-simple"
, "base"
, "rts"
, "template-haskell"
, "dph-seq"
, "dph-par"
, "ghc"
, "interactive"
]
-- | Just to avoid repetition and magic strings.
cabalPackageName :: PackageName
cabalPackageName =
$(mkPackageName "Cabal")
-- | Implicit global directory used when outside of a project.
implicitGlobalDir :: Path Abs Dir -- ^ Stack root.
-> Path Abs Dir
implicitGlobalDir p =
p </>
$(mkRelDir "global")
-- | Where .mix files go.
dotHpc :: Path Rel Dir
dotHpc = $(mkRelDir ".hpc")
| CRogers/stack | src/Stack/Constants.hs | bsd-3-clause | 8,314 | 0 | 12 | 2,066 | 1,713 | 919 | 794 | 179 | 1 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2015 Galois, Inc.
-- License : BSD3
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
{-# LANGUAGE PatternGuards, BangPatterns, RecordWildCards #-}
{-# LANGUAGE Safe #-}
module Cryptol.TypeCheck.Solve
( simplifyAllConstraints
, proveImplication
, wfType
, wfTypeFunction
, improveByDefaulting
, defaultReplExpr
, simpType
, simpTypeMaybe
) where
import Cryptol.Parser.AST(LQName, thing)
import Cryptol.Parser.Position (emptyRange)
import Cryptol.TypeCheck.PP(pp)
import Cryptol.TypeCheck.AST
import Cryptol.TypeCheck.Monad
import Cryptol.TypeCheck.Subst
(apSubst,fvs,singleSubst,substToList, isEmptySubst,
emptySubst,Subst,listSubst, (@@), Subst,
apSubstMaybe)
import Cryptol.TypeCheck.Solver.Class
import Cryptol.TypeCheck.Solver.Selector(tryHasGoal)
import qualified Cryptol.TypeCheck.Solver.Numeric.AST as Num
import qualified Cryptol.TypeCheck.Solver.Numeric.ImportExport as Num
import Cryptol.TypeCheck.Solver.Numeric.Interval (Interval)
import qualified Cryptol.TypeCheck.Solver.Numeric.Simplify1 as Num
import qualified Cryptol.TypeCheck.Solver.Numeric.SimplifyExpr as Num
import qualified Cryptol.TypeCheck.Solver.CrySAT as Num
import Cryptol.TypeCheck.Solver.CrySAT (debugBlock, DebugLog(..))
import Cryptol.TypeCheck.Solver.Simplify (tryRewritePropAsSubst)
import Cryptol.Utils.PP (text)
import Cryptol.Utils.Panic(panic)
import Cryptol.Utils.Misc(anyJust)
import Control.Monad (unless, guard)
import Data.Either(partitionEithers)
import Data.Maybe(catMaybes, fromMaybe, mapMaybe)
import Data.Map ( Map )
import qualified Data.Map as Map
import Data.Set ( Set )
import qualified Data.Set as Set
{- | Add additional constraints that ensure the validity of a type function.
Note that these constraints do not introduce additional malformed types,
so the well-formedness constraints are guaranteed to be well-formed.
This assumes that the parameters are well-formed. -}
wfTypeFunction :: TFun -> [Type] -> [Prop]
wfTypeFunction TCSub [a,b] = [ a >== b, pFin b]
wfTypeFunction TCDiv [a,b] = [ b >== tOne, pFin a ]
wfTypeFunction TCMod [a,b] = [ b >== tOne, pFin a ]
wfTypeFunction TCLenFromThen [a,b,w] =
[ pFin a, pFin b, pFin w, a =/= b, w >== tWidth a ]
wfTypeFunction TCLenFromThenTo [a,b,c] = [ pFin a, pFin b, pFin c, a =/= b ]
wfTypeFunction _ _ = []
-- | Add additional constraints that ensure the validity of a type.
wfType :: Type -> [Prop]
wfType t =
case t of
TCon c ts ->
let ps = concatMap wfType ts
in case c of
TF f -> wfTypeFunction f ts ++ ps
_ -> ps
TVar _ -> []
TUser _ _ s -> wfType s
TRec fs -> concatMap (wfType . snd) fs
--------------------------------------------------------------------------------
simplifyAllConstraints :: InferM ()
simplifyAllConstraints =
do mapM_ tryHasGoal =<< getHasGoals
gs <- getGoals
cfg <- getSolverConfig
(mb,su) <- io (Num.withSolver cfg (`simpGoals'` gs))
extendSubst su
case mb of
Right gs1 -> addGoals gs1
Left badGs -> mapM_ (recordError . UnsolvedGoal True) badGs
proveImplication :: LQName -> [TParam] -> [Prop] -> [Goal] -> InferM Subst
proveImplication lnam as ps gs =
do evars <- varsWithAsmps
cfg <- getSolverConfig
(mbErr,su) <- io (proveImplicationIO cfg lnam evars as ps gs)
case mbErr of
Right ws -> mapM_ recordWarning ws
Left err -> recordError err
return su
proveImplicationIO :: SolverConfig
                   -> LQName     -- ^ Checking this function
-> Set TVar -- ^ These appear in the env., and we should
                                 -- not try to default them
-> [TParam] -- ^ Type parameters
                   -> [Prop]     -- ^ Assumed constraints
-> [Goal] -- ^ Collected constraints
-> IO (Either Error [Warning], Subst)
proveImplicationIO _ _ _ _ [] [] = return (Right [], emptySubst)
proveImplicationIO cfg lname varsInEnv as ps gs =
Num.withSolver cfg $ \s ->
debugBlock s "proveImplicationIO" $
do debugBlock s "assumes" (debugLog s ps)
debugBlock s "shows" (debugLog s gs)
debugLog s "1. ------------------"
_simpPs <- Num.assumeProps s ps
mbImps <- Num.check s
debugLog s "2. ------------------"
case mbImps of
Nothing ->
do debugLog s "(contradiction in assumptions)"
return (Left $ UnusableFunction (thing lname) ps, emptySubst)
Just (imps,extra) ->
do let su = importImps imps
gs0 = apSubst su gs
debugBlock s "improvement from assumptions:" $ debugLog s su
let (scs,invalid) = importSideConds extra
unless (null invalid) $
panic "proveImplicationIO" ( "Unable to import all side conditions:"
: map (show . Num.ppProp) invalid )
let gs1 = filter ((`notElem` ps) . goal) gs0
debugLog s "3. ---------------------"
(mb,su1) <- simpGoals' s (scs ++ gs1)
case mb of
Left badGs -> reportUnsolved badGs (su1 @@ su)
Right [] -> return (Right [], su1 @@ su)
Right us ->
-- Last hope: try to default stuff
do let vs = Set.filter isFreeTV $ fvs $ map goal us
dVars = Set.toList (vs `Set.difference` varsInEnv)
(_,us1,su2,ws) <- improveByDefaultingWith s dVars us
case us1 of
[] -> return (Right ws, su2 @@ su1 @@ su)
_ -> reportUnsolved us1 (su2 @@ su1 @@ su)
where
reportUnsolved us su =
return ( Left $ UnsolvedDelcayedCt
$ DelayedCt { dctSource = lname
, dctForall = as
, dctAsmps = ps
, dctGoals = us
}, su)
{- Constraints and satisfiability:
1. [Satisfiable] A collection of constraints is _satisfiable_, if there is an
assignment for the variables that make all constraints true.
  2. [Valid] If a constraint is satisfied by every assignment of its free
     variables, then it is _valid_, and may be omitted.
  3. [Partial] A constraint may be _partial_, which means that under some
assignment it is neither true nor false. For example:
`x - y > 5` is true for `{ x = 15, y = 3 }`, it is false for
`{ x = 5, y = 4 }`, and it is neither for `{ x = 1, y = 2 }`.
Note that constraints that are always true or undefined are NOT
     valid, as there are assignments for which they are not true.
An example of such constraint is `x - y >= 0`.
4. [Provability] Instead of thinking of three possible values for
satisfiability (i.e., true, false, and unknown), we could instead
     think of asking: "Is constraint C provable?". This essentially
maps "true" to "true", and "false,unknown" to "false", if we
treat constraints with malformed parameters as unprovable.
-}
{-
The plan:
1. Start with a set of constraints, CS
2. Compute its well-defined closure, DS.
3. Simplify constraints: evaluate terms in constraints as much as possible
4. Solve: eliminate constraints that are true
5. Check for consistency
6. Compute improvements
7. For each type in the improvements, add well-defined constraints
8. Instantiate constraints with substitution
9. Goto 3
-}
simpGoals' :: Num.Solver -> [Goal] -> IO (Either [Goal] [Goal], Subst)
simpGoals' s gs0 = go emptySubst [] (wellFormed gs0 ++ gs0)
where
-- Assumes that the well-formed constraints are themselves well-formed.
wellFormed gs = [ g { goal = p } | g <- gs, p <- wfType (goal g) ]
go su old [] = return (Right old, su)
go su old gs =
do gs1 <- simplifyConstraintTerms s gs
res <- solveConstraints s old gs1
case res of
Left err -> return (Left err, su)
Right gs2 ->
do let gs3 = gs2 ++ old
mb <- computeImprovements s gs3
case mb of
Left err -> return (Left err, su)
Right impSu ->
let (unchanged,changed) =
partitionEithers (map (applyImp impSu) gs3)
new = wellFormed changed
in go (impSu @@ su) unchanged (new ++ changed)
applyImp su g = case apSubstMaybe su (goal g) of
Nothing -> Left g
Just p -> Right g { goal = p }
{- Note:
It is good to consider the other goals when evaluating terms.
For example, consider the constraints:
P (x * inf), x >= 1
We cannot simplify `x * inf` on its own, because we do not know if `x`
   might be 0. However, in the context of `x >= 1`, we know that this is
impossible, and we can simplify the constraints to:
P inf, x >= 1
However, we should be careful to avoid circular reasoning, as we wouldn't
want to use the fact that `x >= 1` to simplify `x >= 1` to true.
-}
-- XXX: currently simplify individually
simplifyConstraintTerms :: Num.Solver -> [Goal] -> IO [Goal]
simplifyConstraintTerms s gs =
debugBlock s "Simplifying terms" $ return (map simpGoal gs)
where simpGoal g = g { goal = simpProp (goal g) }
solveConstraints :: Num.Solver ->
[Goal] {- We may use these, but don't try to solve,
we already tried and failed. -} ->
[Goal] {- Need to solve these -} ->
IO (Either [Goal] [Goal])
  -- ^ Left: contradicting goals,
-- Right: goals that were not solved, or sub-goals
-- for solved goals. Does not include "old"
solveConstraints s otherGs gs0 =
debugBlock s "Solving constraints" $ solveClassCts [] [] gs0
where
otherNumerics = [ g | Right g <- map Num.numericRight otherGs ]
solveClassCts unsolvedClass numerics [] =
do unsolvedNum <- solveNumerics s otherNumerics numerics
return (Right (unsolvedClass ++ unsolvedNum))
solveClassCts unsolved numerics (g : gs) =
case Num.numericRight g of
Right n -> solveClassCts unsolved (n : numerics) gs
Left c ->
case classStep c of
Unsolvable -> return (Left [g])
Unsolved -> solveClassCts (g : unsolved) numerics gs
Solved Nothing subs -> solveClassCts unsolved numerics (subs ++ gs)
Solved (Just su) _ -> panic "solveClassCts"
[ "Unexpected substituion", show su ]
solveNumerics :: Num.Solver ->
[(Goal,Num.Prop)] {- ^ Consult these -} ->
[(Goal,Num.Prop)] {- ^ Solve these -} ->
IO [Goal]
solveNumerics s consultGs solveGs =
Num.withScope s $
do _ <- Num.assumeProps s (map (goal . fst) consultGs)
Num.simplifyProps s (map Num.knownDefined solveGs)
computeImprovements :: Num.Solver -> [Goal] -> IO (Either [Goal] Subst)
computeImprovements s gs =
debugBlock s "Computing improvements" $
do let nums = [ g | Right g <- map Num.numericRight gs ]
res <- Num.withScope s $
do _ <- Num.assumeProps s (map (goal . fst) nums)
mb <- Num.check s
case mb of
Nothing -> return Nothing
Just (suish,_ps1) ->
do let (su,_ps2) = importSplitImps suish
-- Num.check has already checked that the intervals are sane,
-- so we don't need to check for a broken interval here
Right ints <- Num.getIntervals s
return (Just (ints,su))
case res of
Just (ints,su)
| isEmptySubst su
, (x,t) : _ <- mapMaybe (improveByDefn ints) gs ->
do let su' = singleSubst x t
debugLog s ("Improve by definition: " ++ show (pp su'))
return (Right su')
| otherwise -> return (Right su)
Nothing ->
do bad <- Num.minimizeContradictionSimpDef s
(map Num.knownDefined nums)
return (Left bad)
improveByDefn :: Map TVar Interval -> Goal -> Maybe (TVar,Type)
improveByDefn ints Goal { .. } =
do (var,ty) <- tryRewritePropAsSubst ints goal
return (var,simpType ty)
-- | Import an improving substitution (i.e., a bunch of equations)
-- into a Cryptol substitution (which is idempotent).
-- The substitution will contain only unification variables.
-- "Improvements" on skolem variables become additional constraints.
importSplitImps :: Map Num.Name Num.Expr -> (Subst, [Prop])
importSplitImps = mk . partitionEithers . map imp . Map.toList
where
mk (uni,props) = (listSubst (catMaybes uni), props)
imp (x,e) = case (x, Num.importType e) of
(Num.UserName tv, Just ty) ->
case tv of
TVFree {} -> Left (Just (tv,ty))
TVBound {} -> Right (TVar tv =#= ty)
{- This may happen if we are working on an implication,
and we have an improvement about a variable in the
               assumptions that is not in any of the goals.
               XXX: Perhaps, we should mark these variables, so we don't waste
time to "improve" them. -}
_ -> Left Nothing
-- | Import an improving substitution into a Cryptol substitution.
-- The substitution will contain both unification and skolem variables,
-- so this should be used when processing *givens*.
importImps :: Map Num.Name Num.Expr -> Subst
importImps = listSubst . map imp . Map.toList
where
imp (x,e) = case (x, Num.importType e) of
(Num.UserName tv, Just ty) -> (tv,ty)
_ -> panic "importImps" [ "Failed to import:", show x, show e ]
importSideConds :: [Num.Prop] -> ([Goal],[Num.Prop])
importSideConds = go [] []
where
go ok bad [] = ([ Goal CtImprovement emptyRange g | g <- ok], bad)
go ok bad (p:ps) = case Num.importProp p of
Just p' -> go (p' ++ ok) bad ps
Nothing -> go ok (p:bad) ps
--------------------------------------------------------------------------------
-- This is what we use to avoid ambiguity when generalizing.
{- If a variable, `a`, is:
1. Of kind KNum
2. Generic (i.e., does not appear in the environment)
3. It appears only in constraints but not in the resulting type
(i.e., it is not on the RHS of =>)
4. It (say, the variable 'a') appears only in constraints like this:
        4.1 `a >= t` with (`a` not in `fvs t`)
        4.2 in the `s` of `fin s`
Then we replace `a` with `max(t1 .. tn)` where the `ts`
are from the constraints `a >= t`.
If `t1 .. tn` is empty, then we replace `a` with 0.
This function assumes that 1-3 have been checked, and implements the rest.
So, given some variables and constraints that are about to be generalized,
we return:
1. a new (same or smaller) set of variables to quantify,
2. a new set of constraints,
3. a substitution which indicates what got defaulted.
-}
improveByDefaulting ::
SolverConfig ->
[TVar] -> -- candidates for defaulting
[Goal] -> -- constraints
IO ( [TVar] -- non-defaulted
, [Goal] -- new constraints
, Subst -- improvements from defaulting
, [Warning] -- warnings about defaulting
)
improveByDefaulting cfg xs gs =
Num.withSolver cfg $ \s -> improveByDefaultingWith s xs gs
improveByDefaultingWith ::
Num.Solver ->
[TVar] -> -- candidates for defaulting
[Goal] -> -- constraints
IO ( [TVar] -- non-defaulted
, [Goal] -- new constraints
, Subst -- improvements from defaulting
, [Warning] -- warnings about defaulting
)
improveByDefaultingWith s as ps =
classify (Map.fromList [ (a,([],Set.empty)) | a <- as ]) [] [] ps
where
-- leq: candidate definitions (i.e. of the form x >= t, x `notElem` fvs t)
-- for each of these, we keep the list of `t`, and the free vars in them.
-- fins: all `fin` constraints
-- others: any other constraints
classify leqs fins others [] =
do let -- First, we use the `leqs` to choose some definitions.
(defs, newOthers) = select [] [] (fvs others) (Map.toList leqs)
su = listSubst defs
-- Do this to simplify the instantiated "fin" constraints.
(mb,su1) <- simpGoals' s (newOthers ++ others ++ apSubst su fins)
case mb of
Right gs1 ->
let warn (x,t) =
case x of
TVFree _ _ _ d -> DefaultingTo d t
TVBound {} -> panic "Crypto.TypeCheck.Infer"
[ "tryDefault attempted to default a quantified variable."
]
newSu = su1 @@ su -- XXX: is that right?
names = Set.fromList $ map fst $ fromMaybe [] $ substToList newSu
in return ( [ a | a <- as, not (a `Set.member` names) ]
, gs1
, newSu
, map warn defs
)
-- Something went wrong, don't default.
Left _ -> return (as,ps,su1 @@ su,[])
classify leqs fins others (prop : more) =
case tNoUser (goal prop) of
-- We found a `fin` constraint.
TCon (PC PFin) [ _ ] -> classify leqs (prop : fins) others more
-- Things of the form: x >= T(x) are not defaulted.
TCon (PC PGeq) [ TVar x, t ]
| x `elem` as && x `Set.notMember` freeRHS ->
classify leqs' fins others more
where freeRHS = fvs t
add (xs1,vs1) (xs2,vs2) = (xs1 ++ xs2, Set.union vs1 vs2)
leqs' = Map.insertWith add x ([(t,prop)],freeRHS) leqs
_ -> classify leqs fins (prop : others) more
  -- Pick out which variables may be defaulted and how.
-- XXX: simpType t
select yes no _ [] = ([ (x, t) | (x,t) <- yes ] ,no)
select yes no otherFree ((x,(rhsG,vs)) : more) =
select newYes newNo newFree newMore
where
(ts,gs) = unzip rhsG
      -- `x` is selected only if it appears nowhere else.
-- this includes other candidates for defaulting.
(newYes,newNo,newFree,newMore)
         -- Mentioned in other constraints, definitely not defaultable.
| x `Set.member` otherFree = noDefaulting
| otherwise =
let deps = [ y | (y,(_,yvs)) <- more, x `Set.member` yvs ]
recs = filter (`Set.member` vs) deps
in if not (null recs) || isBoundTV x -- x >= S(y), y >= T(x)
then noDefaulting
-- x >= S, y >= T(x) or
-- x >= S(y), y >= S
else yesDefaulting
where
noDefaulting = ( yes, gs ++ no, vs `Set.union` otherFree, more )
yesDefaulting =
let ty = case ts of
[] -> tNum (0::Int)
_ -> foldr1 tMax ts
su1 = singleSubst x ty
in ( (x,ty) : [ (y,apSubst su1 t) | (y,t) <- yes ]
, no -- We know that `x` does not appear here
, otherFree -- We know that `x` did not appear here either
-- No need to update the `vs` because we've already
-- checked that there are no recursive dependencies.
, [ (y, (apSubst su1 ts1, vs1)) | (y,(ts1,vs1)) <- more ]
)
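-- A minimal illustrative sketch (not part of the solver): the defaulting rule
-- described above replaces a candidate variable with the maximum of the lower
-- bounds collected from its `a >= t` constraints, or with 0 when there are no
-- such bounds (cf. `yesDefaulting`).  It only assumes `tNum` and `tMax`,
-- which are already in scope in this module.
_defaultFromLowerBounds :: [Type] -> Type
_defaultFromLowerBounds [] = tNum (0 :: Int)
_defaultFromLowerBounds ts = foldr1 tMax ts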
-- | Try to pick a reasonable instantiation for an expression, with
-- the given type. This is useful when we do evaluation at the REPL.
-- The resulting types should satisfy the constraints of the schema.
defaultReplExpr :: SolverConfig -> Expr -> Schema
-> IO (Maybe ([(TParam,Type)], Expr))
defaultReplExpr cfg e s =
if all (\v -> kindOf v == KNum) (sVars s)
then do let params = map tpVar (sVars s)
mbSubst <- tryGetModel cfg params (sProps s)
case mbSubst of
Just su ->
do (res,su1) <- Num.withSolver cfg $ \so ->
simpGoals' so (map (makeGoal su) (sProps s))
return $
case res of
Right [] | isEmptySubst su1 ->
do tys <- mapM (bindParam su) params
return (zip (sVars s) tys, appExpr tys)
_ -> Nothing
_ -> return Nothing
else return Nothing
where
makeGoal su p = Goal { goalSource = error "goal source"
, goalRange = error "goal range"
, goal = apSubst su p
}
bindParam su tp =
do let ty = TVar tp
ty' = apSubst su ty
guard (ty /= ty')
return ty'
appExpr tys = foldl (\e1 _ -> EProofApp e1) (foldl ETApp e tys) (sProps s)
-- | Attempt to default the given constraints by asserting them in the SMT
-- solver, and asking it for a model.
tryGetModel ::
SolverConfig ->
[TVar] -> -- variables to try defaulting
[Prop] -> -- constraints
IO (Maybe Subst)
tryGetModel cfg xs ps =
Num.withSolver cfg $ \ s ->
-- We are only interested in finite instantiations
Num.getModel s (map (pFin . TVar) xs ++ ps)
--------------------------------------------------------------------------------
simpType :: Type -> Type
simpType ty = fromMaybe ty (simpTypeMaybe ty)
simpProp :: Prop -> Prop
simpProp p = case p of
TUser f ts q -> TUser f (map simpType ts) (simpProp q)
TCon c ts -> TCon c (map simpType ts)
TVar {} -> panic "simpProp" ["variable", show p]
TRec {} -> panic "simpProp" ["record", show p]
simpTypeMaybe :: Type -> Maybe Type
simpTypeMaybe ty =
case ty of
TCon c ts ->
case c of
TF {} -> do e <- Num.exportType ty
e1 <- Num.crySimpExprMaybe e
Num.importType e1
_ -> TCon c `fmap` anyJust simpTypeMaybe ts
TVar _ -> Nothing
TUser x ts t -> TUser x ts `fmap` simpTypeMaybe t
TRec fs ->
do let (ls,ts) = unzip fs
ts' <- anyJust simpTypeMaybe ts
return (TRec (zip ls ts'))
--------------------------------------------------------------------------------
_testSimpGoals :: IO ()
_testSimpGoals = Num.withSolver cfg $ \s ->
do mapM_ dump asmps
mapM_ (dump .goal) gs
_ <- Num.assumeProps s asmps
_mbImps <- Num.check s
(mb,_) <- simpGoals' s gs
case mb of
Right _ -> debugLog s "End of test"
Left _ -> debugLog s "Impossible"
where
cfg = SolverConfig { solverPath = "cvc4"
, solverArgs = [ "--lang=smt2", "--incremental", "--rewrite-divk" ]
, solverVerbose = 1
}
asmps = []
gs = map fakeGoal [ tv 0 =#= tMin (num 10) (tv 1)
, tv 1 =#= num 10
]
fakeGoal p = Goal { goalSource = undefined, goalRange = undefined, goal = p }
tv n = TVar (TVFree n KNum Set.empty (text "test var"))
_btv n = TVar (TVBound n KNum)
num x = tNum (x :: Int)
dump a = do putStrLn "-------------------_"
case Num.exportProp a of
Just b -> do print $ Num.ppProp' $ Num.propToProp' b
putStrLn "-------------------"
Nothing -> print "can't export"
| iblumenfeld/cryptol | src/Cryptol/TypeCheck/Solve.hs | bsd-3-clause | 24,065 | 1 | 26 | 7,979 | 5,932 | 3,083 | 2,849 | 384 | 9 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Stack.Types.Image where
import Data.Aeson.Extended
import Data.Monoid
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (maybeToList)
import Data.Text (Text)
import GHC.Generics (Generic)
import Generics.Deriving.Monoid (mappenddefault, memptydefault)
import Path
import Prelude -- Fix redundant import warnings
-- | Image options. Currently only Docker image options.
newtype ImageOpts = ImageOpts
{ imgDockers :: [ImageDockerOpts]
-- ^ One or more stanzas for docker image settings.
} deriving (Show)
data ImageDockerOpts = ImageDockerOpts
{ imgDockerBase :: !(Maybe String)
-- ^ Maybe have a docker base image name. (Although we will not
-- be able to create any Docker images without this.)
, imgDockerEntrypoints :: !(Maybe [String])
-- ^ Maybe have a specific ENTRYPOINT list that will be used to
-- create images.
, imgDockerAdd :: !(Map FilePath (Path Abs Dir))
-- ^ Maybe have some static project content to include in a
-- specific directory in all the images.
, imgDockerImageName :: !(Maybe String)
-- ^ Maybe have a name for the image we are creating
, imgDockerExecutables :: !(Maybe [Path Rel File])
-- ^ Filenames of executables to add (if Nothing, add them all)
} deriving (Show)
newtype ImageOptsMonoid = ImageOptsMonoid
{ imgMonoidDockers :: [ImageDockerOpts]
} deriving (Show, Generic)
instance FromJSON (WithJSONWarnings ImageOptsMonoid) where
parseJSON = withObjectWarnings
"ImageOptsMonoid"
(\o ->
do (oldDocker :: Maybe ImageDockerOpts) <- jsonSubWarningsT (o ..:? imgDockerOldArgName)
(dockers :: [ImageDockerOpts]) <- jsonSubWarningsT (o ..:? imgDockersArgName ..!= [])
let imgMonoidDockers = dockers ++ maybeToList oldDocker
return
ImageOptsMonoid
{ ..
})
instance Monoid ImageOptsMonoid where
mempty = memptydefault
mappend = mappenddefault
instance FromJSON (WithJSONWarnings ImageDockerOpts) where
parseJSON = withObjectWarnings
"ImageDockerOpts"
(\o ->
do imgDockerBase <- o ..:? imgDockerBaseArgName
imgDockerEntrypoints <- o ..:? imgDockerEntrypointsArgName
imgDockerAdd <- o ..:? imgDockerAddArgName ..!= Map.empty
imgDockerImageName <- o ..:? imgDockerImageNameArgName
imgDockerExecutables <- o ..:? imgDockerExecutablesArgName
return
ImageDockerOpts
{ ..
})
imgArgName :: Text
imgArgName = "image"
-- Kept for backward compatibility
imgDockerOldArgName :: Text
imgDockerOldArgName = "container"
imgDockersArgName :: Text
imgDockersArgName = "containers"
imgDockerBaseArgName :: Text
imgDockerBaseArgName = "base"
imgDockerAddArgName :: Text
imgDockerAddArgName = "add"
imgDockerEntrypointsArgName :: Text
imgDockerEntrypointsArgName = "entrypoints"
imgDockerImageNameArgName :: Text
imgDockerImageNameArgName = "name"
imgDockerExecutablesArgName :: Text
imgDockerExecutablesArgName = "executables"
| mrkkrp/stack | src/Stack/Types/Image.hs | bsd-3-clause | 3,463 | 0 | 15 | 896 | 576 | 326 | 250 | 80 | 1 |
module Let where
import PointlessP.Functors
{- imports will be added for the PointlessP librasies -}
-- the whole expression will be selected for translation.
isort =
let leq = \x y -> if (==0) x then True
else if y==0 then False
else leq (pred x) (pred y)
in let insert = \x lst ->
if null lst then [x]
else if (leq x (head lst))
then x : lst
else (head lst) : (insert x (tail lst))
in let isort = \lst -> if (null lst)
then []
else insert (head lst) (isort (tail lst))
in isort
| kmate/HaRe | old/testing/pointwiseToPointfree/Let.hs | bsd-3-clause | 743 | 0 | 19 | 361 | 215 | 115 | 100 | 15 | 6 |
{-# LANGUAGE RankNTypes, TemplateHaskell #-}
module T11452 where
impred :: (forall a. a -> a) -> ()
impred = $$( [|| \_ -> () ||] )
| snoyberg/ghc | testsuite/tests/th/T11452.hs | bsd-3-clause | 134 | 2 | 8 | 28 | 51 | 31 | 20 | -1 | -1 |
{-# LANGUAGE LambdaCase, OverloadedStrings, FlexibleContexts #-}
module App (app) where
import Control.Monad.IO.Class (liftIO)
import Control.Monad
import Data.ByteString.Builder (Builder)
import qualified Data.Map as M
import Data.Maybe (fromJust)
import qualified Data.Text.Lazy as T
import qualified Data.Conduit.List as CL
import Data.Conduit (Source, Flush(..), ($$))
import Network.Wai (Application)
import Network.Wai.Middleware.Cors
import Web.Scotty
import Network.HTTP.Types.Status (unauthorized401)
import Control.Concurrent.STM (atomically, STM, readTVar)
import Octopus.Command
import Octopus.Jobs
import qualified Octopus.SerializableIO as SIO
import Octopus.Owner
import Octopus.TQueue
import Data.Conduit.TMChan (sourceTMChan, TMChan)
runAccounted :: Parsable t => (t -> IO a) -> OwnerMap -> (a -> ActionM ()) -> ActionM ()
runAccounted runner ownerMap liftOut = do
name <- param "name"
email <- header "from"
owner <- liftIO $ atomically $ lookupCreateOwner email ownerMap
eligible <- liftIO $ canEnqueue owner
case eligible of
True -> do
owner' <- liftIO $ bumpOwner owner ownerMap -- we may want to bump *after* the action is finished
addHeader "X-Owner" $ T.pack $ show owner'
output <- liftIO $ runner name
liftOut output
False -> do
status unauthorized401
addHeader "X-Owner" $ T.pack $ show owner
text $ mconcat ["quota exceeded: ", T.pack $ show owner, "\n"]
run :: Parsable t => (t -> IO a) -> (a -> ActionM ()) -> ActionM ()
run runner liftOut = do
name <- param "name"
output <- liftIO $ runner name
liftOut output
source :: Source IO (Flush Builder) -> ActionM ()
source src = stream $ \send flush ->
src $$ CL.mapM_ (\case Chunk b -> send b
Flush -> flush)
chanSource :: ChunkChan -> ActionM ()
chanSource = source <=< return . sourceTMChan
chanText :: TMChan T.Text -> ActionM ()
chanText = text <=< liftIO . fmap fromJust . SIO.awaitResult
app :: FilePath -> IO Application
app jobFile = do
cmdQ <- atomically $ (SIO.queueMap :: STM (SIO.ActionQueueMap Command))
ownerMap <- atomically $ emptyOwnerMap
scottyApp $ do
middleware simpleCors
get "/" $ json =<< liftIO jobs
-- this call should be available only for admin users
post "/concurrent/:name" $ run sourceRunner source
get "/queue/:name" $ run ((\comm -> atomically $ readTVar cmdQ >>= dumpTQueue . fromJust . M.lookup comm) <=< command) json
post "/enqueue/:name" $ dispatch cmdQ ownerMap chanSource
post "/qplain/:name" $ dispatch cmdQ ownerMap chanText
get "/attach/:name" $ setHeader "Cache-Control" "no-cache" >> run attachRunner chanSource
where
jobs :: IO JobsSpec
jobs = readJobs jobFile
command :: JobName -> IO Command
command name = jobs >>= return . fromJust . M.lookup name
sourceRunner :: JobName -> IO ChunkSource
sourceRunner = runCommandS <=< command
attachRunner :: JobName -> IO ChunkChan
attachRunner = fmap fromJust . atomically . SIO.attach <=< command
dispatch :: SIO.SerializableIO Command result => SIO.ActionQueueMap Command -> OwnerMap -> (TMChan result -> ActionM ()) -> ActionM ()
dispatch cmdQ = runAccounted $ command >=> SIO.dispatchAction cmdQ
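-- Illustrative usage (a sketch, not part of this module): assuming the WAI
-- 'Application' returned by 'app' is served on localhost:8080 and that
-- "build" is a job name present in the jobs file, a job could be enqueued
-- with:
--
-- > curl -X POST -H 'From: [email protected]' http://localhost:8080/enqueue/build
--
-- The @From@ header identifies the owner used for the per-owner quota check
-- in 'runAccounted'.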
| zalora/octopus | src/App.hs | isc | 3,447 | 0 | 20 | 836 | 1,068 | 537 | 531 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NamedFieldPuns #-}
module LR0 where
-- import Control.Monad (forM, forM_)
import Control.Monad.State (get, put, modify, runState)
import qualified Control.Monad.State as S (State)
import Data.List (intercalate, partition)
import Text.LaTeX
-- import Text.LaTeX.Base.Syntax (LaTeX(TeXRaw,TeXEnv), TeXArg(FixArg,OptArg))
-- import Text.LaTeX.Base.Class (LaTeXC, fromLaTeX, liftL, fromLaTeX)
import Text.LaTeX.Packages.Beamer
import Text.LaTeX.Packages.AMSMath (align_)
import Text.LaTeX.Packages.Trees.Forest (pforest)
-- import Text.LaTeX.Packages.Trees.Qtree
-- import Text.LaTeX.Packages.TikZ (tikz)
import Text.LaTeX.Packages.Inputenc
-- import Text.LaTeX.Packages.Color (textcolor, ColSpec(DefColor), Color(Blue))
import Grammar
groupBy' :: (a -> a -> Bool) -> [a] -> [[a]]
groupBy' _ [] = []
groupBy' eq (x:xs) = (x:ys) : groupBy' eq zs
where
(ys,zs) = partition (eq x) xs
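-- Unlike Data.List.groupBy, this groups *all* elements related to the first
-- element of each group, not only adjacent ones.  For example:
--
-- > groupBy' (==) [1,2,1,3,2]  ==  [[1,1],[2,2],[3]]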
elem' :: (a -> Bool) -> [a] -> Bool
elem' _ [] = False
elem' test (x:xs) = test x || elem' test xs
type Slide = Int
type Action = String
data Doc a = Doc { info :: a
, overlays :: [(Slide,Action)]
}
deriving (Show)
type DocItem = Doc (Doc Symbol,[Doc Symbol],[Doc Symbol])
type DocState = Doc ([DocItem],[DocItem])
type DocTrans = Doc [(State,Symbol,State)]
type DocGramar = Doc [Doc [Doc Symbol]]
data DocLR0 =
DocLR0
{ slide :: Slide
, grammar :: DocGramar
, st :: DocState
, trans :: DocTrans
}
deriving (Show)
nextSlide :: S.State DocLR0 Slide
nextSlide =
do doc@DocLR0{slide} <- get
let newSlide = slide + 1
put doc{slide = newSlide}
return newSlide
actProduction :: Int -> Action -> S.State DocLR0 ()
actProduction i action =
modify $ \doc@DocLR0{slide,grammar=Doc g govs} ->
let (ps1,Doc p povs:ps2) = splitAt i g
g' = ps1 ++ (Doc p ((slide,action):povs):ps2)
in doc{grammar=Doc g' govs}
actProductionSymbol :: Int -> Int -> Action -> S.State DocLR0 ()
actProductionSymbol i j action =
modify $ \doc@DocLR0{slide,grammar=Doc g govs} ->
let (ps1, Doc p povs : ps2) = splitAt i g
(ss1, Doc s sovs : ss2) = splitAt j p
p' = ss1 ++ Doc s ((slide,action):sovs) : ss2
g' = ps1 ++ Doc p' povs : ps2
in doc{grammar=Doc g' govs}
type Item = (Symbol,[Symbol],[Symbol])
type State = (Int,[Item])
showItem :: Item -> String
showItem (nt,xs,ys) =
show nt ++ " -> " ++
intercalate " " (map show (reverse xs)) ++ " . " ++
intercalate " " (map show ys)
showState :: State -> String
showState (counter,items) =
unlines (show counter : map showItem items)
showLR0 :: [State] -> String
showLR0 states =
unlines (map showState states)
startSymbol :: Grammar -> S.State DocLR0 Symbol
startSymbol (Grammar ((lhs:_):_)) = return lhs
initialDocLR0 :: Grammar -> DocLR0
initialDocLR0 (Grammar g) =
DocLR0
{ slide = 0
, grammar = Doc (map (\prod -> Doc (map (\sym -> Doc sym []) prod) []) g) []
, st = Doc ([],[]) []
, trans = Doc [] []
}
docLRLatex :: Monad m => DocLR0 -> LaTeXT_ m
docLRLatex x =
do documentclass [a4paper] beamer
-- raw "\\usepackage[lmargin=1cm]{geometry}"
-- raw "\\usepackage[]{longtable}"
usepackage [utf8] inputenc
usepackage [] pforest
-- usepackage [] qtree
-- usepackage [] "tikz-qtree"
-- usepackage [] tikz
author "José Romildo Malaquias"
title "LR Parsing"
usetheme CambridgeUS
document $
do frame maketitle
frame $
do frametitle "Construção da tabela LR(0)"
traceGrammar (grammar x)
traceGrammar :: Monad m => DocGramar -> LaTeXT_ m
traceGrammar (Doc prods _) =
align_ $ map (\(Doc (Doc nt _ : _) _) -> texy nt) prods
buildLR0 :: Grammar -> S.State DocLR0 ()
buildLR0 (Grammar _g) =
do actProduction 0 "invisible"
-- actProductionNumbers "invisible"
nextSlide
actProductionSymbol 1 0 "alert"
nextSlide
actProduction 0 "visible"
nextSlide
return ()
runLR0 :: Grammar -> DocLR0
runLR0 g =
snd (runState (buildLR0 g) (initialDocLR0 g))
-- selectProductions :: Grammar -> Symbol -> [[Symbol]]
-- selectProductions g lhs = filter ((== lhs) . head) g
-- mkItems :: Grammar -> Symbol -> [Item]
-- mkItems g nt =
-- map (\(lhs:rhs) -> (lhs,[],rhs)) (selectProductions g nt)
-- closure :: Grammar -> [Item] -> [Item]
-- closure g items =
-- go [] items
-- where
-- go items [] = reverse items
-- go items (i@(_,_,N x:_):is) =
-- go (i:items)
-- (is ++ filter (\i -> not (elem i items) && not (elem i is))
-- (mkItems g (N x)))
-- go items (i:is) = go (i:items) is
-- push :: Item -> Item
-- push (lhs,stack,x:xs) = (lhs,x:stack,xs)
-- lr0 :: Grammar -> [State]
-- lr0 g =
-- go 2 [] [(1,closure g (mkItems g (startSymbol g)))]
-- where
-- go _ states [] = reverse states
-- go counter states (st@(_,s0):ss) =
-- go counter' (st:states) (ss++ss6)
-- where
-- s1 = filter (\(_,_,ys) -> not (null ys)) s0
-- ss2 = groupBy' (\(_,_,x:_) (_,_,y:_) -> x == y) s1
-- ss3 = map (map push) ss2
-- ss4 = map (closure g) ss3
-- ss5 = filter (\x -> isNew x st && all (isNew x) states && all (isNew x) ss) ss4
-- ss6 = zip [counter ..] ss5
-- counter' = counter + length ss5
-- isNew x (_,y) = x /= y
| romildo/gsynt | src/LR0_.hs | isc | 5,537 | 0 | 18 | 1,440 | 1,558 | 845 | 713 | 105 | 1 |
import Data.Char
import Data.Int
lowers :: String -> Int
lowers xs = length [x | x <- xs, isLower x]
count :: Char -> String -> Int
count x xs = length [x' | x' <- xs, x == x']
positions :: Eq a => a -> [a] -> [Int]
positions x xs = [i | (x', i) <- zip xs [0..n], x == x']
where n = length xs - 1
let2int :: Char -> Int
let2int c = ord c - ord 'a'
int2let :: Int -> Char
int2let n = chr(ord 'a' + n)
shift :: Int -> Char -> Char
shift n c | isLower c = int2let ((let2int c + n) `mod` 26)
| otherwise = c
encode :: Int -> String -> String
encode n xs = [shift n x | x <- xs]
table :: [Float]
table = [ 8.2, 1.5, 2.8, 4.3, 12.7, 2.2, 2.0, 6.1, 7.0, 0.2, 0.8, 4.0, 2.4,
6.7, 7.5, 1.9, 0.1, 6.0, 6.3, 9.1, 2.8, 1.0, 2.4, 0.2, 2.0, 0.1 ]
percent :: Int -> Int -> Float
percent n m = (fromIntegral n / fromIntegral m) * 100
freqs :: String -> [Float]
freqs xs = [percent (count x xs) n | x <- ['a'..'z']]
where n = lowers xs
chisqr :: [Float] -> [Float] -> Float
chisqr os es = sum [((o - e) ^ 2) / e | (o, e) <- zip os es]
rotate :: Int -> [a] -> [a]
rotate n xs = drop n xs ++ take n xs
crack :: String -> String
crack xs = encode (-factor) xs
where factor = head (positions (minimum chitab) chitab)
chitab = [chisqr (rotate n table') table | n <- [0..25]]
table' = freqs xs
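-- Illustrative usage (results shown for this particular input):
--
-- > encode 3 "haskell is fun"
-- "kdvnhoo lv ixq"
--
-- > crack "kdvnhoo lv ixq"
-- "haskell is fun"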
| jugalps/edX | FP101x/week4/ceaser.hs | mit | 1,376 | 0 | 11 | 396 | 736 | 392 | 344 | 35 | 1 |
module Protocol where
import Data.Monoid ((<>))
import Data.Foldable
import Control.Monad
import Test.Tasty
import Test.Tasty.HUnit
import Database.PostgreSQL.Driver.Connection
import Database.PostgreSQL.Driver.StatementStorage
import Database.PostgreSQL.Driver.Query
import Database.PostgreSQL.Driver.Error
import Database.PostgreSQL.Protocol.Types
import Database.PostgreSQL.Protocol.Codecs.Encoders as PE
import Connection
testProtocolMessages :: TestTree
testProtocolMessages = testGroup "Protocol messages"
[ testCase "Simple query " testSimpleQuery
, testCase "Extended query" testExtendedQuery
, testCase "Extended query empty query" testExtendedEmptyQuery
, testCase "Extended query no data" testExtendedQueryNoData
]
-- | Tests a multi-command simple query.
testSimpleQuery :: IO ()
testSimpleQuery = withConnectionCommonAll $ \c -> do
let rawConn = connRawConnection c
statement = StatementSQL $
"DROP TABLE IF EXISTS a;"
<> "CREATE TABLE a(v int);"
<> "INSERT INTO a VALUES (1), (2), (3);"
<> "SELECT * FROM a;"
<> "DROP TABLE a;"
sendMessage rawConn $ SimpleQuery statement
msgs <- collectUntilReadyForQuery c
assertNoErrorResponse msgs
assertContains msgs isCommandComplete "Command complete"
where
isCommandComplete (CommandComplete _) = True
isCommandComplete _ = False
-- Tests all messages that are permitted in the extended query protocol.
testExtendedQuery :: IO ()
testExtendedQuery = withConnectionCommonAll $ \c -> do
let rawConn = connRawConnection c
sname = StatementName "statement"
pname = PortalName "portal"
statement = StatementSQL "SELECT $1 + $2"
sendMessage rawConn $ Parse sname statement [Oid 23, Oid 23]
sendMessage rawConn $
Bind pname sname Text [Just $ PE.bytea "1", Just $ PE.bytea "2"] Text
sendMessage rawConn $ Execute pname noLimitToReceive
sendMessage rawConn $ DescribeStatement sname
sendMessage rawConn $ DescribePortal pname
sendMessage rawConn $ CloseStatement sname
sendMessage rawConn $ ClosePortal pname
sendMessage rawConn Flush
sendMessage rawConn Sync
msgs <- collectUntilReadyForQuery c
assertNoErrorResponse msgs
assertContains msgs isBindComplete "BindComplete"
assertContains msgs isCloseComplete "CloseComplete"
assertContains msgs isParseComplete "ParseComplete"
assertContains msgs isDataRow "DataRow"
assertContains msgs isCommandComplete "CommandComplete"
assertContains msgs isParameterDecription "ParameterDescription"
assertContains msgs isRowDescription "RowDescription"
where
isBindComplete BindComplete = True
isBindComplete _ = False
isCloseComplete CloseComplete = True
isCloseComplete _ = False
isParseComplete ParseComplete = True
isParseComplete _ = False
isDataRow DataRow{} = True
isDataRow _ = False
isCommandComplete (CommandComplete _) = True
isCommandComplete _ = False
isParameterDecription (ParameterDescription _) = True
isParameterDecription _ = False
isRowDescription (RowDescription _) = True
isRowDescription _ = False
-- | Tests that PostgreSQL returns `EmptyQueryResponse` when a query
-- string is empty.
testExtendedEmptyQuery :: IO ()
testExtendedEmptyQuery = withConnectionCommonAll $ \c -> do
let rawConn = connRawConnection c
sname = StatementName "statement"
pname = PortalName ""
statement = StatementSQL ""
sendMessage rawConn $ Parse sname statement []
sendMessage rawConn $
Bind pname sname Text [] Text
sendMessage rawConn $ Execute pname noLimitToReceive
sendMessage rawConn Sync
msgs <- collectUntilReadyForQuery c
assertNoErrorResponse msgs
assertContains msgs isEmptyQueryResponse "EmptyQueryResponse"
where
isEmptyQueryResponse EmptyQueryResponse = True
isEmptyQueryResponse _ = False
-- | Tests that `describe statement` receives NoData when a statement
-- has no data in the result.
testExtendedQueryNoData :: IO ()
testExtendedQueryNoData = withConnectionCommonAll $ \c -> do
let rawConn = connRawConnection c
sname = StatementName "statement"
statement = StatementSQL "SET client_encoding to UTF8"
sendMessage rawConn $ Parse sname statement []
sendMessage rawConn $ DescribeStatement sname
sendMessage rawConn Sync
msgs <- collectUntilReadyForQuery c
assertContains msgs isNoData "NoData"
where
isNoData NoData = True
isNoData _ = False
-- | Assert that the list contains an element satisfying the predicate.
assertContains
:: Either Error [ServerMessage]
-> (ServerMessage -> Bool)
-> String -> Assertion
assertContains (Left e) _ _ = assertFailure $ "Got Error " ++ show e
assertContains (Right msgs) f name =
    assertBool ("Does not contain " ++ name) $ any f msgs
-- | Assert there are no `ErrorResponse`s in the list.
assertNoErrorResponse :: Either Error [ServerMessage] -> Assertion
assertNoErrorResponse (Left e) = assertFailure $ "Got Error " ++ show e
assertNoErrorResponse (Right msgs) =
    assertBool "Occurred ErrorResponse" $ all (not . isError) msgs
where
isError (ErrorResponse _) = True
isError _ = False
| postgres-haskell/postgres-wire | tests/Protocol.hs | mit | 5,631 | 0 | 16 | 1,429 | 1,199 | 580 | 619 | 114 | 8 |
module Provider
( FakeProvider()
, activeInteractions
, verifiedInteractions
, addInteraction
, setInteractions
, resetInteractions
, findInteractionForRequest
, recordRequest
, verifyInteractions
, getVerifiedInteractions
, FakeProviderState
, newFakeProviderState
, run
) where
import Control.Concurrent.STM
import Control.Monad.State
import qualified Data.List as L
import qualified Pact as P
data FakeProvider = FakeProvider
{ activeInteractions :: [P.Interaction]
, matchedInteractions :: [P.Interaction]
, verifiedInteractions :: [P.Interaction]
, mismatchedRequests :: [P.Request]
} deriving (Show)
initialFakeProvider :: FakeProvider
initialFakeProvider = FakeProvider [] [] [] []
addInteraction :: P.Interaction -> State FakeProvider ([P.Interaction])
addInteraction i = do
modify $ \p -> p { activeInteractions = (i : activeInteractions p) }
gets $ \p -> activeInteractions p
setInteractions :: [P.Interaction] -> State FakeProvider ()
setInteractions is = modify $ \p -> p { activeInteractions = is }
resetInteractions :: State FakeProvider ()
resetInteractions = modify $ \p -> p { activeInteractions = [], mismatchedRequests = [], matchedInteractions = [] }
findInteractionForRequest :: P.Request -> State FakeProvider ([(P.Interaction, [P.ValidationError])])
findInteractionForRequest req = gets $ \p -> map toTuple $ activeInteractions p
where
toTuple :: P.Interaction -> (P.Interaction, [P.ValidationError])
toTuple interaction = (interaction, P.validateRequest (P.interactionRequest interaction) req)
addInteractionMatch :: P.Interaction -> State FakeProvider ()
addInteractionMatch i = modify $ \p -> p
{ matchedInteractions = (i : matchedInteractions p)
, verifiedInteractions = (i : verifiedInteractions p)
}
addMismatchedRequest :: P.Request -> State FakeProvider ()
addMismatchedRequest i = modify $ \p -> p { mismatchedRequests = (i : mismatchedRequests p) }
recordRequest :: P.Request -> State FakeProvider (Either [(P.Interaction, [P.ValidationError])] P.Interaction)
recordRequest req = do
interactions <- findInteractionForRequest req
let (successful, failed) = L.partition (null . snd) interactions
case successful of
-- we only care about the first match and ignore later matches
((interaction, _):_) -> do
addInteractionMatch interaction
pure (Right interaction)
[] -> do
addMismatchedRequest req
pure (Left failed)
verifyInteractions :: State FakeProvider (Bool, [P.Request], [P.Interaction], [P.Interaction])
verifyInteractions = gets $ \p ->
let isSuccess = (length $ mismatchedRequests p) == 0 && (length $ matchedInteractions p) == (length $ activeInteractions p)
  in (isSuccess, mismatchedRequests p, matchedInteractions p, activeInteractions p)
getVerifiedInteractions :: State FakeProvider [P.Interaction]
getVerifiedInteractions = gets verifiedInteractions
-- Transactions
type FakeProviderState = TVar FakeProvider
type FakeProviderTX a = FakeProviderState -> STM (a, FakeProvider)
newFakeProviderState :: IO FakeProviderState
newFakeProviderState = newTVarIO initialFakeProvider
liftTX :: State FakeProvider a -> FakeProviderTX a
liftTX m v = do
(a, s) <- runState m <$> readTVar v
writeTVar v s
return (a, s)
runTX :: FakeProviderState -> FakeProviderTX a -> IO a
runTX fps tx = atomically (tx fps) >>= \(a, _) -> pure a
run :: FakeProviderState -> State FakeProvider a -> IO a
run fps = runTX fps . liftTX
| mannersio/manners | cli/src/Provider.hs | mit | 3,463 | 0 | 16 | 547 | 1,093 | 588 | 505 | 76 | 2 |
module Proxy.Query.ImportAllQueries (
module Proxy.Query.Tech,
module Proxy.Query.Upgrade,
module Proxy.Query.Unit) where
import Proxy.Query.Tech
import Proxy.Query.Upgrade
import Proxy.Query.Unit
| mapinguari/SC_HS_Proxy | src/Proxy/Query/ImportAllQueries.hs | mit | 204 | 0 | 5 | 23 | 48 | 33 | 15 | 7 | 0 |
module Validation where
import Data.Either.Validation
import Control.Applicative
import Data.Semigroup
data Errors =
DividedByZero
| StackOverflow
| MooglesChewedWires
deriving (Eq, Show)
-- using Validation rather than Either ...
success :: Validation [Errors] Int
success = Success (+1) <*> Success 1
failure :: Validation [Errors] Int
failure = Success (+1) <*> Failure [StackOverflow]
failure' :: Validation [Errors] Int
failure' = Failure [StackOverflow] <*> Success (+1)
failures :: Validation [Errors] Int
failures = Failure [MooglesChewedWires] <*> Failure [StackOverflow]
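-- Unlike Either, the Validation applicative accumulates errors through the
-- Semigroup instance of the failure type, so both errors are kept:
--
-- > failures == Failure [MooglesChewedWires, StackOverflow]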
| NickAger/LearningHaskell | HaskellProgrammingFromFirstPrinciples/Chapter17/Validation/src/Validation.hs | mit | 558 | 0 | 7 | 82 | 168 | 94 | 74 | 16 | 1 |
{-|
Module : Web.Facebook.Messenger.Types.Callbacks.Read
Copyright : (c) Felix Paulusma, 2016
License : MIT
Maintainer : [email protected]
Stability : semi-experimental
This callback will occur when a message a page has sent has been read by the user.
You can subscribe to this callback by selecting the @"message_reads"@ field when setting up your webhook.
https://developers.facebook.com/docs/messenger-platform/reference/webhook-events/message-reads
-}
module Web.Facebook.Messenger.Types.Callbacks.Read (
-- * Message Read Callback
ReadCallback (..)
)
where
import Data.Aeson
import Web.Facebook.Messenger.Internal
-- --------------- --
-- READ CALLBACK --
-- --------------- --
-- | Callback that certain messages have been read
--
-- The `rWatermark` field is used to determine which messages were read.
-- It represents a timestamp indicating that all messages with a timestamp before watermark were read by the recipient.
data ReadCallback = ReadCallback
{ rWatermark :: Integer -- ^ All messages that were sent before this timestamp were read
, rSeq :: Maybe Integer -- ^ Sequence number
} deriving (Eq, Show, Read, Ord)
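-- An illustrative webhook payload for this callback (field names follow the
-- 'FromJSON' instance below; the numbers are made up):
--
-- > { "watermark": 1458668856253, "seq": 38 }
--
-- which decodes to @ReadCallback 1458668856253 (Just 38)@.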
-- ---------------- --
-- READ INSTANCES --
-- ---------------- --
instance FromJSON ReadCallback where
parseJSON = withObject "ReadCallback" $ \o ->
ReadCallback <$> o .: "watermark"
<*> o .:? "seq"
instance ToJSON ReadCallback where
toJSON (ReadCallback watermark seq') =
object' [ "watermark" .=! watermark
, "seq" .=!! seq'
]
| Vlix/facebookmessenger | src/Web/Facebook/Messenger/Types/Callbacks/Read.hs | mit | 1,579 | 0 | 11 | 317 | 170 | 102 | 68 | 16 | 0 |
-- |
-- The @FP15.Evaluator@ module contains the Haskell implementation of FP15.
-- The evaluator accepts FP15 definitions in the form of 'BaseExpr', which is generated
-- by the compiler, and translates the definitions into a map of Haskell functions.
module FP15.Evaluator where
import FP15.Evaluator.Types()
import FP15.Evaluator.Contract()
import FP15.Evaluator.Number()
import FP15.Evaluator.Standard()
import FP15.Evaluator.Translation()
| Ming-Tang/FP15 | src/FP15/Evaluator.hs | mit | 445 | 0 | 4 | 54 | 55 | 38 | 17 | 6 | 0 |
module U.Codebase.Sqlite.Patch.TermEdit where
import Data.Bifoldable (Bifoldable (bifoldMap))
import Data.Bifunctor (Bifunctor (bimap))
import Data.Bitraversable (Bitraversable (bitraverse))
import U.Codebase.Reference (Reference')
import qualified U.Codebase.Referent as Referent
import qualified U.Codebase.Sqlite.DbId as Db
import U.Codebase.Sqlite.LocalIds (LocalDefnId, LocalTextId)
type TermEdit = TermEdit' Db.TextId Db.ObjectId
type LocalTermEdit = TermEdit' LocalTextId LocalDefnId
type Referent' t h = Referent.Referent' (Reference' t h) (Reference' t h)
data TermEdit' t h = Replace (Referent' t h) Typing | Deprecate
deriving (Eq, Ord, Show)
-- Replacements with the Same type can be automatically propagated.
-- Replacements with a Subtype can be automatically propagated but may result in dependents getting more general types, so requires re-inference.
-- Replacements of a Different type need to be manually propagated by the programmer.
data Typing = Same | Subtype | Different
deriving (Eq, Ord, Show)
instance Bifunctor TermEdit' where
bimap f g (Replace r t) = Replace (bimap (bimap f g) (bimap f g) r) t
bimap _ _ Deprecate = Deprecate
instance Bifoldable TermEdit' where
bifoldMap f g (Replace r _t) = bifoldMap (bifoldMap f g) (bifoldMap f g) r
bifoldMap _ _ Deprecate = mempty
instance Bitraversable TermEdit' where
bitraverse f g (Replace r t) = Replace <$> bitraverse (bitraverse f g) (bitraverse f g) r <*> pure t
bitraverse _ _ Deprecate = pure Deprecate
| unisonweb/platform | codebase2/codebase-sqlite/U/Codebase/Sqlite/Patch/TermEdit.hs | mit | 1,508 | 0 | 10 | 237 | 434 | 241 | 193 | 24 | 0 |
{-# LANGUAGE QuasiQuotes #-}
module Grammar.Greek.Morph.Forms.Enclitic where
import Grammar.Greek.Morph.QuasiQuoters
import Grammar.Greek.Morph.Types
-- Smyth 181
-- εἰμί pres indic except εἶ
-- φημί pres indic except φῄς
-- the inseparable -δε in ὅδε, τοσόσδε
encliticForms :: [ AccentedWord ]
encliticForms = [accentedWords|
μοῦ
μοί
μέ
σοῦ
σοί
σέ
οὗ
οἷ
ἕ
τίς
τί
τοῦ
τῷ
τινός
τινί
πού
ποθί
πῄ
ποί
ποθέν
ποτέ
πώ
πώς
ἐστί
ἐστίν
ἐσμέν
ἐστέ
εἰσί
εἰσίν
φημί
φησί
φαμέν
φατέ
φασί
φασίν
γέ
τέ
τοί
πέρ
|]
| ancientlanguage/haskell-analysis | greek-morph/src/Grammar/Greek/Morph/Forms/Enclitic.hs | mit | 784 | 0 | 5 | 171 | 45 | 34 | 11 | 6 | 1 |
{-# LANGUAGE OverloadedStrings, TypeApplications #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# OPTIONS_GHC -fdefer-typed-holes #-}
module Main where
import MockAPI
import Servant
import Network.Wai.Handler.Warp
import Data.Text (Text)
import Control.Concurrent (threadDelay)
import Control.Monad.IO.Class
import qualified Data.Map as M
import Network.Wai.Middleware.Gzip
import Shaped
import Generics.SOP
import Data.Functor.Const
server :: Server MockApi
server = combinedValidations :<|> serveAssets :<|> serveJS
where
serveAssets = serveDirectory "./mockClient/assets"
serveJS = serveDirectory "./mockClient/js/"
authenticate :: (Monad m, MonadIO m) => User -> m Text
authenticate u
| correctInfo = liftIO (threadDelay 1000000) >> return "Authenticated"
| userPresent = liftIO (threadDelay 1000000) >> return "Wrong password"
| otherwise = liftIO (threadDelay 1000000) >> return "Not Authenticated"
where
users = M.fromList [ ("[email protected]", "pass1")
, ("[email protected]", "pass2")
, ("[email protected]", "pass3")
]
correctInfo = M.lookup (userMail u) users == Just (userPassword u)
userPresent = userMail u `elem` M.keys users
serverSideValidation :: (Monad m, MonadIO m) => User -> m (Either (UserShaped (Const (Maybe Text))) User)
serverSideValidation u
| correctInfo = do
liftIO (threadDelay 1000000)
return (Right u)
| not userPresent = do
liftIO (threadDelay 1000000)
return . Left $ UserShaped (Const $ Just "The user mail is not present") (Const Nothing)
| otherwise = do
liftIO (threadDelay 1000000)
return . Left $ UserShaped (Const Nothing) (Const $ Just "The password is wrong")
where
users = M.fromList [ ("[email protected]", "pass1")
, ("[email protected]", "pass2")
, ("[email protected]", "pass3")
]
correctInfo = M.lookup (userMail u) users == Just (userPassword u)
userPresent = userMail u `elem` M.keys users
combinedValidations :: (Monad m, MonadIO m) => User -> m (Either (UserShaped (Const (Maybe Text))) User)
combinedValidations u = case transfGen $ validateRecord u clientValidation of
Left a -> return $ Left a
Right b -> serverSideValidation b
main :: IO ()
main = run 8081 (gzip gzipSettings $ serve (Proxy @MockApi) server)
where
gzipSettings = def { gzipFiles = GzipCompress }
| meditans/haskell-webapps | UI/ReflexFRP/mockLoginPage/mockServer/Main.hs | mit | 2,454 | 0 | 15 | 547 | 760 | 390 | 370 | 52 | 2 |
module Main where
import System.Environment(getArgs)
import System.Exit(exitWith)
import System.Process(rawSystem)
import SHK
main :: IO ()
main = do
args <- getArgs
let srcFiles = filter isJSHSource args
mapM_ kompile srcFiles
let jArgs = map (\a -> if isJSHSource a then toJavaFileExt a else a) args
exitCode <- rawSystem "javac" jArgs
exitWith exitCode
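-- Illustrative invocation (a sketch resting on assumptions: the program is
-- installed as `shk`, 'isJSHSource' from the SHK module matches a ".jsh"
-- extension, and 'toJavaFileExt' rewrites it to ".java"):
--
-- > shk Main.jsh Helper.java
--
-- would first run 'kompile' on Main.jsh and then invoke
-- `javac Main.java Helper.java`, exiting with javac's exit code.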
| taxell/SHK | src/Main.hs | mit | 365 | 0 | 14 | 61 | 135 | 68 | 67 | 13 | 2 |
{-# LANGUAGE PackageImports #-}
{-# OPTIONS_GHC -fno-warn-dodgy-exports -fno-warn-unused-imports #-}
-- | Reexports "Prelude.Compat"
-- from a globally unique namespace.
module Prelude.Compat.Repl (
module Prelude.Compat
) where
import "this" Prelude.Compat
| haskell-compat/base-compat | base-compat/src/Prelude/Compat/Repl.hs | mit | 260 | 0 | 5 | 31 | 25 | 18 | 7 | 5 | 0 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.CustomEvent
(js_initCustomEvent, initCustomEvent, js_getDetail, getDetail,
CustomEvent, castToCustomEvent, gTypeCustomEvent)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe
"$1[\"initCustomEvent\"]($2, $3,\n$4, $5)" js_initCustomEvent ::
CustomEvent -> JSString -> Bool -> Bool -> JSVal -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/CustomEvent.initCustomEvent Mozilla CustomEvent.initCustomEvent documentation>
initCustomEvent ::
(MonadIO m, ToJSString typeArg) =>
CustomEvent -> typeArg -> Bool -> Bool -> JSVal -> m ()
initCustomEvent self typeArg canBubbleArg cancelableArg detailArg
= liftIO
(js_initCustomEvent (self) (toJSString typeArg) canBubbleArg
cancelableArg
detailArg)
foreign import javascript unsafe "$1[\"detail\"]" js_getDetail ::
CustomEvent -> IO JSVal
-- | <https://developer.mozilla.org/en-US/docs/Web/API/CustomEvent.detail Mozilla CustomEvent.detail documentation>
getDetail :: (MonadIO m) => CustomEvent -> m JSVal
getDetail self = liftIO (js_getDetail (self)) | manyoo/ghcjs-dom | ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/CustomEvent.hs | mit | 1,963 | 20 | 9 | 290 | 486 | 295 | 191 | 33 | 1 |
{- |
Module : $Header$
Description : coerce logic entities dynamically
Copyright : (c) T. Mossakowski, C. Maeder, Uni Bremen 2005-2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : [email protected]
Stability : provisional
Portability : non-portable (various -fglasgow-exts extensions)
Functions for coercion used in Grothendieck.hs and Analysis modules:
These tell the typechecker that things dynamically belong to the same logic
-}
module Logic.Coerce where
import Logic.Logic
import Logic.Prover
import Common.ExtSign
import Common.Id
import Common.Result
import Common.AS_Annotation
import qualified Data.Set as Set
import qualified Data.Map as Map
import Data.Dynamic
import ATC.LibName ()
import ATC.Prover ()
import ATC.ExtSign ()
-- coercion using the language name
primCoerce :: (Typeable a, Typeable b, Language lid1, Language lid2,
Monad m) => lid1 -> lid2 -> String -> a -> m b
primCoerce i1 i2 err a =
if language_name i1 == language_name i2
then return $ fromDyn (toDyn a) $ error "primCoerce"
else fail $ (if null err then "" else err ++ ": ") ++ "Logic "
++ language_name i2 ++ " expected, but "
++ language_name i1 ++ " found"
unsafeCoerce :: (Typeable a, Typeable b, Language lid1, Language lid2)
=> lid1 -> lid2 -> a -> b
unsafeCoerce i1 i2 a = maybe (error "unsafeCoerce") id $ primCoerce i1 i2 "" a
coerceToResult :: (Typeable a, Typeable b, Language lid1, Language lid2) =>
lid1 -> lid2 -> Range -> a -> Result b
coerceToResult i1 i2 pos a = adjustPos pos $ primCoerce i1 i2 "" a
coerceSublogic ::
(Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1,
Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2,
Monad m)
=> lid1 -> lid2 -> String -> sublogics1 -> m sublogics2
coerceSublogic l1 l2 msg s1 = primCoerce l1 l2 msg s1
forceCoerceSublogic ::
(Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1,
Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2)
=> lid1 -> lid2 -> sublogics1 -> sublogics2
forceCoerceSublogic l1 l2 s1 = unsafeCoerce l1 l2 s1
coercePlainSign ::
(Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1,
Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2,
Monad m) => lid1 -> lid2 -> String -> sign1 -> m sign2
coercePlainSign l1 l2 msg s1 = primCoerce l1 l2 msg s1
coerceSign ::
(Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1,
Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2,
Monad m) => lid1 -> lid2 -> String -> ExtSign sign1 symbol1
-> m (ExtSign sign2 symbol2)
coerceSign l1 l2 msg s1 = primCoerce l1 l2 msg s1
coerceBasicTheory ::
(Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1,
Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2,
Monad m) => lid1 -> lid2 -> String
-> (sign1, [Named sentence1]) -> m (sign2, [Named sentence2])
coerceBasicTheory l1 l2 msg t1 = primCoerce l1 l2 msg t1
coerceSens ::
(Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1,
Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2,
Monad m) => lid1 -> lid2 -> String
-> [Named sentence1] -> m [Named sentence2]
coerceSens l1 l2 msg t1 = primCoerce l1 l2 msg t1
coerceMorphism ::
(Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1,
Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2,
Monad m) => lid1 -> lid2 -> String -> morphism1 -> m morphism2
coerceMorphism l1 l2 msg m1 = primCoerce l1 l2 msg m1
coerceSymbol ::
(Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1,
Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2)
=> lid1 -> lid2 -> symbol1 -> symbol2
coerceSymbol l1 l2 s1 = unsafeCoerce l1 l2 s1
coerceSymbolmap ::
(Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1,
Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2,
Typeable a)
=> lid1 -> lid2 -> Map.Map symbol1 a
-> Map.Map symbol2 a
coerceSymbolmap l1 l2 sm1 = unsafeCoerce l1 l2 sm1
coerceMapofsymbol ::
(Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1,
Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2,
Typeable a)
=> lid1 -> lid2 -> Map.Map a symbol1
-> Map.Map a symbol2
coerceMapofsymbol l1 l2 sm1 = unsafeCoerce l1 l2 sm1
coerceSymbItemsList ::
(Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1,
Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2,
Monad m) => lid1 -> lid2 -> String -> [symb_items1] -> m [symb_items2]
coerceSymbItemsList l1 l2 msg m1 = primCoerce l1 l2 msg m1
coerceSymbMapItemsList ::
(Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1,
Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2,
Monad m) => lid1 -> lid2 -> String
-> [symb_map_items1] -> m [symb_map_items2]
coerceSymbMapItemsList l1 l2 msg m1 = primCoerce l1 l2 msg m1
coerceProofStatus ::
(Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1,
Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2,
Monad m) => lid1 -> lid2 -> String
-> ProofStatus proof_tree1 -> m (ProofStatus proof_tree2)
coerceProofStatus l1 l2 msg m1 = primCoerce l1 l2 msg m1
coerceSymbolSet ::
(Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1,
Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2,
Monad m) => lid1 -> lid2 -> String -> Set.Set symbol1 -> m (Set.Set symbol2)
coerceSymbolSet l1 l2 msg m1 = primCoerce l1 l2 msg m1
coerceRawSymbolMap ::
(Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1,
Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2,
Monad m) => lid1 -> lid2 -> String -> EndoMap raw_symbol1
-> m (EndoMap raw_symbol2)
coerceRawSymbolMap l1 l2 msg m1 = primCoerce l1 l2 msg m1
coerceFreeDefMorphism ::
(Logic lid1 sublogics1 basic_spec1 sentence1 symb_items1 symb_map_items1
sign1 morphism1 symbol1 raw_symbol1 proof_tree1,
Logic lid2 sublogics2 basic_spec2 sentence2 symb_items2 symb_map_items2
sign2 morphism2 symbol2 raw_symbol2 proof_tree2,
Monad m) => lid1 -> lid2 -> String
-> FreeDefMorphism sentence1 morphism1
-> m (FreeDefMorphism sentence2 morphism2)
coerceFreeDefMorphism l1 l2 msg freedef = primCoerce l1 l2 msg freedef
| nevrenato/Hets_Fork | Logic/Coerce.hs | gpl-2.0 | 8,729 | 0 | 14 | 1,971 | 2,206 | 1,116 | 1,090 | 152 | 3 |
{-# LANGUAGE ScopedTypeVariables, ExistentialQuantification, GeneralizedNewtypeDeriving, MultiParamTypeClasses #-}
-- Copyright (C) JP Bernardy 2009
-- | This module defines implementations of syntax-awareness drivers.
module Yi.Syntax.Driver where
import Yi.Prelude
import Prelude ()
import Data.List (takeWhile, unzip)
import Yi.Syntax hiding (Cache)
import Yi.Syntax.Tree
import Yi.Lexer.Alex (Tok)
import Yi.Region
import qualified Data.Map as M
import Data.Map (Map)
type Path = [Int]
data Cache state tree tt = Cache {
path :: M.Map Int Path,
cachedStates :: [state],
root :: tree (Tok tt),
focused :: !(M.Map Int (tree (Tok tt)))
}
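-- | Build an incremental 'Highlighter' from a scanner constructor. Cached
-- scanner states are reused up to the dirty offset, so only the part of the
-- buffer after an edit is re-scanned.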
mkHighlighter :: forall state tree tt. (IsTree tree, Show state) =>
(Scanner Point Char -> Scanner state (tree (Tok tt))) ->
Highlighter (Cache state tree tt) (tree (Tok tt))
mkHighlighter scanner =
Yi.Syntax.SynHL
{ hlStartState = Cache M.empty [] emptyResult M.empty
, hlRun = updateCache
, hlGetTree = \(Cache _ _ _ focused) w -> M.findWithDefault emptyResult w focused
, hlFocus = focus
}
where startState :: state
startState = scanInit (scanner emptyFileScan)
emptyResult = scanEmpty (scanner emptyFileScan)
updateCache :: Scanner Point Char -> Point -> Cache state tree tt -> Cache state tree tt
updateCache newFileScan dirtyOffset (Cache path cachedStates oldResult _) = Cache path newCachedStates newResult M.empty
where newScan = scanner newFileScan
reused :: [state]
reused = takeWhile ((< dirtyOffset) . scanLooked (scanner newFileScan)) cachedStates
resumeState :: state
resumeState = if null reused then startState else last reused
newCachedStates = reused ++ fmap fst recomputed
recomputed = scanRun newScan resumeState
newResult :: tree (Tok tt)
newResult = if null recomputed then oldResult else snd $ head $ recomputed
focus r c@(Cache path states root _focused) =
(Cache path' states root focused)
where (path', focused) = unzipFM $ zipWithFM (\newpath oldpath -> fromNodeToFinal newpath (oldpath,root)) [] r path
unzipFM :: Ord k => [(k,(u,v))] -> (Map k u, Map k v)
unzipFM l = (M.fromList mu, M.fromList mv)
where (mu, mv) = unzip [((k,u),(k,v)) | (k,(u,v)) <- l]
zipWithFM :: Ord k => (u -> v -> w) -> v -> Map k u -> Map k v -> [(k,w)]
zipWithFM f v0 mu mv = [ (k,f u (M.findWithDefault v0 k mv) ) | (k,u) <- M.assocs mu]
emptyFileScan :: Scanner Point Char
emptyFileScan = Scanner { scanInit = 0,
scanRun = const [],
scanLooked = id,
scanEmpty = error "emptyFileScan: no scanEmpty" }
| codemac/yi-editor | src/Yi/Syntax/Driver.hs | gpl-2.0 | 3,085 | 0 | 15 | 1,020 | 961 | 526 | 435 | 55 | 3 |
module Main where
import qualified Control.Exception as E
import PrelSequent
import Sequent
import Interaction
import PSequent
import PrSequent
import Calculi
import Axioms
import Natural
import System.Process (system)
import System.IO (hFlush, stdout)
import System.IO.Error (isEOFError)
-- sequent calculus proof editor. Aarne Ranta 8/4/1999 -- 26/4 -- 14/11/2000
main :: IO ()
main = do
putStr welcomeMsg
_ <- editProofs ((rulesOfCalculus (Calculus ["G3i"]), Goal ([],[])),[])
return ()
welcomeMsg =
"\nWelcome to a proof editor for sequent calculus, version January 1, 2015." ++++
"Originally written by Aarne Ranta, and modified by Tomoaki Hashizaki [email protected]" ++++
"Starting with the calculus G3i (intuitionistic predicate calculus)." ++++
"Type ? for help on available commands.\n\n"
editProofs :: (Env,[String]) -> IO (Proof,String)
editProofs envh@(env@(calculus,tree),history) =
do
putStr "|- "
hFlush stdout
s <- getLine `E.catch` (\e -> if isEOFError e then return "q" else ioError e)
let (comm,m0) = case pCommand calculus s of
(x,""):_ -> (x, "")
_ -> (CVoid, "No parse of command\n")
(tree',m1) = exec comm env
(env',msg) = ((calculus,tree'), m0 ++ m1)
history' = history ++ [s]
envh' = (env', history')
in
do
putStr "\n"
putStr msg
putStr "\n"
case comm of
CAxioms file -> do s <- readFileIf file
let (na@(_,a),m) = readAxioms s
a' = axioms2calculus a in
do putStr m
writeAndLatexFile (file ++".rules") (prLatexAxioms na)
editProofs ((calculus ++ a', tree'),history')
CLatex file -> do writeAndLatexFile file (prLatexProof tree')
editProofs envh'
CNatural file -> do let nat = proof2nat tree'
writeAndLatexFile file (prLatexFile (prNatProof nat))
editProofs envh'
CHistory file -> do writeFile file (foldr (++++) "" (history ++ ["q"]))
editProofs envh'
CChange calc' -> let (c,m) = changeCalculus calculus calc' in
do putStr m
editProofs ((c,tree'),history')
CQuit -> do writeFile "myhistory.txt"
(foldr (++++) "" (history ++ ["q"]))
putStr "history written in myhistory.txt\n"
return (tree',"")
CManual -> do system "latex manual.tex >& /dev/null ; xdvi manual.dvi &"
editProofs envh'
_ -> editProofs envh'
-----------------------
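-- | Write the given content to @file.tex@, then run latex on it and open the
-- resulting dvi in xdvi in the background.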
writeAndLatexFile file content = do
writeFile (file ++ ".tex") content
system ("latex" +++ file ++ ".tex >& /dev/null ; xdvi" +++ file ++ ".dvi &")
return ()
type Env = (AbsCalculus, Proof)
data Command =
CRefine [Int] (Int,Int) Ident
| CInstance Ident Term
| CTry [Int] Int
| CNew Sequent
| CRemove [Int]
| CShowGoals
| CShowTree
| CShowApplicable [Int]
| CChange Calculus
| CAxioms String
| CQuit
| CLatex String
| CNatural String
| CHistory String
| CHelp
| CManual
| CVoid
exec :: Command -> Env -> (Proof,String)
exec c env@(calculus,tree) =
case c of
CRefine g (i,j) r -> (t, m ++++ prProof t)
where (t,m) = refine (calculus,tree) g (i,j) r
CInstance par term -> (t, prProof t)
where t = instantiate calculus par term tree
CTry g limit -> (t, m ++++ prProof t)
where (t,m) = tryRefine (calculus,tree) g limit
CNew sequent -> (t, prProof t)
where t = Goal sequent
CRemove ints -> (t, m ++++ prProof t)
where (t,m) = remove tree ints
CShowGoals -> (tree, prGoals (goalsOfProof tree))
CShowTree -> (tree, prProofNodes True tree)
CShowApplicable g -> (tree, showAllApplicableRules (calculus,tree) g)
CChange _ -> (tree, "")
CAxioms file -> (tree, "axioms written in" +++ file ++ ".rules.tex")
CLatex file -> (tree, "proof written in" +++ file ++ ".tex")
CNatural file -> (tree, "natural deduction tree written in" +++ file ++ ".tex")
CHistory file -> (tree, "history written in" +++ file)
CHelp -> (tree, helpMessage)
CManual -> (tree, "")
CQuit -> (tree, "Goodbye")
_ -> (tree, "no command processed")
helpMessage =
"Commands:\n" ++++
" r goal [A int] [S int] rule - refine goal with rule (A changes active" ++++
" formula in antecedent, S in succedent)" ++++
" i parametre term - instantiate parametre with term" ++++
" t goal int - try to refine goal to depth int" ++++
" n sequent - new sequent to prove" ++++
" u node - remove subtree node" ++++
" s - show subgoals" ++++
" w - show current proof tree" ++++
" a goal - show rules applicable to goal" ++++
" c calculus - change calculus into one of" ++++
" " ++ prCalculi ++++
" x file - read axioms from file and write rules into" ++++
" file.rules.tex" ++++
" l [file] - print current proof in LaTeX to file," ++++
" which is by default myproof.tex" ++++
" d [file] - print current proof in natural deduction to"++++
" LaTeX file, which is by default" ++++
" mydeduction.tex (works only for G3i, G3ip)" ++++
" h [file] - print history to file," ++++
" which is by default myhistory.txt" ++++
" ? - print this help message" ++++
" m - show the PESCA manual" ++++
" q - write history in myhistory.txt and quit\n"
----------------------
pCommand calculus =
jL "r" +.. pGoalId ... pJ (jL "A" +.. pIntc ||| succeed 1) ...
pJ (jL "S" +.. pIntc ||| succeed 1) ... pJ pRuleIdent
*** (\ (g,(i,(j,r))) -> CRefine g (i,j) r)
|||
jL "i" +.. pRuleIdent ... pJ pTerm
*** uncurry CInstance
|||
jL "x" +.. pRuleIdent *** CAxioms
|||
jL "t" +.. pGoalId ... pJ pIntc
*** uncurry CTry
|||
jL "n" +.. pSequent *** CNew
|||
jL "u" +.. pGoalId *** CRemove
|||
jL "s" <<< CShowGoals
|||
jL "w" <<< CShowTree
|||
jL "a" +.. pGoalId *** CShowApplicable
|||
jL "c" +.. pTList "+" pRuleIdent *** CChange . Calculus
|||
jL "l" +.. (pRuleIdent ||| succeed "myproof") *** CLatex
|||
jL "d" +.. (pRuleIdent ||| succeed "mydeduction") *** CNatural
|||
jL "h" +.. (pRuleIdent ||| succeed "myhistory.txt") *** CHistory
|||
jL "?" <<< CHelp
|||
jL "m" <<< CManual
|||
jL "q" <<< CQuit
---------------------
showAllApplicableRules :: (AbsCalculus,Proof) -> [Int] -> String
showAllApplicableRules (calculus,proof) ints =
case lookup ints [x | Left x <- goalsOfProof proof] of
Just sequent -> foldr ((++++) . prAr) [] (allApplicableRules calculus sequent)
_ -> prGoalId ints +++ "does not exist"
where
prAr ((sq,(i,j)),((r,_),_)) =
"r" +++ prGoalId ints +++ "A" ++ show i +++ "S" ++ show j +++ r +++
"--" +++ prSequent sq
-----------------------
changeCalculus calc calc' =
case rcalc' of
[] -> (calc, "new calculus not recognized\n")
_ -> (rcalc', "new calculus has" +++ show (length rcalc') +++ "rules\n")
where rcalc' = rulesOfCalculus calc'
| Tomoaki-Hashizaki/pesca | src/Editor.hs | gpl-2.0 | 8,119 | 46 | 56 | 2,924 | 2,197 | 1,127 | 1,070 | -1 | -1 |
-- | Manipulation of Limit-deterministic Buchi automata
module OmegaAutomata.LDBA(PowerPair, isLimitDeterministic, toLDBA) where
import OmegaAutomata.Automata
import Data.Graph.Inductive
import qualified Data.Set as S
import qualified Data.Map as M
import qualified Data.Bimap as B
import Data.List (nub)
type PowerPair q = ([q], [q])
-- | Check whether NBA is limit-deterministic
isLimitDeterministic :: (Ord q, Ord a) => NBA q a l -- ^ The NBA to be checked
-> Bool -- ^ Indicates whether automaton is limit-det.
isLimitDeterministic a = S.isSubsetOf (accept a) (deterministicPart a)
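-- | The set of states from which every reachable state (w.r.t. the
-- transitive closure of the transition graph) has at most one successor per
-- letter.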
deterministicPart :: (Ord q, Ord a) => NBA q a l
-> S.Set q
deterministicPart a = let g = graph a
transGraph = trc g
succDeterministic i = hasNoDuplicates $ map snd (lsuc g i)
det = [i | i <- nodes g, all succDeterministic (suc transGraph i)]
in
S.fromList $ map (toState a) det
hasNoDuplicates :: (Ord a) => [a] -> Bool
hasNoDuplicates xs = S.size (S.fromList xs) == length xs
-- | Convert NBA into equivalent LDBA using Courcoubetis and Yannakakis' construction
-- (c.f. article "The Complexity of Probabilistic Verification" by the same authors).
toLDBA :: (Ord q, Ord a) => NBA q a l -- ^ The NBA to be converted
-> NBA (Int, PowerPair q) a ([l], [l]) -- ^ The equivalent LDBA
toLDBA a = let ts = pairTransClosure a [([f],[f]) | f <- S.toList (accept a)]
ps = concat [[p1, p2] | (p1, _, p2) <- ts]
ls = [(labels q1 a, labels q2 a) | (q1, q2) <- ps]
as = [(qs1, qs2) | (qs1, qs2) <- ps, qs1 == qs2]
part1 = (liftStateToPair a){accept = S.fromList []}
part2 = makeNBA (zip ps ls) ts [] as
trans12 = [((1, (qs, [])), l, (2, ([f], [f]))) | f <- S.toList (accept a)
, (qs, l) <- combine (pres a f)]
in
insertTrans trans12 $ buchiUnion part1 part2
-- | Make transition for pair of sets of states,
-- as described in the article by Courcoubetis and Yannakakis
pairTrans :: (Ord q, Ord a) => NBA q a l -> PowerPair q -> [(PowerPair q, a)]
pairTrans a (qs1, qs2) = let
ts1 = powerSucc a qs1
acc1 = [f | f <- qs1, S.member f (accept a)]
qs2' l = if qs1 == qs2 then
powerASucc a acc1 l
else
powerASucc a (qs2 ++ [f | f <- acc1, not (f `elem` qs2)]) l
in
[((qs1', nub (qs2' l)), l) | (qs1', l) <- ts1]
-- | Compute all transitions reachable from list of pairs of states
pairTransClosure :: (Ord q, Ord a) => NBA q a l
-> [PowerPair q]
-> [(PowerPair q, a, PowerPair q)]
pairTransClosure a ps = [(ps1, l, ps2) | (ps1, vs) <- M.assocs (genPowerSet a (M.fromList []) ps)
, (ps2, l) <- vs]
-- | Compute power-set construction for pairs of sets of states, as described in the article
-- by Courcoubetis and Yannakakis
genPowerSet :: (Ord q, Ord a) =>
NBA q a l ->
M.Map (PowerPair q) [(PowerPair q, a)] ->
[PowerPair q] -> M.Map (PowerPair q) [(PowerPair q, a)]
genPowerSet a m (p:ps) = let psl' = (pairTrans a p)
ps' = [p' | (p', _) <- psl', not (M.member p' m)] in
if M.member p m then
genPowerSet a m ps
else
genPowerSet a (M.insert p psl' m) (ps' ++ ps)
genPowerSet _ m [] = m
-- | Lift states q and labels in an NBA
-- to pair of states ([q], []) and pair of labels ([l], [])
liftStateToPair :: (Ord q, Ord a) => NBA q a l -> NBA (PowerPair q) a ([l], [l])
liftStateToPair a = let liftToPair q = ([q], []) in
a{ states = S.map liftToPair (states a)
, bimap = B.map liftToPair (bimap a)
, graph = nmap (\l -> ([l], [])) (graph a)
, start = S.map liftToPair (start a)
, accept = S.map liftToPair (accept a)
}
-- | Return labels corresponding to list of states
labels :: (Ord q) => [q] -> NBA q a l -> [l]
labels qs a = [label q a | q <- qs]
| stefanjaax/omega-automata | src/OmegaAutomata/LDBA.hs | gpl-3.0 | 4,327 | 0 | 17 | 1,490 | 1,581 | 854 | 727 | 63 | 2 |
module Day01Spec where
import Day01
import SpecHelper
newtype ParenthesisString = ParenthesisString { unwrapString :: String}
deriving (Show)
genParenthesis :: Gen Char
genParenthesis = elements "()"
genParenthesisString :: Gen String
genParenthesisString = listOf genParenthesis
instance Arbitrary ParenthesisString where
arbitrary = ParenthesisString <$> genParenthesisString
prop_sum :: ParenthesisString -> ParenthesisString -> Bool
prop_sum ps1 ps2 = elevator s1 + elevator s2 == elevator (s1 ++ s2)
where s1 = unwrapString ps1
s2 = unwrapString ps2
prop_limit :: ParenthesisString -> Bool
prop_limit ps = elevator s <= length s && elevator s >= - length s
where s = unwrapString ps
prop_reverse :: ParenthesisString -> Bool
prop_reverse ps = elevator s == elevator (reverse s)
where s = unwrapString ps
spec :: Spec
spec = do
describe "elevator" $ do
it "fullfill the sum property" $ property prop_sum
it "fullfill the limit property" $ property prop_limit
it "fullfill the reverse property" $ property prop_reverse
it "return 2 in this case" $ elevator "((" `shouldBe` 2
describe "firstBasement" $ do
it "return 1 in this case" $ firstBasement ")" `shouldBe` 1
it "return 1 in this case" $ firstBasement "))()()((()))((()" `shouldBe` 1
it "return 17 in this case" $ firstBasement "(((()(())())())))" `shouldBe` 17
| M-Jack/adventofcode | test/Day01Spec.hs | gpl-3.0 | 1,462 | 1 | 12 | 339 | 385 | 188 | 197 | 32 | 1 |
{-# LANGUAGE NamedFieldPuns, DeriveDataTypeable, ParallelListComp, GeneralizedNewtypeDeriving, FlexibleInstances, FlexibleContexts #-}
module Fcr.Monad where
import Fcr.Syntax
import Fcr.PrettyPrinting
import Text.PrettyPrint
import Data.Typeable
import Control.Monad.State
import Control.Monad.Except
import Control.Exception
import qualified Data.Map as M
-- import Control.Applicative hiding (empty)
import Control.Monad.Reader
import Text.Parsec.Pos
import Data.List
data Env = Env{axioms :: [(Name, Exp)],
lemmas :: [(Name, (Exp, Exp))], -- (name, (proof, formula))
rules :: [(Name, Exp)],
tacs :: [((Name, Exp), [Tactic])],
kinds ::[(Name, Kind)],
pfdecls ::[(Name, Exp, Exp)], -- (name, formula, proof)
steps :: [(Name, Int)]
}
deriving Show
constKinds :: [(Name, Exp)] -> [(Name, Kind)]
constKinds rules =
let res = concat $ map (\ (_, t) -> helper t) rules
in nub $ res
where helper (Arrow t1 t2) = getKinds t1 ++ getKinds t2
helper (a@(Forall x t)) = helper $ viewFBody a
helper (Imply t1 t2) = helper t1 ++ helper t2
helper a = getKinds a
getKinds :: Exp -> [(Name, Kind)]
getKinds t = case flatten t of
(Const x):xs -> let arity = length xs
k = aToKind arity in
(x, k):(concat $ map getKinds xs)
(Var x):xs -> concat $ map getKinds xs
_ -> error "impossible happens in getKinds function"
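-- | Kind of an n-ary type constructor: @n@ nested 'KArrow's ending in
-- 'Star' (so @aToKind 0 = Star@).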
aToKind :: Int -> Kind
aToKind n | n == 0 = Star
| n > 0 = KArrow Star (aToKind (n-1))
instance Disp Env where
disp (Env as lms rs ts ks pfs ss) =
text "rewrite rules" $$ (vcat (map (\ (n, exp) -> disp n <+> text ":" <+> disp exp) rs)) $$
text "kinds" $$ (vcat (map (\ (n, exp) -> disp n <+> text ":" <+> disp exp) ks)) $$
text "axioms" $$ (vcat (map (\ (n, exp) -> disp n <+> text ":" <+> disp exp) as)) $$
text "proof declarations" $$ (sep (map (\ (n, exp, pf) -> (disp n <+> text ":" <+> disp exp <+> text "=") $$ disp pf) pfs)) $$
text "lemmas" $$ (vcat (map (\ (n, (pf, exp)) -> (disp n <+> text ":" <+> disp exp <+> text "=") $$ (nest 2 $ disp pf)) lms))
$$ text "steps" $$ (vcat (map (\ (n, num) -> text "step" <+> text n <+> int num) ss))
emptyEnv :: Env
emptyEnv = Env {axioms = [], lemmas = [], rules = [], tacs = [], kinds = [], pfdecls = [],
steps = []}
extendAxiom :: Name -> Exp -> Env -> Env
extendAxiom v ts e@(Env {axioms}) = e{axioms = (v , ts) : axioms}
extendLemma :: Name -> Exp -> Exp -> Env -> Env
extendLemma v pf t e@(Env {lemmas}) = e{lemmas = (v, (pf, t)):lemmas}
extendRule :: Name -> Exp -> Env -> Env
extendRule v ts e@(Env {rules}) = e{rules = (v , ts) : rules}
extendTac :: Name -> Exp -> [Tactic] -> Env -> Env
extendTac v es ts e@(Env {tacs}) = e{tacs = ((v, es), ts) : tacs}
addKinds :: [(Name, Kind)] -> Env -> Env
addKinds ks e@(Env {kinds}) = e{kinds = ks}
addSteps :: [(Name, Int)] -> Env -> Env
addSteps ks e@(Env {steps}) = e{steps = ks}
addDecls :: [(Name, Exp, Exp)] -> Env -> Env
addDecls ks e@(Env {pfdecls}) = e{pfdecls = ks}
| Fermat/FCR | src/Fcr/Monad.hs | gpl-3.0 | 3,320 | 0 | 25 | 979 | 1,471 | 808 | 663 | 64 | 4 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.DFAReporting.ContentCategories.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates an existing content category. This method supports patch
-- semantics.
--
-- /See:/ <https://developers.google.com/doubleclick-advertisers/ Campaign Manager 360 API Reference> for @dfareporting.contentCategories.patch@.
module Network.Google.Resource.DFAReporting.ContentCategories.Patch
(
-- * REST Resource
ContentCategoriesPatchResource
-- * Creating a Request
, contentCategoriesPatch
, ContentCategoriesPatch
-- * Request Lenses
, ccpXgafv
, ccpUploadProtocol
, ccpAccessToken
, ccpUploadType
, ccpProFileId
, ccpPayload
, ccpId
, ccpCallback
) where
import Network.Google.DFAReporting.Types
import Network.Google.Prelude
-- | A resource alias for @dfareporting.contentCategories.patch@ method which the
-- 'ContentCategoriesPatch' request conforms to.
type ContentCategoriesPatchResource =
"dfareporting" :>
"v3.5" :>
"userprofiles" :>
Capture "profileId" (Textual Int64) :>
"contentCategories" :>
QueryParam "id" (Textual Int64) :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] ContentCategory :>
Patch '[JSON] ContentCategory
-- | Updates an existing content category. This method supports patch
-- semantics.
--
-- /See:/ 'contentCategoriesPatch' smart constructor.
data ContentCategoriesPatch =
ContentCategoriesPatch'
{ _ccpXgafv :: !(Maybe Xgafv)
, _ccpUploadProtocol :: !(Maybe Text)
, _ccpAccessToken :: !(Maybe Text)
, _ccpUploadType :: !(Maybe Text)
, _ccpProFileId :: !(Textual Int64)
, _ccpPayload :: !ContentCategory
, _ccpId :: !(Textual Int64)
, _ccpCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ContentCategoriesPatch' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'ccpXgafv'
--
-- * 'ccpUploadProtocol'
--
-- * 'ccpAccessToken'
--
-- * 'ccpUploadType'
--
-- * 'ccpProFileId'
--
-- * 'ccpPayload'
--
-- * 'ccpId'
--
-- * 'ccpCallback'
contentCategoriesPatch
:: Int64 -- ^ 'ccpProFileId'
-> ContentCategory -- ^ 'ccpPayload'
-> Int64 -- ^ 'ccpId'
-> ContentCategoriesPatch
contentCategoriesPatch pCcpProFileId_ pCcpPayload_ pCcpId_ =
ContentCategoriesPatch'
{ _ccpXgafv = Nothing
, _ccpUploadProtocol = Nothing
, _ccpAccessToken = Nothing
, _ccpUploadType = Nothing
, _ccpProFileId = _Coerce # pCcpProFileId_
, _ccpPayload = pCcpPayload_
, _ccpId = _Coerce # pCcpId_
, _ccpCallback = Nothing
}
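-- | A non-exported usage sketch added for illustration only; it is not part
-- of the generated API and the numeric profile and category IDs below are
-- placeholders.
_exampleContentCategoriesPatch :: ContentCategory -> ContentCategoriesPatch
_exampleContentCategoriesPatch payload =
    contentCategoriesPatch 12345 payload 67890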
-- | V1 error format.
ccpXgafv :: Lens' ContentCategoriesPatch (Maybe Xgafv)
ccpXgafv = lens _ccpXgafv (\ s a -> s{_ccpXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
ccpUploadProtocol :: Lens' ContentCategoriesPatch (Maybe Text)
ccpUploadProtocol
= lens _ccpUploadProtocol
(\ s a -> s{_ccpUploadProtocol = a})
-- | OAuth access token.
ccpAccessToken :: Lens' ContentCategoriesPatch (Maybe Text)
ccpAccessToken
= lens _ccpAccessToken
(\ s a -> s{_ccpAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
ccpUploadType :: Lens' ContentCategoriesPatch (Maybe Text)
ccpUploadType
= lens _ccpUploadType
(\ s a -> s{_ccpUploadType = a})
-- | User profile ID associated with this request.
ccpProFileId :: Lens' ContentCategoriesPatch Int64
ccpProFileId
= lens _ccpProFileId (\ s a -> s{_ccpProFileId = a})
. _Coerce
-- | Multipart request metadata.
ccpPayload :: Lens' ContentCategoriesPatch ContentCategory
ccpPayload
= lens _ccpPayload (\ s a -> s{_ccpPayload = a})
-- | ContentCategory ID.
ccpId :: Lens' ContentCategoriesPatch Int64
ccpId
= lens _ccpId (\ s a -> s{_ccpId = a}) . _Coerce
-- | JSONP
ccpCallback :: Lens' ContentCategoriesPatch (Maybe Text)
ccpCallback
= lens _ccpCallback (\ s a -> s{_ccpCallback = a})
instance GoogleRequest ContentCategoriesPatch where
type Rs ContentCategoriesPatch = ContentCategory
type Scopes ContentCategoriesPatch =
'["https://www.googleapis.com/auth/dfatrafficking"]
requestClient ContentCategoriesPatch'{..}
= go _ccpProFileId (Just _ccpId) _ccpXgafv
_ccpUploadProtocol
_ccpAccessToken
_ccpUploadType
_ccpCallback
(Just AltJSON)
_ccpPayload
dFAReportingService
where go
= buildClient
(Proxy :: Proxy ContentCategoriesPatchResource)
mempty
| brendanhay/gogol | gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/ContentCategories/Patch.hs | mpl-2.0 | 5,733 | 0 | 20 | 1,376 | 909 | 525 | 384 | 128 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidDeviceProvisioning.Customers.Devices.Unclaim
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Unclaims a device from a customer and removes it from zero-touch
-- enrollment. After removing a device, a customer must contact their
-- reseller to register the device into zero-touch enrollment again.
--
-- /See:/ <https://developers.google.com/zero-touch/ Android Device Provisioning Partner API Reference> for @androiddeviceprovisioning.customers.devices.unclaim@.
module Network.Google.Resource.AndroidDeviceProvisioning.Customers.Devices.Unclaim
(
-- * REST Resource
CustomersDevicesUnclaimResource
-- * Creating a Request
, customersDevicesUnclaim
, CustomersDevicesUnclaim
-- * Request Lenses
, cduParent
, cduXgafv
, cduUploadProtocol
, cduAccessToken
, cduUploadType
, cduPayload
, cduCallback
) where
import Network.Google.AndroidDeviceProvisioning.Types
import Network.Google.Prelude
-- | A resource alias for @androiddeviceprovisioning.customers.devices.unclaim@ method which the
-- 'CustomersDevicesUnclaim' request conforms to.
type CustomersDevicesUnclaimResource =
"v1" :>
Capture "parent" Text :>
"devices:unclaim" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] CustomerUnclaimDeviceRequest :>
Post '[JSON] Empty
-- | Unclaims a device from a customer and removes it from zero-touch
-- enrollment. After removing a device, a customer must contact their
-- reseller to register the device into zero-touch enrollment again.
--
-- /See:/ 'customersDevicesUnclaim' smart constructor.
data CustomersDevicesUnclaim =
CustomersDevicesUnclaim'
{ _cduParent :: !Text
, _cduXgafv :: !(Maybe Xgafv)
, _cduUploadProtocol :: !(Maybe Text)
, _cduAccessToken :: !(Maybe Text)
, _cduUploadType :: !(Maybe Text)
, _cduPayload :: !CustomerUnclaimDeviceRequest
, _cduCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CustomersDevicesUnclaim' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cduParent'
--
-- * 'cduXgafv'
--
-- * 'cduUploadProtocol'
--
-- * 'cduAccessToken'
--
-- * 'cduUploadType'
--
-- * 'cduPayload'
--
-- * 'cduCallback'
customersDevicesUnclaim
:: Text -- ^ 'cduParent'
-> CustomerUnclaimDeviceRequest -- ^ 'cduPayload'
-> CustomersDevicesUnclaim
customersDevicesUnclaim pCduParent_ pCduPayload_ =
CustomersDevicesUnclaim'
{ _cduParent = pCduParent_
, _cduXgafv = Nothing
, _cduUploadProtocol = Nothing
, _cduAccessToken = Nothing
, _cduUploadType = Nothing
, _cduPayload = pCduPayload_
, _cduCallback = Nothing
}
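-- | A non-exported usage sketch added for illustration only; it is not part
-- of the generated API and the customer ID below is a placeholder.
_exampleCustomersDevicesUnclaim :: CustomerUnclaimDeviceRequest -> CustomersDevicesUnclaim
_exampleCustomersDevicesUnclaim payload =
    customersDevicesUnclaim "customers/12345" payload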
-- | Required. The customer managing the device. An API resource name in the
-- format \`customers\/[CUSTOMER_ID]\`.
cduParent :: Lens' CustomersDevicesUnclaim Text
cduParent
= lens _cduParent (\ s a -> s{_cduParent = a})
-- | V1 error format.
cduXgafv :: Lens' CustomersDevicesUnclaim (Maybe Xgafv)
cduXgafv = lens _cduXgafv (\ s a -> s{_cduXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
cduUploadProtocol :: Lens' CustomersDevicesUnclaim (Maybe Text)
cduUploadProtocol
= lens _cduUploadProtocol
(\ s a -> s{_cduUploadProtocol = a})
-- | OAuth access token.
cduAccessToken :: Lens' CustomersDevicesUnclaim (Maybe Text)
cduAccessToken
= lens _cduAccessToken
(\ s a -> s{_cduAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
cduUploadType :: Lens' CustomersDevicesUnclaim (Maybe Text)
cduUploadType
= lens _cduUploadType
(\ s a -> s{_cduUploadType = a})
-- | Multipart request metadata.
cduPayload :: Lens' CustomersDevicesUnclaim CustomerUnclaimDeviceRequest
cduPayload
= lens _cduPayload (\ s a -> s{_cduPayload = a})
-- | JSONP
cduCallback :: Lens' CustomersDevicesUnclaim (Maybe Text)
cduCallback
= lens _cduCallback (\ s a -> s{_cduCallback = a})
instance GoogleRequest CustomersDevicesUnclaim where
type Rs CustomersDevicesUnclaim = Empty
type Scopes CustomersDevicesUnclaim = '[]
requestClient CustomersDevicesUnclaim'{..}
= go _cduParent _cduXgafv _cduUploadProtocol
_cduAccessToken
_cduUploadType
_cduCallback
(Just AltJSON)
_cduPayload
androidDeviceProvisioningService
where go
= buildClient
(Proxy :: Proxy CustomersDevicesUnclaimResource)
mempty
| brendanhay/gogol | gogol-androiddeviceprovisioning/gen/Network/Google/Resource/AndroidDeviceProvisioning/Customers/Devices/Unclaim.hs | mpl-2.0 | 5,598 | 0 | 17 | 1,226 | 783 | 458 | 325 | 113 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module TestUtil where
import BeholderObserver.Data
import Test.QuickCheck
import Data.Text as T
instance Arbitrary Project where
arbitrary = Project <$> arbitrary <*> arbitrary <*> arbitrary
instance Arbitrary DocSet where
arbitrary = DocSet <$> arbitrary <*> arbitrary <*> arbitrary
instance Arbitrary Doc where
arbitrary = TextDoc <$> arbitrary <*> arbitrary <*> arbitrary
instance Arbitrary Text where
arbitrary = T.pack <$> arbitrary
| proegssilb/beholder-observer | test/TestUtil.hs | apache-2.0 | 509 | 0 | 8 | 99 | 116 | 64 | 52 | 13 | 0 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
module Haddock.Backends.Hyperlinker.Ast (enrich) where
import Haddock.Syb
import Haddock.Backends.Hyperlinker.Types
import qualified GHC
import Control.Applicative
import Data.Data
import Data.Maybe
-- | Add more detailed information to token stream using GHC API.
enrich :: GHC.RenamedSource -> [Token] -> [RichToken]
enrich src =
map $ \token -> RichToken
{ rtkToken = token
, rtkDetails = enrichToken token detailsMap
}
where
detailsMap = concatMap ($ src)
[ variables
, types
, decls
, binds
, imports
]
-- | A map containing association between source locations and "details" of
-- this location.
--
-- For the time being, it is just a list of pairs. However, looking up things
-- in such a structure has linear complexity. We cannot use any hashmap-like
-- stuff because source locations are not ordered. In the future, this should
-- be replaced with interval tree data structure.
type DetailsMap = [(GHC.SrcSpan, TokenDetails)]
lookupBySpan :: Span -> DetailsMap -> Maybe TokenDetails
lookupBySpan tspan = listToMaybe . map snd . filter (matches tspan . fst)
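-- | Attach details to a single token. Only identifiers and operators are
-- looked up; every other token type gets 'Nothing'.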
enrichToken :: Token -> DetailsMap -> Maybe TokenDetails
enrichToken (Token typ _ spn) dm
| typ `elem` [TkIdentifier, TkOperator] = lookupBySpan spn dm
enrichToken _ _ = Nothing
-- | Obtain details map for variables ("normally" used identifiers).
variables :: GHC.RenamedSource -> DetailsMap
variables =
everything (<|>) (var `combine` rec)
where
var term = case cast term of
(Just (GHC.L sspan (GHC.HsVar name))) ->
pure (sspan, RtkVar (GHC.unLoc name))
(Just (GHC.L _ (GHC.RecordCon (GHC.L sspan name) _ _ _))) ->
pure (sspan, RtkVar name)
_ -> empty
rec term = case cast term of
Just (GHC.HsRecField (GHC.L sspan name) (_ :: GHC.LHsExpr GHC.Name) _) ->
pure (sspan, RtkVar name)
_ -> empty
-- | Obtain details map for types.
types :: GHC.RenamedSource -> DetailsMap
types =
everything (<|>) ty
where
ty term = case cast term of
(Just (GHC.L sspan (GHC.HsTyVar name))) ->
pure (sspan, RtkType (GHC.unLoc name))
_ -> empty
-- | Obtain details map for identifier bindings.
--
-- That includes both identifiers bound by pattern matching and those declared using
-- ordinary assignment (in top-level declarations, let-expressions and where
-- clauses).
binds :: GHC.RenamedSource -> DetailsMap
binds =
everything (<|>) (fun `combine` pat `combine` tvar)
where
fun term = case cast term of
(Just (GHC.FunBind (GHC.L sspan name) _ _ _ _ :: GHC.HsBind GHC.Name)) ->
pure (sspan, RtkBind name)
_ -> empty
pat term = case cast term of
(Just (GHC.L sspan (GHC.VarPat name))) ->
pure (sspan, RtkBind (GHC.unLoc name))
(Just (GHC.L _ (GHC.ConPatIn (GHC.L sspan name) recs))) ->
[(sspan, RtkVar name)] ++ everything (<|>) rec recs
(Just (GHC.L _ (GHC.AsPat (GHC.L sspan name) _))) ->
pure (sspan, RtkBind name)
_ -> empty
rec term = case cast term of
(Just (GHC.HsRecField (GHC.L sspan name) (_ :: GHC.LPat GHC.Name) _)) ->
pure (sspan, RtkVar name)
_ -> empty
tvar term = case cast term of
(Just (GHC.L sspan (GHC.UserTyVar name))) ->
pure (sspan, RtkBind (GHC.unLoc name))
(Just (GHC.L _ (GHC.KindedTyVar (GHC.L sspan name) _))) ->
pure (sspan, RtkBind name)
_ -> empty
-- | Obtain details map for top-level declarations.
decls :: GHC.RenamedSource -> DetailsMap
decls (group, _, _, _) = concatMap ($ group)
[ concat . map typ . concat . map GHC.group_tyclds . GHC.hs_tyclds
, everything (<|>) fun . GHC.hs_valds
, everything (<|>) (con `combine` ins)
]
where
typ (GHC.L _ t) = case t of
GHC.DataDecl name _ _ _ -> pure . decl $ name
GHC.SynDecl name _ _ _ -> pure . decl $ name
GHC.FamDecl fam -> pure . decl $ GHC.fdLName fam
GHC.ClassDecl{..} -> [decl tcdLName] ++ concatMap sig tcdSigs
fun term = case cast term of
(Just (GHC.FunBind (GHC.L sspan name) _ _ _ _ :: GHC.HsBind GHC.Name))
| GHC.isExternalName name -> pure (sspan, RtkDecl name)
_ -> empty
con term = case cast term of
(Just cdcl) ->
map decl (GHC.getConNames cdcl) ++ everything (<|>) fld cdcl
Nothing -> empty
ins term = case cast term of
(Just (GHC.DataFamInstD inst)) -> pure . tyref $ GHC.dfid_tycon inst
(Just (GHC.TyFamInstD (GHC.TyFamInstDecl (GHC.L _ eqn) _))) ->
pure . tyref $ GHC.tfe_tycon eqn
_ -> empty
fld term = case cast term of
Just (field :: GHC.ConDeclField GHC.Name)
-> map (decl . fmap GHC.selectorFieldOcc) $ GHC.cd_fld_names field
Nothing -> empty
sig (GHC.L _ (GHC.TypeSig names _)) = map decl names
sig _ = []
decl (GHC.L sspan name) = (sspan, RtkDecl name)
tyref (GHC.L sspan name) = (sspan, RtkType name)
-- | Obtain details map for import declarations.
--
-- This map also includes type and variable details for items in export and
-- import lists.
imports :: GHC.RenamedSource -> DetailsMap
imports src@(_, imps, _, _) =
everything (<|>) ie src ++ mapMaybe (imp . GHC.unLoc) imps
where
ie term = case cast term of
(Just (GHC.IEVar v)) -> pure $ var v
(Just (GHC.IEThingAbs t)) -> pure $ typ t
(Just (GHC.IEThingAll t)) -> pure $ typ t
(Just (GHC.IEThingWith t _ vs _fls)) ->
[typ t] ++ map var vs
_ -> empty
typ (GHC.L sspan name) = (sspan, RtkType name)
var (GHC.L sspan name) = (sspan, RtkVar name)
imp idecl | not . GHC.ideclImplicit $ idecl =
let (GHC.L sspan name) = GHC.ideclName idecl
in Just (sspan, RtkModule name)
imp _ = Nothing
-- | Check whether token stream span matches GHC source span.
--
-- Currently, it is implemented as checking whether "our" span is contained
-- in the GHC span. The reason is that GHC spans are generally wider
-- and may spread across a couple of tokens. For example, @(>>=)@ consists of
-- three tokens: @(@, @>>=@, @)@, but the GHC source span associated with the
-- @>>=@ variable contains @(@ and @)@. Similarly, qualified identifiers like
-- @Foo.Bar.quux@ are tokenized as @Foo@, @.@, @Bar@, @.@, @quux@, but the GHC
-- source span associated with @quux@ contains all five elements.
matches :: Span -> GHC.SrcSpan -> Bool
matches tspan (GHC.RealSrcSpan aspan)
| saspan <= stspan && etspan <= easpan = True
where
stspan = (posRow . spStart $ tspan, posCol . spStart $ tspan)
etspan = (posRow . spEnd $ tspan, posCol . spEnd $ tspan)
saspan = (GHC.srcSpanStartLine aspan, GHC.srcSpanStartCol aspan)
easpan = (GHC.srcSpanEndLine aspan, GHC.srcSpanEndCol aspan)
matches _ _ = False
| randen/haddock | haddock-api/src/Haddock/Backends/Hyperlinker/Ast.hs | bsd-2-clause | 7,030 | 0 | 18 | 1,787 | 2,301 | 1,189 | 1,112 | 129 | 10 |
-- |
-- Module: LTL
-- Description: Bounded Linear Temporal Logic (LTL) operators
-- Copyright: (c) 2011 National Institute of Aerospace / Galois, Inc.
--
-- Bounded Linear Temporal Logic (LTL) operators. For a bound @n@, a property
-- @p@ holds if it holds on the next @n@ transitions (between periods). If
-- @n == 0@, then the trace includes only the current period. For example,
--
-- @
-- eventually 3 p
-- @
--
-- holds if @p@ holds at least once every four periods (3 transitions).
--
-- /Interface:/ See @Examples/LTLExamples.hs@ in the
-- <https://github.com/leepike/Copilot/tree/master/Examples Copilot repository>.
--
-- You can embed an LTL specification within a Copilot specification using the
-- form:
--
-- @
-- operator spec
-- @
--
-- For some properties, stream dependencies may not allow their specification.
-- In particular, you cannot determine the "future" value of an external
-- variable. In general, the "Copilot.Library.PTLTL" library is probably more useful.
{-# LANGUAGE NoImplicitPrelude #-}
module Copilot.Library.LTL
( next, eventually, always, until, release ) where
import Copilot.Language
import Copilot.Library.Utils
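-- | A small, non-exported usage sketch added for illustration only (not part
-- of the original library; the stream arguments are hypothetical): it simply
-- combines the bounded operators defined below.
_exampleSpec :: Stream Bool -> Stream Bool -> Stream Bool
_exampleSpec p q = always 3 p || eventually 5 q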
-- | Property @s@ holds at the next period. For example:
--
-- @
-- 0 1 2 3 4 5 6 7
-- s => F F F T F F T F ...
-- next s => F F T F F T F ...
-- @
-- Note: s must have sufficient history to 'drop' a value from it.
next :: Stream Bool -> Stream Bool
next = drop ( 1 :: Int )
-- | Property @s@ holds for the next @n@ periods. We require @n >= 0@. If @n ==
-- 0@, then @s@ holds in the current period, e.g., if @p = always 2 s@, then we
-- have the following relationship between the streams generated:
--
-- @
-- 0 1 2 3 4 5 6 7
-- s => T T T F T T T T ...
-- p => T F F F T T ...
-- @
always :: ( Integral a ) => a -> Stream Bool -> Stream Bool
always n = nfoldl1 ( fromIntegral n + 1 ) (&&)
-- | Property @s@ holds at some period in the next @n@ periods. If @n == 0@,
-- then @s@ holds in the current period. We require @n >= 0@. E.g., if @p =
-- eventually 2 s@, then we have the following relationship between the streams
-- generated:
--
-- @
-- s => F F F T F F F T ...
-- p => F T T T F T T T ...
-- @
eventually :: ( Integral a ) =>
a -- ^ 'n'
-> Stream Bool -- ^ 's'
-> Stream Bool
eventually n = nfoldl1 ( fromIntegral n + 1 ) (||)
-- | @until n s0 s1@ means that @eventually n s1@, and up until at least the
-- period before @s1@ holds, @s0@ continuously holds.
until :: ( Integral a ) => a -> Stream Bool -> Stream Bool -> Stream Bool
until n s0 s1 = foldl1 (||) v0
where n' = fromIntegral n
v0 = [ always ( i :: Int ) s0 && drop ( i + 1 ) s1
| i <- [ 0 .. n' - 1 ]
]
-- | @release n s0 s1@ means that either @always n s1@, or @s1@ holds up to and
-- including the period at which @s0@ becomes true.
release :: ( Integral a ) => a -> Stream Bool -> Stream Bool -> Stream Bool
release n s0 s1 = always n s1 || foldl1 (||) v0
where n' = fromIntegral n
v0 = [ always ( i :: Int ) s1 && drop i s0
| i <- [ 0 .. n' - 1 ]
]
| leepike/copilot-libraries | src/Copilot/Library/LTL.hs | bsd-3-clause | 3,162 | 0 | 11 | 818 | 467 | 275 | 192 | 24 | 1 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances,
GeneralizedNewtypeDeriving #-}
module Network.Mircy.Internal where
import Control.Applicative
import Control.Monad.State
import Control.Monad.Reader
import qualified Data.ByteString as B
import System.IO
newtype MircyT m a = MircyT (ReaderT Handle m a)
deriving (Functor, Applicative, Monad, MonadTrans, MonadIO)
type Mircy a = MircyT IO a
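-- | Run a 'MircyT' computation against the given IRC connection handle.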
runMircyT :: MircyT m a -> Handle -> m a
runMircyT (MircyT r) = runReaderT r
instance (Monad m) => MonadReader Handle (MircyT m) where
ask = MircyT ask
local f (MircyT m) = MircyT $ local f m
class MonadMircy m where
getIRCHandle :: (Monad m) => m Handle
instance (Monad m) => MonadMircy (MircyT m) where
getIRCHandle = ask
data IRCMessage = IRCReply Int B.ByteString B.ByteString
| IRCError Int B.ByteString B.ByteString
| IRCNotice B.ByteString B.ByteString
| IRCUnknown B.ByteString
| IRCMsg B.ByteString B.ByteString B.ByteString B.ByteString
| IRCJoinMsg B.ByteString B.ByteString B.ByteString
| IRCNickMsg B.ByteString B.ByteString B.ByteString
deriving (Eq, Show)
data IRCCommand = IRCUser B.ByteString B.ByteString B.ByteString B.ByteString
| IRCNick B.ByteString
| IRCJoin B.ByteString
| IRCPrivMsg B.ByteString B.ByteString
| IRCQuit (Maybe B.ByteString)
| IRCWho (Maybe (B.ByteString, Bool))
| mikeyhc/mircy | src/Network/Mircy/Internal.hs | bsd-3-clause | 1,561 | 0 | 10 | 434 | 431 | 234 | 197 | 34 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
-- | This module contains Haskell variables representing globally visible
-- names for files, paths, extensions.
module Language.Rsc.Core.Files (
-- * Hardwired paths
getPreludeJSONPath
, getPreludeTSPath
, getDomJSONPath
, getDomTSPath
, getTSBindPath
)
where
import Paths_refscript
import System.FilePath
-------------------------------------------------------------------------------
getPreludeTSPath, getDomTSPath, getPreludeJSONPath, getDomJSONPath, getTSBindPath :: IO FilePath
-------------------------------------------------------------------------------
getPreludeTSPath = getDataFileName "include/prelude.d.ts"
getDomTSPath = getDataFileName "include/ambient/dom.ts"
getPreludeJSONPath = (`replaceExtension` ".json") <$> getPreludeTSPath
getDomJSONPath = (`replaceExtension` ".json") <$> getDomTSPath
getTSBindPath = getDataFileName "./ext/tsc-bin/built/local/tsc-refscript.js"
| UCSD-PL/RefScript | src/Language/Rsc/Core/Files.hs | bsd-3-clause | 974 | 0 | 6 | 138 | 107 | 70 | 37 | 15 | 1 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.EXT.FramebufferObject
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <[email protected]>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.EXT.FramebufferObject (
-- * Extension Support
glGetEXTFramebufferObject,
gl_EXT_framebuffer_object,
-- * Enums
pattern GL_COLOR_ATTACHMENT0_EXT,
pattern GL_COLOR_ATTACHMENT10_EXT,
pattern GL_COLOR_ATTACHMENT11_EXT,
pattern GL_COLOR_ATTACHMENT12_EXT,
pattern GL_COLOR_ATTACHMENT13_EXT,
pattern GL_COLOR_ATTACHMENT14_EXT,
pattern GL_COLOR_ATTACHMENT15_EXT,
pattern GL_COLOR_ATTACHMENT1_EXT,
pattern GL_COLOR_ATTACHMENT2_EXT,
pattern GL_COLOR_ATTACHMENT3_EXT,
pattern GL_COLOR_ATTACHMENT4_EXT,
pattern GL_COLOR_ATTACHMENT5_EXT,
pattern GL_COLOR_ATTACHMENT6_EXT,
pattern GL_COLOR_ATTACHMENT7_EXT,
pattern GL_COLOR_ATTACHMENT8_EXT,
pattern GL_COLOR_ATTACHMENT9_EXT,
pattern GL_DEPTH_ATTACHMENT_EXT,
pattern GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_EXT,
pattern GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE_EXT,
pattern GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_3D_ZOFFSET_EXT,
pattern GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE_EXT,
pattern GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_EXT,
pattern GL_FRAMEBUFFER_BINDING_EXT,
pattern GL_FRAMEBUFFER_COMPLETE_EXT,
pattern GL_FRAMEBUFFER_EXT,
pattern GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT_EXT,
pattern GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS_EXT,
pattern GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER_EXT,
pattern GL_FRAMEBUFFER_INCOMPLETE_FORMATS_EXT,
pattern GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT_EXT,
pattern GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER_EXT,
pattern GL_FRAMEBUFFER_UNSUPPORTED_EXT,
pattern GL_INVALID_FRAMEBUFFER_OPERATION_EXT,
pattern GL_MAX_COLOR_ATTACHMENTS_EXT,
pattern GL_MAX_RENDERBUFFER_SIZE_EXT,
pattern GL_RENDERBUFFER_ALPHA_SIZE_EXT,
pattern GL_RENDERBUFFER_BINDING_EXT,
pattern GL_RENDERBUFFER_BLUE_SIZE_EXT,
pattern GL_RENDERBUFFER_DEPTH_SIZE_EXT,
pattern GL_RENDERBUFFER_EXT,
pattern GL_RENDERBUFFER_GREEN_SIZE_EXT,
pattern GL_RENDERBUFFER_HEIGHT_EXT,
pattern GL_RENDERBUFFER_INTERNAL_FORMAT_EXT,
pattern GL_RENDERBUFFER_RED_SIZE_EXT,
pattern GL_RENDERBUFFER_STENCIL_SIZE_EXT,
pattern GL_RENDERBUFFER_WIDTH_EXT,
pattern GL_STENCIL_ATTACHMENT_EXT,
pattern GL_STENCIL_INDEX16_EXT,
pattern GL_STENCIL_INDEX1_EXT,
pattern GL_STENCIL_INDEX4_EXT,
pattern GL_STENCIL_INDEX8_EXT,
-- * Functions
glBindFramebufferEXT,
glBindRenderbufferEXT,
glCheckFramebufferStatusEXT,
glDeleteFramebuffersEXT,
glDeleteRenderbuffersEXT,
glFramebufferRenderbufferEXT,
glFramebufferTexture1DEXT,
glFramebufferTexture2DEXT,
glFramebufferTexture3DEXT,
glGenFramebuffersEXT,
glGenRenderbuffersEXT,
glGenerateMipmapEXT,
glGetFramebufferAttachmentParameterivEXT,
glGetRenderbufferParameterivEXT,
glIsFramebufferEXT,
glIsRenderbufferEXT,
glRenderbufferStorageEXT
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
| haskell-opengl/OpenGLRaw | src/Graphics/GL/EXT/FramebufferObject.hs | bsd-3-clause | 3,269 | 0 | 5 | 349 | 355 | 225 | 130 | 75 | 0 |
module Yhc.Core.Firstify.Mitchell.Terminate(
Terminate, emptyTerminate,
addInline, askInline,
addSpec, askSpec, cloneSpec
) where
import qualified Data.Homeomorphic as H
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Maybe
import Debug.Trace
import Yhc.Core
import Yhc.Core.Util
data Terminate = Terminate
{verbose :: Bool
,terminate :: Map.Map CoreFuncName Term
}
data Term = Term
{specs :: [H.Homeomorphic CoreExpr1 CoreExpr]
,inlined :: Set.Set CoreFuncName
}
homeoOrder = 8 :: Int
insertH key val [] = error "Logic fault, insertH"
insertH key val (x:xs) | isNothing (H.findOne key x) = H.insert key val x : xs
| otherwise = x : insertH key val xs
findH key xs = if any null res then [] else concat res
where res = map (H.find key) xs
get name t = Map.findWithDefault emptyTerm name (terminate t)
modify t name op = t{terminate = Map.insert name (op $ get name t) (terminate t)}
logger t msg answer = (if verbose t && not answer then trace msg else id) answer
emptyTerminate :: Bool -> Terminate
emptyTerminate b = Terminate b Map.empty
emptyTerm :: Term
emptyTerm = Term (replicate homeoOrder H.empty) Set.empty
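-- | Record that the function @on@ has been inlined inside @within@, so that
-- a later 'askInline' for the same pair answers 'False'.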
addInline :: CoreFuncName -> CoreFuncName -> Terminate -> Terminate
addInline within on t = modify t within $ \x -> x{inlined = Set.insert on $ inlined x}
askInline :: CoreFuncName -> CoreFuncName -> Terminate -> Bool
askInline within on t = logger t ("Skipped inlining of: " ++ on ++ " within " ++ within) $
on `Set.notMember` inlined (get within t)
addSpec :: CoreFuncName -> CoreExpr -> Terminate -> Terminate
addSpec within on t = modify t within $ \x -> x{specs = insertH (specKey on) on $ specs x}
specKey = shellify . blurVar
askSpec :: CoreFuncName -> CoreExpr -> Terminate -> Bool
askSpec within on t = logger t ("Skipped spec of:\n" ++ show on ++ "\nbecause of\n" ++ show res) $
                      null res
where
res = findH (specKey on) $ specs $ get within t
cloneSpec :: CoreFuncName -> CoreFuncName -> Terminate -> Terminate
cloneSpec from to t = case Map.lookup from (terminate t) of
Nothing -> t
Just y -> t{terminate = Map.insert to y{inlined=Set.empty} $ terminate t}
| ndmitchell/firstify | Yhc/Core/Firstify/Mitchell/Terminate.hs | bsd-3-clause | 2,289 | 0 | 14 | 527 | 848 | 442 | 406 | 46 | 2 |