| code | repo_name | path | license | size | n_ast_errors | ast_max_depth | n_whitespaces | n_ast_nodes | n_ast_terminals | n_ast_nonterminals | loc | cycloplexity |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 5 to 1.03M | stringlengths 5 to 90 | stringlengths 4 to 158 | stringclasses 15 values | int64 5 to 1.03M | int64 0 to 53.9k | int64 2 to 4.17k | int64 0 to 365k | int64 3 to 317k | int64 1 to 171k | int64 1 to 146k | int64 -1 to 37.3k | int64 -1 to 1.31k |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Database.Persist.Sql.Orphan.PersistQuery
( deleteWhereCount
, updateWhereCount
, decorateSQLWithLimitOffset
) where
import Database.Persist hiding (updateField)
import Database.Persist.Sql.Util (
entityColumnNames, parseEntityValues, isIdField)
import Database.Persist.Sql.Types
import Database.Persist.Sql.Raw
import Database.Persist.Sql.Orphan.PersistStore (withRawQuery)
import Database.Persist.Sql.Util (dbIdColumns)
import qualified Data.Text as T
import Data.Text (Text)
import Data.Monoid (Monoid (..), (<>))
import Data.Int (Int64)
import Control.Monad.IO.Class
import Control.Monad.Trans.Reader (ReaderT, ask)
import Control.Exception (throwIO)
import qualified Data.Conduit.List as CL
import Data.Conduit
import Data.ByteString.Char8 (readInteger)
import Data.Maybe (isJust)
import Data.List (transpose, inits, find)
-- orphan instance, defined here for modularity
instance PersistQuery SqlBackend where
count filts = do
conn <- ask
let wher = if null filts
then ""
else filterClause False conn filts
let sql = mconcat
[ "SELECT COUNT(*) FROM "
, connEscapeName conn $ entityDB t
, wher
]
withRawQuery sql (getFiltsValues conn filts) $ do
mm <- CL.head
case mm of
Just [PersistInt64 i] -> return $ fromIntegral i
Just [PersistDouble i] -> return $ fromIntegral (truncate i :: Int64) -- gb oracle
Just [PersistByteString i] -> case readInteger i of -- gb mssql
Just (ret,"") -> return $ fromIntegral ret
xs -> error $ "invalid number i["++show i++"] xs[" ++ show xs ++ "]"
Just xs -> error $ "count:invalid sql return xs["++show xs++"] sql["++show sql++"]"
Nothing -> error $ "count:invalid sql returned nothing sql["++show sql++"]"
where
t = entityDef $ dummyFromFilts filts
selectSourceRes filts opts = do
conn <- ask
srcRes <- rawQueryRes (sql conn) (getFiltsValues conn filts)
return $ fmap ($= CL.mapM parse) srcRes
where
(limit, offset, orders) = limitOffsetOrder opts
parse vals = case parseEntityValues t vals of
Left s -> liftIO $ throwIO $ PersistMarshalError s
Right row -> return row
t = entityDef $ dummyFromFilts filts
wher conn = if null filts
then ""
else filterClause False conn filts
ord conn =
case map (orderClause False conn) orders of
[] -> ""
ords -> " ORDER BY " <> T.intercalate "," ords
cols = T.intercalate ", " . entityColumnNames t
sql conn = connLimitOffset conn (limit,offset) (not (null orders)) $ mconcat
[ "SELECT "
, cols conn
, " FROM "
, connEscapeName conn $ entityDB t
, wher conn
, ord conn
]
selectKeysRes filts opts = do
conn <- ask
srcRes <- rawQueryRes (sql conn) (getFiltsValues conn filts)
return $ fmap ($= CL.mapM parse) srcRes
where
t = entityDef $ dummyFromFilts filts
cols conn = T.intercalate "," $ dbIdColumns conn t
wher conn = if null filts
then ""
else filterClause False conn filts
sql conn = connLimitOffset conn (limit,offset) (not (null orders)) $ mconcat
[ "SELECT "
, cols conn
, " FROM "
, connEscapeName conn $ entityDB t
, wher conn
, ord conn
]
(limit, offset, orders) = limitOffsetOrder opts
ord conn =
case map (orderClause False conn) orders of
[] -> ""
ords -> " ORDER BY " <> T.intercalate "," ords
parse xs = do
keyvals <- case entityPrimary t of
Nothing ->
case xs of
[PersistInt64 x] -> return [PersistInt64 x]
[PersistDouble x] -> return [PersistInt64 (truncate x)] -- oracle returns Double
_ -> liftIO $ throwIO $ PersistMarshalError $ "Unexpected in selectKeys False: " <> T.pack (show xs)
Just pdef ->
let pks = map fieldHaskell $ compositeFields pdef
keyvals = map snd $ filter (\(a, _) -> let ret=isJust (find (== a) pks) in ret) $ zip (map fieldHaskell $ entityFields t) xs
in return keyvals
case keyFromValues keyvals of
Right k -> return k
Left _ -> error "selectKeysImpl: keyFromValues failed"
deleteWhere filts = do
_ <- deleteWhereCount filts
return ()
updateWhere filts upds = do
_ <- updateWhereCount filts upds
return ()
-- | Same as 'deleteWhere', but returns the number of rows affected.
--
-- Since 1.1.5
deleteWhereCount :: (PersistEntity val, MonadIO m, PersistEntityBackend val ~ SqlBackend)
=> [Filter val]
-> ReaderT SqlBackend m Int64
deleteWhereCount filts = do
conn <- ask
let t = entityDef $ dummyFromFilts filts
let wher = if null filts
then ""
else filterClause False conn filts
sql = mconcat
[ "DELETE FROM "
, connEscapeName conn $ entityDB t
, wher
]
rawExecuteCount sql $ getFiltsValues conn filts
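-- A usage sketch in comments only; @Person@/@PersonAge@ are hypothetical
-- entities, not defined anywhere in this module:
--
-- > n <- deleteWhereCount [PersonAge >. 40]
-- > liftIO $ print (n :: Int64)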
-- | Same as 'updateWhere', but returns the number of rows affected.
--
-- Since 1.1.5
updateWhereCount :: (PersistEntity val, MonadIO m, SqlBackend ~ PersistEntityBackend val)
=> [Filter val]
-> [Update val]
-> ReaderT SqlBackend m Int64
updateWhereCount _ [] = return 0
updateWhereCount filts upds = do
conn <- ask
let wher = if null filts
then ""
else filterClause False conn filts
let sql = mconcat
[ "UPDATE "
, connEscapeName conn $ entityDB t
, " SET "
, T.intercalate "," $ map (go' conn . go) upds
, wher
]
let dat = map updatePersistValue upds `mappend`
getFiltsValues conn filts
rawExecuteCount sql dat
where
t = entityDef $ dummyFromFilts filts
go'' n Assign = n <> "=?"
go'' n Add = mconcat [n, "=", n, "+?"]
go'' n Subtract = mconcat [n, "=", n, "-?"]
go'' n Multiply = mconcat [n, "=", n, "*?"]
go'' n Divide = mconcat [n, "=", n, "/?"]
go'' _ (BackendSpecificUpdate up) = error $ T.unpack $ "BackendSpecificUpdate " `mappend` up `mappend` " not supported"
go' conn (x, pu) = go'' (connEscapeName conn x) pu
go x = (updateField x, updateUpdate x)
updateField (Update f _ _) = fieldName f
updateField _ = error "BackendUpdate not implemented"
fieldName :: forall record typ. (PersistEntity record, PersistEntityBackend record ~ SqlBackend) => EntityField record typ -> DBName
fieldName f = fieldDB $ persistFieldDef f
dummyFromFilts :: [Filter v] -> Maybe v
dummyFromFilts _ = Nothing
getFiltsValues :: forall val. (PersistEntity val, PersistEntityBackend val ~ SqlBackend)
=> SqlBackend -> [Filter val] -> [PersistValue]
getFiltsValues conn = snd . filterClauseHelper False False conn OrNullNo
data OrNull = OrNullYes | OrNullNo
filterClauseHelper :: (PersistEntity val, PersistEntityBackend val ~ SqlBackend)
=> Bool -- ^ include table name?
-> Bool -- ^ include WHERE?
-> SqlBackend
-> OrNull
-> [Filter val]
-> (Text, [PersistValue])
filterClauseHelper includeTable includeWhere conn orNull filters =
(if not (T.null sql) && includeWhere
then " WHERE " <> sql
else sql, vals)
where
(sql, vals) = combineAND filters
combineAND = combine " AND "
combine s fs =
(T.intercalate s $ map wrapP a, mconcat b)
where
(a, b) = unzip $ map go fs
wrapP x = T.concat ["(", x, ")"]
go (BackendFilter _) = error "BackendFilter not expected"
go (FilterAnd []) = ("1=1", [])
go (FilterAnd fs) = combineAND fs
go (FilterOr []) = ("1=0", [])
go (FilterOr fs) = combine " OR " fs
go (Filter field value pfilter) =
let t = entityDef $ dummyFromFilts [Filter field value pfilter]
in case (isIdField field, entityPrimary t, allVals) of
(True, Just pdef, PersistList ys:_) ->
if length (compositeFields pdef) /= length ys
then error $ "wrong number of entries in compositeFields vs PersistList allVals=" ++ show allVals
else
case (allVals, pfilter, isCompFilter pfilter) of
([PersistList xs], Eq, _) ->
let sqlcl=T.intercalate " and " (map (\a -> connEscapeName conn (fieldDB a) <> showSqlFilter pfilter <> "? ") (compositeFields pdef))
in (wrapSql sqlcl,xs)
([PersistList xs], Ne, _) ->
let sqlcl=T.intercalate " or " (map (\a -> connEscapeName conn (fieldDB a) <> showSqlFilter pfilter <> "? ") (compositeFields pdef))
in (wrapSql sqlcl,xs)
(_, In, _) ->
let xxs = transpose (map fromPersistList allVals)
sqls=map (\(a,xs) -> connEscapeName conn (fieldDB a) <> showSqlFilter pfilter <> "(" <> T.intercalate "," (replicate (length xs) " ?") <> ") ") (zip (compositeFields pdef) xxs)
in (wrapSql (T.intercalate " and " (map wrapSql sqls)), concat xxs)
(_, NotIn, _) ->
let xxs = transpose (map fromPersistList allVals)
sqls=map (\(a,xs) -> connEscapeName conn (fieldDB a) <> showSqlFilter pfilter <> "(" <> T.intercalate "," (replicate (length xs) " ?") <> ") ") (zip (compositeFields pdef) xxs)
in (wrapSql (T.intercalate " or " (map wrapSql sqls)), concat xxs)
([PersistList xs], _, True) ->
let zs = tail (inits (compositeFields pdef))
sql1 = map (\b -> wrapSql (T.intercalate " and " (map (\(i,a) -> sql2 (i==length b) a) (zip [1..] b)))) zs
sql2 islast a = connEscapeName conn (fieldDB a) <> (if islast then showSqlFilter pfilter else showSqlFilter Eq) <> "? "
sqlcl = T.intercalate " or " sql1
in (wrapSql sqlcl, concat (tail (inits xs)))
(_, BackendSpecificFilter _, _) -> error "unhandled type BackendSpecificFilter for composite/non id primary keys"
_ -> error $ "unhandled type/filter for composite/non id primary keys pfilter=" ++ show pfilter ++ " persistList="++show allVals
(True, Just pdef, _) -> error $ "unhandled error for composite/non id primary keys pfilter=" ++ show pfilter ++ " persistList=" ++ show allVals ++ " pdef=" ++ show pdef
_ -> case (isNull, pfilter, varCount) of
(True, Eq, _) -> (name <> " IS NULL", [])
(True, Ne, _) -> (name <> " IS NOT NULL", [])
(False, Ne, _) -> (T.concat
[ "("
, name
, " IS NULL OR "
, name
, " <> "
, qmarks
, ")"
], notNullVals)
-- We use 1=2 (and below 1=1) to avoid using TRUE and FALSE, since
-- not all databases support those words directly.
(_, In, 0) -> ("1=2" <> orNullSuffix, [])
(False, In, _) -> (name <> " IN " <> qmarks <> orNullSuffix, allVals)
(True, In, _) -> (T.concat
[ "("
, name
, " IS NULL OR "
, name
, " IN "
, qmarks
, ")"
], notNullVals)
(_, NotIn, 0) -> ("1=1", [])
(False, NotIn, _) -> (T.concat
[ "("
, name
, " IS NULL OR "
, name
, " NOT IN "
, qmarks
, ")"
], notNullVals)
(True, NotIn, _) -> (T.concat
[ "("
, name
, " IS NOT NULL AND "
, name
, " NOT IN "
, qmarks
, ")"
], notNullVals)
_ -> (name <> showSqlFilter pfilter <> "?" <> orNullSuffix, allVals)
where
isCompFilter Lt = True
isCompFilter Le = True
isCompFilter Gt = True
isCompFilter Ge = True
isCompFilter _ = False
wrapSql sqlcl = "(" <> sqlcl <> ")"
fromPersistList (PersistList xs) = xs
fromPersistList other = error $ "expected PersistList but found " ++ show other
filterValueToPersistValues :: forall a. PersistField a => Either a [a] -> [PersistValue]
filterValueToPersistValues v = map toPersistValue $ either return id v
orNullSuffix =
case orNull of
OrNullYes -> mconcat [" OR ", name, " IS NULL"]
OrNullNo -> ""
isNull = any (== PersistNull) allVals
notNullVals = filter (/= PersistNull) allVals
allVals = filterValueToPersistValues value
tn = connEscapeName conn $ entityDB
$ entityDef $ dummyFromFilts [Filter field value pfilter]
name =
(if includeTable
then ((tn <> ".") <>)
else id)
$ connEscapeName conn $ fieldName field
qmarks = case value of
Left _ -> "?"
Right x ->
let x' = filter (/= PersistNull) $ map toPersistValue x
in "(" <> T.intercalate "," (map (const "?") x') <> ")"
varCount = case value of
Left _ -> 1
Right x -> length x
showSqlFilter Eq = "="
showSqlFilter Ne = "<>"
showSqlFilter Gt = ">"
showSqlFilter Lt = "<"
showSqlFilter Ge = ">="
showSqlFilter Le = "<="
showSqlFilter In = " IN "
showSqlFilter NotIn = " NOT IN "
showSqlFilter (BackendSpecificFilter s) = s
updatePersistValue :: Update v -> PersistValue
updatePersistValue (Update _ v _) = toPersistValue v
updatePersistValue _ = error "BackendUpdate not implemented"
filterClause :: (PersistEntity val, PersistEntityBackend val ~ SqlBackend)
=> Bool -- ^ include table name?
-> SqlBackend
-> [Filter val]
-> Text
filterClause b c = fst . filterClauseHelper b True c OrNullNo
orderClause :: (PersistEntity val, PersistEntityBackend val ~ SqlBackend)
=> Bool -- ^ include the table name
-> SqlBackend
-> SelectOpt val
-> Text
orderClause includeTable conn o =
case o of
Asc x -> name x
Desc x -> name x <> " DESC"
_ -> error "orderClause: expected Asc or Desc, not limit or offset"
where
dummyFromOrder :: SelectOpt a -> Maybe a
dummyFromOrder _ = Nothing
tn = connEscapeName conn $ entityDB $ entityDef $ dummyFromOrder o
name :: (PersistEntityBackend record ~ SqlBackend, PersistEntity record)
=> EntityField record typ -> Text
name x =
(if includeTable
then ((tn <> ".") <>)
else id)
$ connEscapeName conn $ fieldName x
-- | Generates the SQL for LIMIT and OFFSET clauses for postgres, sqlite and mysql.
decorateSQLWithLimitOffset::Text -> (Int,Int) -> Bool -> Text -> Text
decorateSQLWithLimitOffset nolimit (limit,offset) _ sql =
let
lim = case (limit, offset) of
(0, 0) -> ""
(0, _) -> T.cons ' ' nolimit
(_, _) -> " LIMIT " <> T.pack (show limit)
off = if offset == 0
then ""
else " OFFSET " <> T.pack (show offset)
in mconcat
[ sql
, lim
, off
]
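-- A minimal sketch (not part of the original module) of the SQL text produced
-- by 'decorateSQLWithLimitOffset'. The "LIMIT -1" no-limit clause is an
-- assumption borrowed from SQLite's convention.
_limitOffsetExample :: (Text, Text)
_limitOffsetExample =
    ( decorateSQLWithLimitOffset "LIMIT -1" (10, 0) False "SELECT * FROM t"
      -- ==> "SELECT * FROM t LIMIT 10"
    , decorateSQLWithLimitOffset "LIMIT -1" (0, 5) False "SELECT * FROM t"
      -- ==> "SELECT * FROM t LIMIT -1 OFFSET 5"
    )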
| jasonzoladz/persistent | persistent/Database/Persist/Sql/Orphan/PersistQuery.hs | mit | 17,476 | 0 | 33 | 6,945 | 4,826 | 2,491 | 2,335 | 344 | 43 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude
, BangPatterns
, NondecreasingIndentation
#-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO.Encoding.Latin1
-- Copyright : (c) The University of Glasgow, 2009
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable
--
-- Latin-1 codecs for the IO library
--
-- Portions Copyright : (c) Tom Harper 2008-2009,
-- (c) Bryan O'Sullivan 2009,
-- (c) Duncan Coutts 2009
--
-----------------------------------------------------------------------------
module GHC.IO.Encoding.Latin1 (
latin1, mkLatin1,
latin1_checked, mkLatin1_checked,
latin1_decode,
latin1_encode,
latin1_checked_encode,
) where
import GHC.Base
import GHC.Real
import GHC.Num
-- import GHC.IO
import GHC.IO.Buffer
import GHC.IO.Encoding.Failure
import GHC.IO.Encoding.Types
-- -----------------------------------------------------------------------------
-- Latin1
latin1 :: TextEncoding
latin1 = mkLatin1 ErrorOnCodingFailure
-- | @since 4.4.0.0
mkLatin1 :: CodingFailureMode -> TextEncoding
mkLatin1 cfm = TextEncoding { textEncodingName = "ISO8859-1",
mkTextDecoder = latin1_DF cfm,
mkTextEncoder = latin1_EF cfm }
latin1_DF :: CodingFailureMode -> IO (TextDecoder ())
latin1_DF cfm =
return (BufferCodec {
encode = latin1_decode,
recover = recoverDecode cfm,
close = return (),
getState = return (),
setState = const $ return ()
})
latin1_EF :: CodingFailureMode -> IO (TextEncoder ())
latin1_EF cfm =
return (BufferCodec {
encode = latin1_encode,
recover = recoverEncode cfm,
close = return (),
getState = return (),
setState = const $ return ()
})
latin1_checked :: TextEncoding
latin1_checked = mkLatin1_checked ErrorOnCodingFailure
-- | @since 4.4.0.0
mkLatin1_checked :: CodingFailureMode -> TextEncoding
mkLatin1_checked cfm = TextEncoding { textEncodingName = "ISO8859-1(checked)",
mkTextDecoder = latin1_DF cfm,
mkTextEncoder = latin1_checked_EF cfm }
latin1_checked_EF :: CodingFailureMode -> IO (TextEncoder ())
latin1_checked_EF cfm =
return (BufferCodec {
encode = latin1_checked_encode,
recover = recoverEncode cfm,
close = return (),
getState = return (),
setState = const $ return ()
})
latin1_decode :: DecodeBuffer
latin1_decode
input@Buffer{ bufRaw=iraw, bufL=ir0, bufR=iw, bufSize=_ }
output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow0, bufSize=os }
= let
loop !ir !ow
| ow >= os = done OutputUnderflow ir ow
| ir >= iw = done InputUnderflow ir ow
| otherwise = do
c0 <- readWord8Buf iraw ir
ow' <- writeCharBuf oraw ow (unsafeChr (fromIntegral c0))
loop (ir+1) ow'
-- lambda-lifted, to avoid thunks being built in the inner-loop:
done why !ir !ow = return (why,
if ir == iw then input{ bufL=0, bufR=0 }
else input{ bufL=ir },
output{ bufR=ow })
in
loop ir0 ow0
latin1_encode :: EncodeBuffer
latin1_encode
input@Buffer{ bufRaw=iraw, bufL=ir0, bufR=iw, bufSize=_ }
output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow0, bufSize=os }
= let
done why !ir !ow = return (why,
if ir == iw then input{ bufL=0, bufR=0 }
else input{ bufL=ir },
output{ bufR=ow })
loop !ir !ow
| ow >= os = done OutputUnderflow ir ow
| ir >= iw = done InputUnderflow ir ow
| otherwise = do
(c,ir') <- readCharBuf iraw ir
writeWord8Buf oraw ow (fromIntegral (ord c))
loop ir' (ow+1)
in
loop ir0 ow0
latin1_checked_encode :: EncodeBuffer
latin1_checked_encode
input@Buffer{ bufRaw=iraw, bufL=ir0, bufR=iw, bufSize=_ }
output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow0, bufSize=os }
= let
done why !ir !ow = return (why,
if ir == iw then input{ bufL=0, bufR=0 }
else input{ bufL=ir },
output{ bufR=ow })
loop !ir !ow
| ow >= os = done OutputUnderflow ir ow
| ir >= iw = done InputUnderflow ir ow
| otherwise = do
(c,ir') <- readCharBuf iraw ir
if ord c > 0xff then invalid else do
writeWord8Buf oraw ow (fromIntegral (ord c))
loop ir' (ow+1)
where
invalid = done InvalidSequence ir ow
in
loop ir0 ow0
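-- Usage note (not part of the original module): a TextEncoding built here can
-- be attached to a handle with @hSetEncoding h latin1@ from System.IO. The
-- difference between the two encoders is visible above: 'latin1_checked_encode'
-- reports 'InvalidSequence' for code points above 0xff, while 'latin1_encode'
-- silently truncates them to a single byte.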
| beni55/haste-compiler | libraries/ghc-7.10/base/GHC/IO/Encoding/Latin1.hs | bsd-3-clause | 5,128 | 0 | 19 | 1,719 | 1,300 | 708 | 592 | 106 | 3 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Text.Greek.Script.Word where
import Prelude hiding (Word)
import GHC.Generics (Generic)
import Data.Aeson (ToJSON, FromJSON)
import Data.Text (Text)
import Text.Greek.Source.FileReference
import qualified Text.Greek.Script.Elision as Elision
import qualified Text.Greek.Script.Punctuation as Punctuation
import qualified Control.Lens as Lens
import qualified Data.Text as Text
import qualified Text.Greek.Source.Morphgnt as Morphgnt
data IsCapitalized = IsCapitalized | IsNotCapitalized deriving (Eq, Ord, Show, Generic)
instance ToJSON IsCapitalized
instance FromJSON IsCapitalized
data LastWord = IsLastWord | NotLastWord deriving (Eq, Ord, Show, Generic)
instance ToJSON LastWord
instance FromJSON LastWord
data Crasis = HasCrasis | NoCrasis deriving (Eq, Ord, Show, Generic)
instance ToJSON Crasis
instance FromJSON Crasis
data InitialEnclitic
= UnaccentedAfterDoubleIsEnclitic
| SandwichDoubleEncliticIsEnclitic
| DoubleAccentNotEnclitic
| AccentedUnlikelyEnclitic
| AccentedNotEnclitic
| NoSyllableNotEnclitic
| NoAccentUncertainEnclitic
| OtherUncertainEnclitic
deriving (Eq, Ord, Show, Generic)
instance ToJSON InitialEnclitic
instance FromJSON InitialEnclitic
newtype ParagraphIndex = ParagraphIndex { getParagraphIndex :: Int } deriving (Eq, Ord, Show, Generic, Num)
instance ToJSON ParagraphIndex
instance FromJSON ParagraphIndex
newtype VerseIndex = VerseIndex { getVerseIndex :: Int } deriving (Eq, Ord, Show, Generic, Num)
instance ToJSON VerseIndex
instance FromJSON VerseIndex
data Verse = Verse
{ verseIndex :: VerseIndex
, verseTitle :: Text
} deriving (Eq, Ord, Show, Generic)
instance ToJSON Verse
instance FromJSON Verse
newtype Index = Index { getIndex :: Int } deriving (Eq, Ord, Show, Generic)
instance ToJSON Index
instance FromJSON Index
data Word i s = Word
{ getInfo :: i
, getSurface :: s
} deriving (Generic)
Lens.makeLensesFor
[ ("getInfo", "info")
, ("getSurface", "surface")
]
''Word
instance (ToJSON i, ToJSON s) => ToJSON (Word i s)
instance (FromJSON i, FromJSON s) => FromJSON (Word i s)
newtype Source = Source { getSource :: Text } deriving (Eq, Ord, Show, Generic)
instance ToJSON Source
instance FromJSON Source
data SourceInfo = SourceInfo
{ getSourceInfoWord :: Source
, getSourceInfoFile :: FileReference
} deriving (Eq, Ord, Show, Generic)
instance ToJSON SourceInfo
instance FromJSON SourceInfo
newtype LetterCount = LetterCount { getLetterCount :: Int } deriving (Eq, Show, Ord, Generic)
instance ToJSON LetterCount
instance FromJSON LetterCount
newtype MarkCount = MarkCount { getMarkCount :: Int } deriving (Eq, Show, Ord, Generic)
instance ToJSON MarkCount
instance FromJSON MarkCount
newtype VowelCount = VowelCount { getVowelCount :: Int } deriving (Eq, Show, Ord, Generic)
instance ToJSON VowelCount
instance FromJSON VowelCount
newtype ConsonantCount = ConsonantCount { getConsonantCount :: Int } deriving (Eq, Show, Ord, Generic)
instance ToJSON ConsonantCount
instance FromJSON ConsonantCount
newtype AcuteCircumflexCount = AcuteCircumflexCount { getAcuteCircumflexCount :: Int } deriving (Eq, Show, Ord, Generic)
instance ToJSON AcuteCircumflexCount
instance FromJSON AcuteCircumflexCount
newtype Prefix = Prefix { getPrefix :: Text } deriving (Eq, Show, Ord, Generic)
instance ToJSON Prefix
instance FromJSON Prefix
newtype Suffix = Suffix { getSuffix :: Text } deriving (Eq, Show, Ord, Generic)
Lens.makeLensesFor
[ ("getSuffix", "suffix")
]
''Suffix
instance ToJSON Suffix
instance FromJSON Suffix
makePrefix :: Text -> Maybe Prefix
makePrefix = fmap Prefix . nothingIfEmpty . Text.strip
makeSuffix :: Text -> Maybe Suffix
makeSuffix = fmap Suffix . nothingIfEmpty . Text.strip
nothingIfEmpty :: Text -> Maybe Text
nothingIfEmpty x | Text.null x = Nothing
nothingIfEmpty x = Just x
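-- Small illustrative values (not in the original source): 'makePrefix' and
-- 'makeSuffix' strip surrounding whitespace and reject empty results.
_affixExample :: (Maybe Prefix, Maybe Suffix)
_affixExample = (makePrefix (Text.pack "   "), makeSuffix (Text.pack " men "))
-- ==> (Nothing, Just (Suffix "men"))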
type Affix = (Maybe Prefix, Maybe Suffix)
data Accent
= AccentNone
| AccentAcuteUltima
| AccentAcutePenult
| AccentAcuteAntepenult
| AccentCircumflexUltima
| AccentCircumflexPenult
deriving (Eq, Ord, Show, Generic)
instance ToJSON Accent
instance FromJSON Accent
data UltimaUnaccented = UltimaUnaccented | UltimaAccented deriving (Eq, Ord, Show, Generic)
instance ToJSON UltimaUnaccented
instance FromJSON UltimaUnaccented
getUltimaUnaccented :: Accent -> UltimaUnaccented
getUltimaUnaccented AccentAcuteUltima = UltimaAccented
getUltimaUnaccented AccentCircumflexUltima = UltimaAccented
getUltimaUnaccented _ = UltimaUnaccented
type IndexedP a = (Index, a)
type Indexed = IndexedP ()
indexLens :: Lens.Lens (Index, a) (b, a) Index b
indexLens = Lens._1
type BasicInfo = (Affix, ParagraphIndex, Verse, Morphgnt.Word)
type BasicP a = IndexedP (BasicInfo, a)
type Basic = BasicP ()
basicLens :: Lens.Lens (IndexedP a) (IndexedP b) a b
basicLens = Lens._2
prefixLens :: Lens.Lens (IndexedP (((Maybe Prefix, a), x, y, z), p)) (IndexedP (((b, a), x, y, z), p)) (Maybe Prefix) b
prefixLens = basicLens . Lens._1 . Lens._1 . Lens._1
suffixLens :: Lens.Lens (IndexedP (((a, Maybe Suffix), x, y, z), p)) (IndexedP (((a, b), x, y, z), p)) (Maybe Suffix) b
suffixLens = basicLens . Lens._1 . Lens._1 . Lens._2
paragraphIndexLens :: Lens.Lens (IndexedP ((x, ParagraphIndex, y, z), p)) (IndexedP ((x, b, y, z), p)) ParagraphIndex b
paragraphIndexLens = basicLens . Lens._1 . Lens._2
verseLens :: Lens.Lens (IndexedP ((x, y, Verse, z), p)) (IndexedP ((x, y, b, z), p)) Verse b
verseLens = basicLens . Lens._1 . Lens._3
morphgntWordLens :: Lens.Lens (IndexedP ((x, y, z, Morphgnt.Word), p)) (IndexedP ((x, y, z, b), p)) Morphgnt.Word b
morphgntWordLens = basicLens . Lens._1 . Lens._4
type ElisionP a = BasicP (Elision.Pair, a)
type Elision = ElisionP ()
elisionLens' :: Lens.Lens (BasicP a) (BasicP b) a b
elisionLens' = basicLens . Lens._2
elisionLens :: Lens.Lens (BasicP (a, x)) (BasicP (b, x)) a b
elisionLens = elisionLens' . Lens._1
type CapitalP a = ElisionP (IsCapitalized, a)
type Capital = CapitalP ()
capitalLens' :: Lens.Lens (ElisionP a) (ElisionP b) a b
capitalLens' = elisionLens' . Lens._2
capitalLens :: Lens.Lens (ElisionP (IsCapitalized, x)) (ElisionP (b, x)) IsCapitalized b
capitalLens = capitalLens' . Lens._1
type WithCrasisP a = CapitalP (Crasis, a)
type WithCrasis = WithCrasisP ()
crasisLens' :: Lens.Lens (CapitalP a) (CapitalP b) a b
crasisLens' = capitalLens' . Lens._2
crasisLens :: Lens.Lens (CapitalP (Crasis, x)) (CapitalP (b, x)) Crasis b
crasisLens = crasisLens' . Lens._1
type SentenceP a = WithCrasisP (Punctuation.SentencePair, a)
type Sentence = SentenceP ()
sentenceLens' :: Lens.Lens (WithCrasisP a) (WithCrasisP b) a b
sentenceLens' = crasisLens' . Lens._2
sentenceLens :: Lens.Lens (WithCrasisP (Punctuation.SentencePair, x)) (WithCrasisP (b, x)) Punctuation.SentencePair b
sentenceLens = sentenceLens' . Lens._1
type WithEncliticP a = SentenceP (InitialEnclitic, a)
type WithEnclitic = WithEncliticP ()
encliticLens' :: Lens.Lens (SentenceP a) (SentenceP b) a b
encliticLens' = sentenceLens' . Lens._2
encliticLens :: Lens.Lens (SentenceP (InitialEnclitic, x)) (SentenceP (b, x)) InitialEnclitic b
encliticLens = encliticLens' . Lens._1
type WithAccentP a = WithEncliticP (Accent, a)
type WithAccent = WithAccentP ()
accentLens' :: Lens.Lens (WithEncliticP a) (WithEncliticP b) a b
accentLens' = encliticLens' . Lens._2
accentLens :: Lens.Lens (WithEncliticP (Accent, x)) (WithEncliticP (b, x)) Accent b
accentLens = accentLens' . Lens._1
addIndex :: [Word a s] -> [Word (IndexedP (a, ())) s]
addIndex = fmap arrange . zip (fmap Index [0..])
where
arrange (i, Word a s) = Word (i, (a, ())) s
tagLastWords :: [Word a b] -> [(Word a b, LastWord)]
tagLastWords = reverse . go . reverse
where
go [] = []
go (x : xs) = (x, IsLastWord) : finish xs
finish = fmap (\x -> (x, NotLastWord))
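-- A hedged check (not in the original module) of 'tagLastWords': only the
-- final word of the list is tagged 'IsLastWord'.
_tagLastWordsExample :: [(Word Int Char, LastWord)]
_tagLastWordsExample = tagLastWords [Word 0 'a', Word 1 'b']
-- ==> [(Word 0 'a', NotLastWord), (Word 1 'b', IsLastWord)]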
addElisionPair :: Elision.Pair -> Basic -> Elision
addElisionPair e = Lens.set elisionLens' (e, ())
addCapital :: IsCapitalized -> Elision -> Capital
addCapital x = Lens.set capitalLens' (x, ())
addCrasis :: Crasis -> Capital -> WithCrasis
addCrasis c = Lens.set crasisLens' (c, ())
addSentencePair :: Punctuation.SentencePair -> WithCrasis -> Sentence
addSentencePair s = Lens.set sentenceLens' (s, ())
addInitialEnclitic :: InitialEnclitic -> Sentence -> WithEnclitic
addInitialEnclitic e = Lens.set encliticLens' (e, ())
addAccent :: Accent -> WithEnclitic -> WithAccent
addAccent a = Lens.set accentLens' (a, ())
| scott-fleischman/greek-grammar | haskell/greek-grammar/src/Text/Greek/Script/Word.hs | mit | 8,514 | 0 | 11 | 1,292 | 3,057 | 1,686 | 1,371 | 193 | 2 |
{-# LANGUAGE FlexibleInstances #-}
module Cric.Packages
( PackageManager(..), PkgManagerError(..), Package(..)
, getPackageManager
, installPackage
, installWithRPM, installWithYum, installWithAPT, installWithPkgAdd
, removePackage
, removeWithRPM, removeWithYum, removeWithAPT, removeWithPkgAdd
) where
import qualified Data.ByteString.Char8 as BS
import Cric
import Cric.SystemInfo
class Show p => Package p where
urlForRPM :: OperatingSystem -> p -> String
urlForRPM _ _ = ""
nameForYum :: OperatingSystem -> p -> String
nameForYum _ _ = ""
nameForAPT :: OperatingSystem -> p -> String
nameForAPT _ _ = ""
nameForPkgAdd :: OperatingSystem -> p -> String
nameForPkgAdd _ _ = ""
instance Package String where
urlForRPM = flip const
nameForYum = flip const
nameForAPT = flip const
nameForPkgAdd = flip const
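-- A hypothetical package definition (not part of this module) showing how a
-- concrete type can name itself per package manager; the package name is an
-- assumption, not guaranteed to exist in any particular repository.
data Htop = Htop deriving Show

instance Package Htop where
    nameForYum _ _ = "htop"
    nameForAPT _ _ = "htop"
    nameForPkgAdd _ _ = "htop"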
data PackageManager = RPM | Yum | APT | PkgAdd
| UnknownPackageManager
deriving (Show, Eq)
data PkgManagerError = NoPackageManagerFound
| UnknownPkgManagerError Result
-- ^ Unknown error together with status code and output
deriving (Show, Eq)
-- | Find a package manager on the server by testing if the command is available.
getPackageManager :: MonadCric m => m PackageManager
getPackageManager = getPackageManager'
[ ("yum", Yum)
, ("apt-get", APT)
, ("rpm", RPM)
, ("pkg_add", PkgAdd)
]
where
getPackageManager' :: MonadCric m => [(String, PackageManager)] -> m PackageManager
getPackageManager' [] = return UnknownPackageManager
getPackageManager' ((cmd,mgr):rest) = do
test <- testCommand cmd
if test
then return mgr
else getPackageManager' rest
-- | Install a package with the package manager found.
installPackage :: (MonadCric m, Package p) => p -> m (Either PkgManagerError BS.ByteString)
installPackage pkg = do
logMsg Info $ "Installing " ++ show pkg ++ " ..."
pkgMgr <- getPackageManager
os <- getOS
result <- case pkgMgr of
Yum -> installWithYum os pkg
APT -> installWithAPT os pkg
RPM -> installWithRPM os pkg
PkgAdd -> installWithPkgAdd os pkg
UnknownPackageManager -> return $ Left NoPackageManagerFound
case result of
Left err -> logMsg Error $ "Error installing " ++ show pkg ++ " (" ++ show err ++ ")"
Right _ -> logMsg Info $ "Package " ++ show pkg ++ " installed successfully."
return result
installWithRPM :: (MonadCric m, Package p) => OperatingSystem -> p -> m (Either PkgManagerError BS.ByteString)
installWithRPM os pkg = execManager . ("rpm -i "++) $ urlForRPM os pkg
installWithYum :: (MonadCric m, Package p) => OperatingSystem -> p -> m (Either PkgManagerError BS.ByteString)
installWithYum os pkg = execManager . ("yum install -y "++) $ nameForYum os pkg
installWithAPT :: (MonadCric m, Package p) => OperatingSystem -> p -> m (Either PkgManagerError BS.ByteString)
installWithAPT os pkg = execManager . ("apt-get install -y "++) $ nameForAPT os pkg
installWithPkgAdd :: (MonadCric m, Package p) => OperatingSystem -> p -> m (Either PkgManagerError BS.ByteString)
installWithPkgAdd os pkg = execManager . ("pkg_add "++) $ nameForPkgAdd os pkg
-- | Remove a package with the package manager found.
removePackage :: (MonadCric m, Package p) => p -> m (Either PkgManagerError BS.ByteString)
removePackage pkg = do
logMsg Info $ "Removing " ++ show pkg ++ " ..."
pkgMgr <- getPackageManager
os <- getOS
result <- case pkgMgr of
Yum -> removeWithYum os pkg
APT -> removeWithAPT os pkg
RPM -> removeWithRPM os pkg
PkgAdd -> removeWithPkgAdd os pkg
UnknownPackageManager -> return $ Left NoPackageManagerFound
case result of
Left err -> logMsg Error $ "Error removing " ++ show pkg ++ " (" ++ show err ++ ")"
Right _ -> logMsg Info $ "Package " ++ show pkg ++ " removed successfully."
return result
removeWithRPM :: (MonadCric m, Package p) => OperatingSystem -> p -> m (Either PkgManagerError BS.ByteString)
removeWithRPM os pkg = execManager . ("rpm -e "++) $ urlForRPM os pkg
removeWithYum :: (MonadCric m, Package p) => OperatingSystem -> p -> m (Either PkgManagerError BS.ByteString)
removeWithYum os pkg = execManager . ("yum remove -y "++) $ nameForYum os pkg
removeWithAPT :: (MonadCric m, Package p) => OperatingSystem -> p -> m (Either PkgManagerError BS.ByteString)
removeWithAPT os pkg = execManager . ("apt-get remove -y "++) $ nameForAPT os pkg
removeWithPkgAdd :: (MonadCric m, Package p) => OperatingSystem -> p -> m (Either PkgManagerError BS.ByteString)
removeWithPkgAdd os pkg = execManager . ("pkg_delete "++) $ nameForPkgAdd os pkg
execManager :: MonadCric m => String -> m (Either PkgManagerError BS.ByteString)
execManager cmd = do
result <- exec cmd
return $ if isSuccess result
then Right $ outFromResult result
else Left $ UnknownPkgManagerError result
| thoferon/cric | src/Cric/Packages.hs | mit | 5,023 | 0 | 15 | 1,119 | 1,510 | 769 | 741 | 96 | 6 |
module ParserLib.SimpleParser where
import Control.Applicative (Alternative(..), some)
import Data.Bifunctor (first)
import Data.Char (isDigit, isLetter)
-- We only use pattern matching to extract the function, so we don't need to use record syntax
newtype Parser a = Parser (String -> Either String (a, String))
runParser :: Parser a -> String -> Either String a
runParser (Parser parse) input =
case parse input of
Left msg -> Left msg
Right (x, "") -> Right x
Right (_, r) -> Left $ "Parser did not consume the entire input " ++ r
charMatch :: (Char -> Bool) -> Parser Char
charMatch f = Parser charParser
where charParser (c:r) | f c = Right (c, r)
| otherwise = Left $ "Predicate does not match at: " ++ head (lines (c:r))
charParser "" = Left "Empty input"
instance Functor Parser where
fmap f (Parser parse) = Parser $ fmap (first f) . parse
instance Applicative Parser where
pure x = Parser $ \s -> Right (x, s)
Parser p1 <*> Parser p2 = Parser app
where app s = case p1 s of
Left msg -> Left msg
Right (f, r) -> fmap (first f) (p2 r)
instance Alternative Parser where
empty = Parser $ const (Left "Alternative.empty")
Parser p1 <|> Parser p2 =
Parser $ \s -> case (p1 s, p2 s) of
(Right x, _) -> Right x
(_, Right x) -> Right x
(Left msg1, Left msg2) -> Left $ concat ["Alternative failed: ", msg1, " <|> ", msg2]
-- We don't really need the monad instance in our project since
-- the applicative instance is powerful enough for our purpose.
-- i.e., you can comment this out and the project compiles!
instance Monad Parser where
return = pure
-- Parser a >>= (a -> Parser b) -> Parser b
(Parser p1) >>= f = Parser g
where
-- g :: String -> Either String (b, String)
g s = do
(a, r1) <- p1 s
let (Parser p2) = f a
p2 r1
char :: Char -> Parser Char
char ch = charMatch (==ch)
string :: String -> Parser ()
string "" = Parser $ \s -> Right ((), s)
string (c:r) = char c *> string r
number :: Parser Int
number = read <$> some digit
where digit = charMatch isDigit
name :: Parser String
name = some (charMatch isLetter)
parens :: Parser a -> Parser a
parens x = char '(' *> x <* char ')'
sepBy :: Parser s -> Parser a -> Parser [a]
sepBy s x = (:) <$> x <*> many (s *> x) <|> pure []
commaSep :: Parser a -> Parser [a]
commaSep = sepBy (char ',')
newline :: Parser ()
newline = string "\r\n" <|> string "\n"
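-- A small demonstration (not in the original file) of composing the
-- combinators above: parse a parenthesised, comma-separated list of numbers.
_numberListExample :: Either String [Int]
_numberListExample = runParser (parens (commaSep number)) "(1,2,3)"
-- ==> Right [1,2,3]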
| mohsen3/haskell-tutorials | src/nano-lang/src/ParserLib/SimpleParser.hs | mit | 2,503 | 0 | 14 | 646 | 970 | 487 | 483 | 56 | 3 |
module Asm.Parser (printTree
, parseText
, parseFile
, parseStatement
, parseArg) where
import Asm.Expr
import Asm.Preprocess
import Text.Parsec hiding (space, spaces, hexDigit)
import Text.Parsec.Expr
import Text.Parsec.String (Parser)
import Numeric
import Control.Monad
import Data.Maybe (catMaybes)
import Data.Char (ord, toUpper)
-- Lexer
parseBin :: String -> Integer
parseBin = foldl (\l r -> l * 2 + (if r == '1' then 1 else 0)) 0
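-- Sanity examples (not in the original file): 'parseBin' folds the digits left
-- to right, most significant bit first.
_parseBinExamples :: (Integer, Integer)
_parseBinExamples = (parseBin "101", parseBin "0011")
-- ==> (5, 3)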
hexDigit :: Parser Char
hexDigit = oneOf "abcdefABCDEF0123456789"
binPrefix :: Parser Integer
binPrefix = do
char '%'
str <- many1 $ oneOf "01"
return $ parseBin str
binSuffix :: Parser Integer
binSuffix = do
str <- many1 $ oneOf "01"
oneOf "Bb"
return $ parseBin str
binary :: Parser Integer
binary = binPrefix <|> binSuffix
hexPrefix :: Parser Integer
hexPrefix = do
char '$'
str <- many1 hexDigit
return $ fst . head . readHex $ str
hexSuffix :: Parser Integer
hexSuffix = do
str <- many1 hexDigit
oneOf "Hh"
return $ fst . head . readHex $ str
hexadecimal :: Parser Integer
hexadecimal = hexPrefix <|> hexSuffix
decimal :: Parser Integer
decimal = do
str <- many1 digit
return $ read str
charNum :: Parser Integer
charNum = do
char '\''
c <- anyChar
char '\''
return $ toInteger (ord c)
integer :: Parser Integer
integer = lexeme $ do
sign <- option '+' (oneOf "+-")
num <- try hexadecimal <|> try binary <|> try decimal <|> try charNum
notFollowedBy lblChar
return $ if sign == '+' then num else -num
space :: Parser Char
space = char ' '
spaces :: Parser String
spaces = many1 space
tabs :: Parser String
tabs = many1 tab
comment :: Parser String
comment = do
char ';'
many (try $ noneOf "\n")
whiteSpace :: Parser ()
whiteSpace = skipMany (spaces <|> tabs <|> comment)
lexeme :: Parser a -> Parser a
lexeme p = do
x <- p
whiteSpace
return x
stringNoCase :: String -> Parser String
stringNoCase "" = return ""
stringNoCase (s:str) = do
l <- satisfy (\c -> toUpper c == toUpper s)
r <- stringNoCase str
return $ l:r
symbol :: String -> Parser String
symbol = lexeme . stringNoCase
parens :: Parser a -> Parser a
parens p = do
symbol "("
x <- p
symbol ")"
return x
braces :: Parser a -> Parser a
braces p = do
symbol "{"
x <- p
symbol "}"
return x
semi :: Parser String
semi = symbol ";"
comma :: Parser String
comma = symbol ","
operator :: Parser String
operator = lexeme $ do
op <- oneOf "+-*/><%"
end <- case op of
'>' -> option "" (try (string ">"))
'<' -> option "" (try (string "<"))
_ -> return ""
return $ op:end
identifier :: Parser String
identifier = lexeme $ do
x <- letter <|> char '_'
xs <- many (alphaNum <|> char '_')
return $ x:xs
commaSep :: Parser a -> Parser [a]
commaSep p = lexeme $ p `sepBy` comma
semiSep :: Parser a -> Parser [a]
semiSep p = lexeme $ p `sepBy` semi
stringEscape :: Parser Char
stringEscape = char '\'' >> oneOf "\\\""
stringLiteral :: Parser String
stringLiteral = lexeme $ do
char '"'
str <- many (stringEscape <|> noneOf "\"")
char '"'
return str
singleton :: Parser a -> Parser [a]
singleton = liftM (:[])
lblChar :: Parser Char
lblChar = alphaNum <|> char '_'
lblIdentifier :: Parser String
lblIdentifier = lexeme $ many1 lblChar
parseMaybe :: (String -> Maybe b) -> Parser b
parseMaybe f = do
ident <- identifier
case f ident of
Nothing -> parserZero
Just x -> return x
-- Parser
asmlabel :: Parser Expr
asmlabel = lexeme $ do
name <- lblIdentifier
char ':'
return $ LabelDef name
condition :: Parser Expr
condition = Cond `fmap` parseMaybe readCondMaybe
instr :: Parser Expr
instr = do
instr <- parseMaybe readMaybeUpper
args <- case instr of
CALL -> jpCond
JR -> jpCond
JP -> jpCond
RET -> option [] (try $ singleton condition)
_ -> commaSep argExpr
return $ Instr instr args
where jpCond = do
cond <- option [] (try $ do
x <- condition
symbol ","
return [x])
arg <- argExpr
return $ cond ++ [arg]
num :: Parser Expr
num = do
x <- integer
return $ Literal (Num $ fromIntegral x)
labelref :: Parser Expr
labelref = do
ident <- lblIdentifier <|> symbol "$"
return $ Literal (Label ident)
constAssign :: Parser Expr
constAssign = do
name <- lblIdentifier
symbol "=" <|> (optional (char '.') >> symbol "equ")
val <- argExpr
return $ Define name val
asmstring :: Parser Expr
asmstring = String `fmap` stringLiteral
register :: Parser Expr
register = do
reg <- parseMaybe readMaybeUpper
return $ if regIs8Bit reg then Reg8 reg
else case reg of
IX -> Reg16Index IX
IY -> Reg16Index IY
_ -> Reg16 reg
regIndirect :: Parser Expr
regIndirect = do
regName <- parens $ choice [
symbol "hl",
symbol "de",
symbol "bc",
try $ symbol "ix",
symbol "iy",
symbol "sp",
symbol "c"
]
return $ case regName of
"ix" -> RegIndex IX (Literal $ Num 0)
"iy" -> RegIndex IY (Literal $ Num 0)
"hl" -> Reg8 HL'
_ -> RegIndir $ readReg regName
regIndex :: Parser Expr
regIndex = parens $ do
reg <- try $ symbol "ix" <|> symbol "iy"
op <- symbol "+" <|> symbol "-"
arg <- mathExpr
return $ RegIndex (readReg reg) $ case op of
"+" -> arg
"-" -> Binop Mul (litNum (-1)) arg
addrIndirect :: Parser Expr
addrIndirect = do
addr <- parens mathExpr
return $ AddrIndir addr
directive :: Parser Expr
directive = do
oneOf "#."
ident <- identifier
args <- commaSep directiveArg
return $ Directive ident args
mathOp :: Parser (Expr -> Expr -> Expr)
mathOp = do
op <- operator
return $ case op of
"+" -> Binop Add
"-" -> Binop Sub
"*" -> Binop Mul
"/" -> Binop Div
"<" -> Binop Lt
">" -> Binop Gt
"%" -> Binop Mod
"<<" -> Binop LShift
">>" -> Binop RShift
"|" -> Binop Or
"&" -> Binop And
"^" -> Binop Xor
parensExpr :: Parser Expr -> Parser Expr
parensExpr p = do
xpr <- parens p
return $ Parens xpr
antiQuoteExpr :: Parser Expr
antiQuoteExpr = do
symbol "@{"
q <- many $ noneOf "}"
symbol "}"
return $ AntiQuote q
antiQuoteStr :: Parser Expr
antiQuoteStr = do
symbol "@s{"
q <- many $ noneOf "}"
symbol "}"
return $ AntiQuoteStr q
antiQuote = try antiQuoteStr <|> antiQuoteExpr
binOp :: Parser Expr
binOp = chainl1 (try num <|> try antiQuote <|> try labelref <|> try (parensExpr mathExpr)) mathOp
mathExpr :: Parser Expr
mathExpr = try binOp
<|> try num
<|> try antiQuote
<|> labelref
directiveArg :: Parser Expr
directiveArg = try mathExpr
<|> asmstring
<|> parens directiveArg
argExpr :: Parser Expr
argExpr = try register
<|> try regIndirect
<|> try regIndex
<|> try binOp
<|> antiQuote
statement :: Parser Expr
statement = try antiQuoteExpr <|> try directive <|> try constAssign <|> try asmlabel <|> instr
parseStatements :: Parser [Expr]
parseStatements = do
let parseLine = whiteSpace >> optionMaybe (try statement)
stmnts <- parseLine `sepBy` many1 newline
return $ catMaybes stmnts
-- Convert top level 'Parens' exprs to AddrIndir exprs. Consider merging with 'removeParens' pass
indirPass :: [Expr] -> [Expr]
indirPass = map conv
where conv (Instr i args) = Instr i (map convParens args)
conv x = x
convParens (Parens xpr) = AddrIndir xpr
convParens x = x
-- Remove 'Parens' exprs, as we no longer need them
removeParens :: [Expr] -> [Expr]
removeParens = map conv
where conv (Parens xpr) = conv xpr
conv (Binop op l r) = Binop op (conv l) (conv r)
conv (Directive str xs) = Directive str (removeParens xs)
conv (Define str xpr) = Define str (conv xpr)
conv (RegIndex r xpr) = RegIndex r (conv xpr)
conv (Instr i xs) = Instr i (removeParens xs)
conv xpr = xpr
-- Parses an argument
parseArg :: String -> Either String Expr
parseArg x = case parse (whiteSpace >> argExpr) "" x of
Left err -> Left $ show err
Right ast -> Right . head . removeParens . indirPass $ [ast]
-- Parses a single statement
parseStatement :: String -> Either String Expr
parseStatement x = case parse (whiteSpace >> statement) "" x of
Left err -> Left $ show err
Right ast -> Right . head . removeParens . indirPass $ [ast]
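-- Illustrative only (not part of the original module); the expected shapes
-- depend on the register and literal constructors in Asm.Expr, so treat the
-- annotated results as assumptions rather than guarantees.
_parseArgExamples :: (Either String Expr, Either String Expr)
_parseArgExamples = (parseArg "$FF + 1", parseArg "(ix + 2)")
-- first: a Binop Add of two numeric literals; second: a RegIndex on IX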
-- preprocesses and parses text; returns an error or an AST
-- parseText fname contents will parse contents and report errors as coming from fname
parseText :: String -> String -> Either String [Expr]
parseText fname contents =
case parse parseStatements fname (preprocess contents) of
Left err -> Left $ show err
Right ast -> Right $ removeParens . indirPass $ ast
-- Reads, preprocesses, and parses a file, resolving #include directives.
parseFile :: FilePath -> IO (Either String [Expr])
parseFile fname = do
contents <- readWithIncludes fname
return $ parseText fname contents
printTree :: [Expr] -> String
printTree xprs = unlines (map show xprs)
| unknownloner/calccomp | Asm/Parser.hs | mit | 9,656 | 0 | 16 | 2,865 | 3,404 | 1,632 | 1,772 | 308 | 12 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Inference(infer,Context,equations,typeToTypeExpr) where
import Data.List(nub,delete)
import Data.Set as S(Set(..),empty,singleton,insert,delete,union,fromList,toList)
import Control.Monad.Identity
import Control.Monad.Trans.State
import Control.Monad.Trans.Class
import Control.Monad.Trans.Writer
import Unification
import Syntax
import ProofTree
type InferenceState a = StateT UsedTypeVars Maybe a
-- TE standing for Type Expression (type TypeExpr)
-- Meaning that we are considering type expressions (with variables to be used
-- in unification) rather than simple types.
type TEContext a = [TECtxtJudgment a]
type TECtxtJudgment a = (a, TypeExpr)
type TypeJudgment a = (LambdaTerm a, Type)
filterElement :: (a -> Bool) -> [a] -> Maybe a
filterElement f list = let filtered = filter f list
in if length filtered > 0
then Just $ head filtered
else Nothing
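-- Tiny illustration (not in the original source) of 'filterElement': it
-- returns the first element satisfying the predicate, if any.
_filterElementExample :: Maybe Int
_filterElementExample = filterElement even [1, 3, 4, 6]
-- ==> Just 4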
varTypeJudgment :: Eq a
=> a -- Type of variables
-> TEContext a -- Typing context, parameterized
-- by the type of term variables
-> Maybe (TECtxtJudgment a) -- Judgement that involves
-- that variable, if any
varTypeJudgment x = filterElement ((x ==) . fst)
-- Extract the set of variables that are judged in a context.
varsOfContext :: Ord a => TEContext a -> Set a
varsOfContext c = fromList (varsOfContext' c)
where varsOfContext' [] = []
varsOfContext' ((a, t) : rest) = a : (varsOfContext' rest)
-- Pick a fresh variable number
newTypeVar :: InferenceState Int
newTypeVar = do usedTypes <- get
let newVar = pickFresh usedTypes
put (newVar:usedTypes)
return newVar
strongDelete :: Eq a => a -> [a] -> [a]
strongDelete x list = let newList = Data.List.delete x list
in if length list == length newList
then error "strongDelete failed"
else newList
strongInsert :: Eq a => TECtxtJudgment a -> TEContext a -> TEContext a
strongInsert (v,t) ctxt = if length (filter (\(x,_) -> x == v) ctxt) > 0
then error "strongInsert failed"
else (v,t) : ctxt
-- Compute type equations from term.
-- Returns the set of equations, the type variable associated with the input
-- lambda term, and the context in which the term is typable.
-- The output context is a superset of the input context.
-- Notice that a term is not closed iff the output context is non-empty.
equations :: (Ord a, FreshPickable a)
=> TEContext a
-> LambdaTerm a
-> InferenceState (TEContext a, Int, Equations)
-- -> State UsedTypeVars (TEContext a, Int, Equations)
equations gamma0 (Var x) =
case varTypeJudgment x gamma0 of
Just (_, t) -> do newV <- newTypeVar
return (gamma0, newV, singleton (TEVar newV, t))
Nothing -> do newV <- newTypeVar
return (strongInsert (x, TEVar newV) gamma0,
newV, fromList [])
equations gamma0 term@(Abstr v t) = do
boundVType <- newTypeVar
let freshVar = (pickFresh . toList) (varsOfContext gamma0)
(Abstr v' t') = alphaRename freshVar term
(gamma, typ, eq) <- equations (strongInsert (v', TEVar boundVType) gamma0) t'
freshT <- newTypeVar
return (strongDelete (v', TEVar boundVType) gamma,
freshT,
insert (TEVar freshT, TEArrow (TEVar boundVType) (TEVar typ)) eq)
equations gamma0 term@(Appl term1 term2) = do
(gamma1, t1, eq1) <- equations gamma0 term1
(gamma2, t2, eq2) <- equations gamma0 term2
freshT <- newTypeVar
return (uniteUnique gamma1 gamma2,
freshT,
insert (TEVar t1, TEArrow (TEVar t2) (TEVar freshT))
(eq1 `union` eq2 `union` (equateContexts gamma1 gamma2)))
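-- Worked example (comments only, not in the original source): running
-- 'equations' on (Appl (Var "f") (Var "x")) in the empty context introduces a
-- fresh type variable for each of "f" and "x" via the Var case, and the Appl
-- case then records the equation t_f = t_x -> t_result, which 'unify' later
-- solves into a substitution.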
-- Make union of two contexts, dropping possible duplicates
uniteUnique :: forall a . Eq a => TEContext a -> TEContext a -> TEContext a
uniteUnique ctxt1 ctxt2 = nub $ ctxt1 ++ (map mapper ctxt2)
where mapper :: TECtxtJudgment a -> TECtxtJudgment a
mapper (v,t) = case varTypeJudgment v ctxt1 of
Just (vv,tt) -> (vv,tt)
Nothing -> (v,t)
-- Create equations that equate types of duplicate judgements
equateContexts :: forall a . Eq a => TEContext a -> TEContext a -> Equations
equateContexts [] _ = empty
equateContexts ((x, t):rest) ctxt =
case varTypeJudgment x ctxt of
Just (_, t') -> S.insert (t,t') (equateContexts rest ctxt)
Nothing -> equateContexts rest ctxt
-- Infer the type of a term in a given context.
inferTE :: forall a . (Ord a, FreshPickable a)
=> TEContext a
-> LambdaTerm a
-> InferenceState (TEContext a, TypeExpr)
inferTE ctxt term = do (newCtxt, typeVar, eqs) <- equations ctxt term
sub <- lift $ unify eqs
let inferredType = applySubstitution (TEVar typeVar) sub
inferredContext = subsContext sub newCtxt
return (inferredContext, inferredType)
where subsContext :: Substitution -> TEContext a -> TEContext a
subsContext sub = map subsJudgment
where subsJudgment (x, t) = (x, applySubstitution t sub)
infer :: (Ord a, FreshPickable a)
=> Context a
-> LambdaTerm a
-> Maybe (Context a, Type)
infer ctxt term = do (teC, te) <- runInference (inferTE (contextToTEContext ctxt) term)
let maxmax = maxTEConst te
newCtxt = teContextToContext (maxmax + 1) teC
newType = instantiateTypeVar (maxmax +1) te
return (newCtxt, newType)
runInference :: InferenceState (TEContext a, TypeExpr) -> Maybe (TEContext a, TypeExpr)
runInference state = case runStateT state [] of
Just ((c,t),_) -> Just (c,t)
Nothing -> Nothing
maxTEConst :: TypeExpr -> Int
maxTEConst (TEVar _) = -1
maxTEConst (TEConst x) = x
maxTEConst (TEArrow t1 t2) = max (maxTEConst t1) (maxTEConst t2)
contextToTEContext :: Context a -> TEContext a
contextToTEContext [] = []
contextToTEContext ((x,t):rest) = (x, typeToTypeExpr t) : contextToTEContext rest
teContextToContext :: Int -> TEContext a -> Context a
teContextToContext startInt ctxt = Prelude.map mapper ctxt
where mapper (x,t) = (x, instantiateTypeVar startInt t)
typeToTypeExpr :: Type -> TypeExpr
typeToTypeExpr (TypeVar x) = TEConst x
typeToTypeExpr (Arrow t1 t2) = TEArrow (typeToTypeExpr t1) (typeToTypeExpr t2)
instantiateTypeVar :: Int -> TypeExpr -> Type
instantiateTypeVar startInt (TEVar x) = TypeVar (startInt + x)
instantiateTypeVar startInt (TEConst x) = TypeVar x
instantiateTypeVar startInt (TEArrow t1 t2) = Arrow (instantiateTypeVar startInt t1)
(instantiateTypeVar startInt t2)
| fsestini/stlc-machine | src/Inference.hs | mit | 7,199 | 0 | 14 | 2,055 | 2,150 | 1,118 | 1,032 | 128 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module Database.Memcached.Binary.Internal where
import Network
import Foreign.Ptr
import Foreign.Storable
import Foreign.Marshal.Utils
import Foreign.Marshal.Alloc
import System.IO
import Control.Monad
import Control.Exception
import Data.Word
import Data.Pool
import Data.Storable.Endian
import qualified Data.HashMap.Strict as H
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Unsafe as S
import Database.Memcached.Binary.Types
import Database.Memcached.Binary.Types.Exception
import Database.Memcached.Binary.Internal.Definition
newtype Connection = Connection (Pool Handle)
withConnection :: ConnectInfo -> (Connection -> IO a) -> IO a
withConnection i m = withSocketsDo $ bracket (connect i) close m
connect :: ConnectInfo -> IO Connection
connect i = fmap Connection $
createPool (connect' i) (\h -> quit h >> hClose h) 1
(connectionIdleTime i) (numConnection i)
connect' :: ConnectInfo -> IO Handle
connect' i = loop (connectAuth i)
where
loop [] = do
connectTo (connectHost i) (connectPort i)
loop [a] = do
h <- connectTo (connectHost i) (connectPort i)
auth a (\_ -> return h) throwIO h
loop (a:as) = do
h <- connectTo (connectHost i) (connectPort i)
handle (\(_::IOError) -> loop as) $
auth a (\_ -> return h) (\_ -> loop as) h
close :: Connection -> IO ()
close (Connection p) = destroyAllResources p
useConnection :: (Handle -> IO a) -> Connection -> IO a
useConnection f (Connection p) = withResource p f
pokeWord8 :: Ptr a -> Word8 -> IO ()
pokeWord8 = poke . castPtr
pokeWord16be :: Ptr a -> Word16 -> IO ()
pokeWord16be p w = poke (castPtr p) (BE w)
pokeWord32be :: Ptr a -> Word32 -> IO ()
pokeWord32be p w = poke (castPtr p) (BE w)
pokeWord64be :: Ptr a -> Word64 -> IO ()
pokeWord64be p w = poke (castPtr p) (BE w)
peekWord8 :: Ptr a -> IO Word8
peekWord8 = peek . castPtr
peekWord16be :: Ptr a -> IO Word16
peekWord16be p = peek (castPtr p) >>= \(BE w) -> return w
peekWord32be :: Ptr a -> IO Word32
peekWord32be p = peek (castPtr p) >>= \(BE w) -> return w
peekWord64be :: Ptr a -> IO Word64
peekWord64be p = peek (castPtr p) >>= \(BE w) -> return w
pokeByteString :: Ptr a -> S.ByteString -> IO ()
pokeByteString p v =
S.unsafeUseAsCString v $ \cstr ->
copyBytes (castPtr p) cstr (S.length v)
pokeLazyByteString :: Ptr a -> L.ByteString -> IO ()
pokeLazyByteString p v =
void $ L.foldlChunks (\mi s -> mi >>= \i -> do
pokeByteString (plusPtr p i) s
return $ i + S.length s
) (return 0) v
data Header
data Request
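-- Layout written by 'mallocRequest' below (the memcached binary protocol
-- request header; offsets are in bytes):
--   0 magic (0x80 = request) | 1 opcode | 2-3 key length (big-endian)
--   4 extras length | 5 data type | 6-7 reserved
--   8-11 total body length | 12-15 opaque | 16-23 CAS
-- followed by extras, key and value.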
mallocRequest :: OpCode -> Key -> Word8 -> (Ptr Request -> IO ())
-> Int -> (Ptr Request -> IO ()) -> Word32 -> CAS -> IO (Ptr Request)
mallocRequest (OpCode o) key elen epoke vlen vpoke opaque (CAS cas) = do
let tlen = S.length key + fromIntegral elen + vlen
p <- mallocBytes (24 + fromIntegral tlen)
pokeWord8 p 0x80
pokeWord8 (plusPtr p 1) o
pokeWord16be (plusPtr p 2) (fromIntegral $ S.length key)
pokeWord8 (plusPtr p 4) elen
pokeWord8 (plusPtr p 5) 0x00
pokeWord16be (plusPtr p 6) 0x00
pokeWord32be (plusPtr p 8) (fromIntegral tlen)
pokeWord32be (plusPtr p 12) opaque
pokeWord64be (plusPtr p 16) cas
epoke (plusPtr p 24)
pokeByteString (plusPtr p $ 24 + fromIntegral elen) key
vpoke (plusPtr p $ 24 + fromIntegral elen + S.length key)
return p
{-# INLINE mallocRequest #-}
sendRequest :: OpCode -> Key -> Word8 -> (Ptr Request -> IO ())
-> Int -> (Ptr Request -> IO ()) -> Word32 -> CAS -> Handle -> IO ()
sendRequest op key elen epoke vlen vpoke opaque cas h =
bracket (mallocRequest op key elen epoke vlen vpoke opaque cas) free $ \req -> do
hPutBuf h req (24 + S.length key + fromIntegral elen + vlen)
hFlush h
{-# INLINE sendRequest #-}
type Failure a = MemcachedException -> IO a
peekResponse :: (Ptr Header -> IO a) -> Failure a -> Handle -> IO a
peekResponse success failure h = allocaBytes 24 $ \p -> do
len <- hGetBuf h p 24
if len /= 24
then failure DataReadFailed
else do
peekWord16be (plusPtr p 6) >>= \st ->
if st == 0
then success p
else do
bl <- peekWord32be (plusPtr p 8)
failure . MemcachedException st =<< S.hGet h (fromIntegral bl)
{-# INLINE peekResponse #-}
withRequest :: OpCode -> Key -> Word8 -> (Ptr Request -> IO ())
-> Int -> (Ptr Request -> IO ()) -> CAS
-> (Handle -> Ptr Header -> IO a) -> Failure a -> Handle -> IO a
withRequest op key elen epoke vlen vpoke cas success failure h = do
sendRequest op key elen epoke vlen vpoke 0 cas h
peekResponse (success h) failure h
getExtraLength :: Ptr Header -> IO Word8
getExtraLength p = peekWord8 (plusPtr p 4)
getKeyLength :: Ptr Header -> IO Word16
getKeyLength p = peekWord16be (plusPtr p 2)
getTotalLength :: Ptr Header -> IO Word32
getTotalLength p = peekWord32be (plusPtr p 8)
getCAS :: Ptr Header -> IO CAS
getCAS p = fmap CAS $ peekWord64be (plusPtr p 16)
getOpaque :: Ptr Header -> IO Word32
getOpaque p = peekWord32be (plusPtr p 12)
nop :: Ptr Request -> IO ()
nop _ = return ()
inspectResponse :: Handle -> Ptr Header
-> IO (S.ByteString, S.ByteString, L.ByteString)
inspectResponse h p = do
el <- getExtraLength p
kl <- getKeyLength p
tl <- getTotalLength p
e <- S.hGet h $ fromIntegral el
k <- S.hGet h $ fromIntegral kl
v <- L.hGet h $ fromIntegral tl - fromIntegral el - fromIntegral kl
return (e,k,v)
getSuccessCallback :: (Flags -> Value -> IO a) -> Failure a
-> Handle -> Ptr Header -> IO a
getSuccessCallback success failure h p = do
elen <- getExtraLength p
tlen <- getTotalLength p
len <- hGetBuf h p 4
if len /= 4
then failure DataReadFailed
else do
flags <- peekWord32be p
value <- L.hGet h (fromIntegral tlen - fromIntegral elen)
success flags value
get :: (Flags -> Value -> IO a) -> Failure a
-> Key -> Handle -> IO a
get success failure key =
withRequest opGet key 0 nop 0 nop (CAS 0)
(getSuccessCallback success failure) failure
getWithCAS :: (CAS -> Flags -> Value -> IO a) -> Failure a
-> Key -> Handle -> IO a
getWithCAS success failure key =
withRequest opGet key 0 nop 0 nop (CAS 0)
(\h p -> getCAS p >>= \c -> getSuccessCallback (success c) failure h p) failure
setAddReplace :: IO a -> Failure a -> OpCode -> CAS
-> Key -> Value -> Flags -> Expiry -> Handle -> IO a
setAddReplace success failure o cas key value flags expiry = withRequest o key
8 (\p -> pokeWord32be p flags >> pokeWord32be (plusPtr p 4) expiry)
(fromIntegral $ L.length value) (flip pokeLazyByteString value) cas (\_ _ -> success) failure
setAddReplaceWithCAS :: (CAS -> IO a) -> Failure a -> OpCode -> CAS
-> Key -> Value -> Flags -> Expiry -> Handle -> IO a
setAddReplaceWithCAS success failure o cas key value flags expiry = withRequest o key
8 (\p -> pokeWord32be p flags >> pokeWord32be (plusPtr p 4) expiry)
(fromIntegral $ L.length value) (flip pokeLazyByteString value) cas (\_ p -> getCAS p >>= success) failure
delete :: IO a -> Failure a -> CAS -> Key -> Handle -> IO a
delete success failure cas key =
withRequest opDelete key 0 nop 0 nop cas (\_ _ -> success) failure
incrDecr :: (Word64 -> IO a) -> Failure a -> OpCode -> CAS
-> Key -> Delta -> Initial -> Expiry -> Handle -> IO a
incrDecr success failure op cas key delta initial expiry =
withRequest op key 20 extra 0 nop cas success' failure
where
extra p = do
pokeWord64be p delta
pokeWord64be (plusPtr p 8) initial
pokeWord32be (plusPtr p 16) expiry
success' h p = do
len <- hGetBuf h p 8
if len /= 8
then failure DataReadFailed
else peekWord64be p >>= success
quit :: Handle -> IO ()
quit h = do
sendRequest opQuit "" 0 nop 0 nop 0 (CAS 0) h
peekResponse (\_ -> return ()) (\_ -> return ()) h
flushAll :: IO a -> Failure a -> Handle -> IO a
flushAll success =
withRequest opFlush "" 0 nop 0 nop (CAS 0) (\_ _ -> success)
flushWithin :: IO a -> Failure a -> Expiry -> Handle -> IO a
flushWithin success failure w =
withRequest opFlush "" 4 (flip pokeWord32be w) 0 nop (CAS 0)
(\_ _ -> success) failure
noOp :: IO a -> Failure a -> Handle -> IO a
noOp success =
withRequest opNoOp "" 0 nop 0 nop (CAS 0) (\_ _ -> success)
version :: (S.ByteString -> IO a) -> Failure a -> Handle -> IO a
version success =
withRequest opVersion "" 0 nop 0 nop (CAS 0)
(\h p -> getTotalLength p >>= S.hGet h . fromIntegral >>= success)
appendPrepend :: IO a -> Failure a -> OpCode -> CAS
-> Key -> Value -> Handle -> IO a
appendPrepend success failure op cas key value = withRequest op key 0 nop
(fromIntegral $ L.length value) (flip pokeLazyByteString value)
cas (\_ _ -> success) failure
stats :: Handle -> IO (H.HashMap S.ByteString S.ByteString)
stats h = loop H.empty
where
loop m = do
sendRequest opStat "" 0 nop 0 nop 0 (CAS 0) h
peekResponse (success m) throwIO h
success m p = getTotalLength p >>= \tl ->
if tl == 0
then return m
else do
kl <- getKeyLength p
k <- S.hGet h (fromIntegral kl)
v <- S.hGet h (fromIntegral tl - fromIntegral kl)
loop (H.insert k v m)
verbosity :: IO a -> Failure a -> Word32 -> Handle -> IO a
verbosity success failure v = withRequest opVerbosity ""
4 (flip pokeWord32be v) 0 nop (CAS 0) (\_ _ -> success) failure
touch :: (Flags -> Value -> IO a) -> Failure a -> OpCode
-> Key -> Expiry -> Handle -> IO a
touch success failure op key e =
withRequest op key 4 (flip pokeWord32be e) 0 nop (CAS 0)
(getSuccessCallback success failure) failure
saslListMechs :: (S.ByteString -> IO a) -> Failure a
-> Handle -> IO a
saslListMechs success failure =
withRequest opSaslListMechs "" 0 nop 0 nop (CAS 0)
(\h p -> getTotalLength p >>= S.hGet h . fromIntegral >>= success)
failure
auth :: Auth -> (S.ByteString -> IO a) -> Failure a -> Handle -> IO a
auth (Plain u w) success next h = do
sendRequest opSaslAuth "PLAIN" 0 nop (S.length u + S.length w + 2) pokeCred 0 (CAS 0) h
peekResponse consumeResponse next h
where
ul = S.length u
pokeCred p = do
pokeWord8 p 0
pokeByteString (plusPtr p 1) u
pokeWord8 (plusPtr p $ ul + 1) 0
pokeByteString (plusPtr p $ ul + 2) w
consumeResponse p = do
l <- getTotalLength p
success =<< S.hGet h (fromIntegral l)
| philopon/memcached-binary | src/Database/Memcached/Binary/Internal.hs | mit | 11,079 | 0 | 20 | 2,952 | 4,569 | 2,220 | 2,349 | -1 | -1 |
{-# LANGUAGE FlexibleInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Parsing.AST where
import Test.QuickCheck hiding (variant)
import Halt.AST
import Halt.Utility
import Control.Applicative
import Data.Char
import Data.List
import Data.Maybe
import Halt.Printing.Pretty
validNames :: [String]
validNames = ["apple", "pear", "banana", "pineapple", "grape", "lemon", "orange", "tangerine"]
capitalLetter :: Gen Char
capitalLetter = elements ['A'..'Z']
lowerLetter :: Gen Char
lowerLetter = toLower <$> capitalLetter
identifierSymbol :: Gen Char
identifierSymbol = elements $ ['A'..'Z'] ++ ['a'..'z'] ++ "'_"
capitalize :: String -> String
capitalize (x : xs) = toUpper x : xs
capitalize _ = error "Cannot capitalize an empty string"
capitalIdentifier :: Gen String
--capitalIdentifier = (:) <$> capitalLetter <*> (resize 5 $ listOf identifierSymbol)
capitalIdentifier = concat <$> resize 2 (listOf1 $ capitalize <$> elements validNames)
lowerIdentifier :: Gen String
--lowerIdentifier = (:) <$> lowerLetter <*> (resize 5 $ listOf identifierSymbol)
lowerIdentifier = elements validNames <++>
(concat <$> resize 1 (listOf $ capitalize <$> elements validNames))
rightCapitalIdentifier :: Gen String
rightCapitalIdentifier = intercalate "." <$> resize 3 (listOf1 capitalIdentifier)
rightLowerIdentifier :: Gen String
rightLowerIdentifier = resize 2 rightCapitalIdentifier <++> return "." <++> lowerIdentifier
rightIdentifier :: Gen String
rightIdentifier = oneof [rightCapitalIdentifier, rightLowerIdentifier]
positiveNum :: (Arbitrary a, Num a, Ord a) => Gen a
positiveNum = (\(Positive n) -> n) <$> arbitrary
data ExpressionVariant = NonLiteral | WithLiterals
expression' :: ExpressionVariant -> Gen Expression
expression' variant = frequency $
[ (1, FunctionApp <$> expression' NonLiteral <*> resize 4 (listOf1 arbitrary))
, (3, Identifier <$> rightIdentifier) ]
++ case variant of WithLiterals -> [ (3, IntLiteral <$> positiveNum)
, (3, DoubleLiteral <$> positiveNum)
, (3, StringLiteral <$> arbitrary) ]
_ -> []
instance Arbitrary Expression where
arbitrary = expression' WithLiterals
shrink (FunctionApp f args) = f : args
shrink _ = []
instance Arbitrary Bound where
arbitrary = frequency
[ (2, StaticBound <$> (IntLiteral <$> positiveNum))
, (1, DynamicWithStaticBound <$> arbitrary <*> (IntLiteral <$> positiveNum)) ]
maybeGen :: Gen a -> Gen (Maybe a)
maybeGen g = oneof [Just <$> g, return Nothing]
statements3 :: Gen [Statement]
statements3 = resize 3 $ listOf1 arbitrary
instance Arbitrary Statement where
arbitrary = frequency
[ (3, Assignment <$> arbitrary <*> lowerIdentifier <*> arbitrary)
, (1, If <$> arbitrary <*> statements3 <*> maybeGen statements3)
, (2, For <$> lowerIdentifier <*> arbitrary <*> arbitrary <*> statements3)
, (3, Return <$> arbitrary)
, (3, NakedExpr <$> arbitrary) ]
shrink (If _ stmts mby) = stmts ++ fromMaybe [] mby
shrink (For _ _ _ stmts) = stmts
shrink _ = []
data TypeLiteralVariant = WithVar | WithUnit | WithNothing
typeLiteral :: TypeLiteralVariant -> Gen TypeLiteral
typeLiteral variant = frequency $
[ (3, Parameter <$> lowerLetter)
, (3, Concrete <$> rightCapitalIdentifier)
, (1, Generic <$> rightCapitalIdentifier <*> resize 3 (listOf1 (typeLiteral WithNothing)))
, (2, Function <$> typeLiteral WithNothing <*> typeLiteral WithUnit) ]
++ case variant of WithVar -> [(3, return Var)]
WithUnit -> [(3, return Unit)]
WithNothing -> []
instance Arbitrary TypeLiteral where
arbitrary = typeLiteral WithVar
shrink (Generic _ typs) = typs
shrink (Function l r) = [l, r]
shrink _ = []
import' :: Gen Declaration
import' = oneof [Import <$> rightCapitalIdentifier
, ImportAs <$> rightCapitalIdentifier <*> capitalIdentifier]
numArgs :: TypeLiteral -> Int
numArgs (Function _ r) = 1 + numArgs r
numArgs _ = 0
function :: Gen (Declaration, Declaration)
function = do
name <- lowerIdentifier
typ <- Function <$> typeLiteral WithNothing <*> typeLiteral WithUnit
let n = numArgs typ
args <- vectorOf n lowerIdentifier
body <- statements3
return ( FunctionType name typ
, FunctionDecl name args body )
dataType :: Gen Declaration
dataType = oneof [ data'
, record ]
data' :: Gen Declaration
data' = Data <$> capitalIdentifier
<*> resize 3 (listOf lowerLetter) <*> resize 4 (listOf1 dataCase)
record :: Gen Declaration
record = Record <$> capitalIdentifier
<*> resize 4 (listOf1 ((,) <$> lowerIdentifier <*> typeLiteral WithNothing))
dataCase :: Gen (String, [TypeLiteral])
dataCase = (,) <$> capitalIdentifier <*> resize 3 (listOf $ typeLiteral WithNothing)
newtype Program = Program [Declaration]
instance Show Program where
show (Program decls) = prettyShow decls
newtype DataType = DataType { getDeclaration :: Declaration }
shrinkDataType :: Declaration -> [Declaration]
shrinkDataType (Data name params cases)
| length cases > 1 = (Data name params . return) <$> cases
| otherwise = []
shrinkDataType (Record name fields)
| length fields > 1 = (Record name . return) <$> fields
| otherwise = []
shrinkDataType _ = []
instance Arbitrary DataType where
arbitrary = DataType <$> dataType
shrink (DataType d) = DataType <$> shrinkDataType d
instance Arbitrary Program where
arbitrary = do
imports <- resize 3 $ listOf1 import'
dataTypes <- resize 3 $ listOf1 (getDeclaration <$> arbitrary)
functions <- concatMap (\(t, d) -> [t, d]) <$> resize 4 (listOf1 function)
return $ Program $ imports ++ dataTypes ++ functions
shrink (Program [_]) = []
shrink (Program decls) = map (Program . return) decls
test :: IO ()
test = do
progs <- sample' (arbitrary :: Gen Program)
let str = intercalate "\n\n" $ map (prettyShow . (\(Program d) -> d)) progs
writeFile "randomPrograms" str
| LukaHorvat/Halt | test/Parsing/AST.hs | mit | 6,222 | 0 | 17 | 1,430 | 1,958 | 1,023 | 935 | 135 | 3 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{- |
Module : Data.RTree.Base
Copyright : Copyright (c) 2015, Birte Wagner, Sebastian Philipp
License : MIT
Maintainer : Birte Wagner, Sebastian Philipp ([email protected])
Stability : experimental
Portability: not portable
Internal implementations. Use 'Data.RTree' instead, or use at your own risk.
-}
module Data.RTree.Base
(
-- * Data Type
RTree (..)
-- * Constructors
, empty
, singleton
-- * Modification
, insert
, insertWith
, delete
, mapMaybe
-- ** Merging
, union
, unionWith
-- * Searching and Properties
, lookup
, intersectWithKey
, intersect
, lookupRange
, lookupRangeWithKey
, lookupContainsRangeWithKey
, lookupContainsRange
, length
, null
, keys
, values
-- * Lists
, fromList
, toList
-- * Internal and Testing
, foldWithMBB
, pp
, isValid
, unionDistinct
, unionDistinctWith
, fromList'
, unionDistinctSplit
, depth
, areaIncreasesWith
, partition
, getChildren
, unionMBB'
, createNodeWithChildren
, n
, splitNode
, node
)
where
import Control.DeepSeq (NFData, rnf)
import Data.Binary
import Data.Function (on)
import Data.List (maximumBy, minimumBy, partition)
import qualified Data.List as L (length, map)
import Data.Maybe (catMaybes, isJust)
import qualified Data.Maybe as Maybe (mapMaybe)
import Data.Typeable (Typeable)
import Data.Semigroup
import GHC.Generics (Generic)
import Prelude hiding (length, lookup, map, null)
import Data.RTree.MBB hiding (mbb)
data RTree a =
Node4 {getMBB :: {-# UNPACK #-} ! MBB, getC1 :: ! (RTree a), getC2 :: ! (RTree a), getC3 :: ! (RTree a), getC4 :: ! (RTree a) }
| Node3 {getMBB :: {-# UNPACK #-} ! MBB, getC1 :: ! (RTree a), getC2 :: ! (RTree a), getC3 :: ! (RTree a) }
| Node2 {getMBB :: {-# UNPACK #-} ! MBB, getC1 :: ! (RTree a), getC2 :: ! (RTree a) }
| Node {getMBB :: MBB, getChildren' :: [RTree a] }
| Leaf {getMBB :: {-# UNPACK #-} ! MBB, getElem :: a}
| Empty
deriving (Show, Eq, Typeable, Generic, Functor)
-- | It is possible to change these constants, but the tree won't be space-optimal anymore.
m, n :: Int
m = 2
n = 4
unionMBB' :: RTree a -> RTree a -> MBB
unionMBB' = unionMBB `on` getMBB
-- ---------------
-- smart constructors
-- | creates an empty tree
empty :: RTree a
empty = Empty
-- | returns 'True', if empty
--
-- prop> null empty = True
null :: RTree a -> Bool
null Empty = True
null _ = False
-- | creates a single element tree
singleton :: MBB -> a -> RTree a
singleton = Leaf
node :: MBB -> [RTree a] -> RTree a
node mbb [x,y] = Node2 mbb x y
node mbb [x,y,z] = Node3 mbb x y z
node mbb [x,y,z,w] = Node4 mbb x y z w
node _ [] = error "node: empty"
node mbb xs = Node mbb xs
createNodeWithChildren :: [RTree a] -> RTree a
createNodeWithChildren c = node (unionsMBB $ getMBB <$> c) c
norm :: RTree a -> RTree a
norm (Node4 mbb x y z w) = Node mbb [x,y,z,w]
norm (Node3 mbb x y z) = Node mbb [x,y,z]
norm (Node2 mbb x y) = Node mbb [x,y]
norm x = x
getChildren :: RTree a -> [RTree a]
getChildren Empty = error "getChildren: Empty"
getChildren Leaf{} = error "getChildren: Leaf"
getChildren t = getChildren' $ norm t
-- ----------------------------------
-- Lists
-- | creates a tree out of pairs
fromList :: [(MBB, a)] -> RTree a
fromList l = fromList' $ uncurry singleton <$> l
-- | merges all singletons into a single tree.
fromList' :: [RTree a] -> RTree a
fromList' [] = empty
fromList' ts = foldr1 unionDistinct ts
-- | creates a list of pairs out of a tree
--
-- prop> toList t = zip (keys t) (values t)
toList :: RTree a -> [(MBB, a)]
toList Empty = []
toList (Leaf mbb x) = [(mbb, x)]
toList t = concatMap toList $ getChildren t
-- | returns all keys in this tree
--
-- prop> toList t = zip (keys t) (values t)
keys :: RTree a -> [MBB]
keys = foldWithMBB (\mbb _ -> [mbb]) (const concat) []
-- | returns all values in this tree
--
-- prop> toList t = zip (keys t) (values t)
values :: RTree a -> [a]
values = foldWithMBB (const pure) (const concat) []
-- ----------------------------------
-- insert
-- | Inserts an element with the given 'MBB' and a value into a tree. The combining function will be used if an entry with the same 'MBB' already exists.
insertWith :: (a -> a -> a) -> MBB -> a -> RTree a -> RTree a
insertWith f mbb e = unionDistinctWith f (singleton mbb e)
-- | Inserts an element with the given 'MBB' and a value into a tree. An existing value will be overwritten with the given one.
--
-- prop> insert = insertWith const
insert :: MBB -> a -> RTree a -> RTree a
insert = insertWith const
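-- A short usage sketch (not part of the original documentation). It assumes
-- the 'mbb' corner-coordinate constructor from "Data.RTree.MBB", which is
-- hidden in this module:
--
-- >>> let t = insert (mbb 0 0 1 1) 'a' (singleton (mbb 2 2 3 3) 'b')
-- >>> lookup (mbb 0 0 1 1) t
-- Just 'a'
-- >>> length t
-- 2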
simpleMergeEqNode :: (a -> a -> a) -> RTree a -> RTree a -> RTree a
simpleMergeEqNode f l@Leaf{} r = Leaf (getMBB l) (on f getElem l r)
simpleMergeEqNode _ l _ = l
-- | Unifies left and right 'RTree'. Will create invalid trees if the tree is not a leaf and contains 'MBB's which
-- also exist in the left tree. Much faster than union, though.
unionDistinctWith :: (a -> a -> a) -> RTree a -> RTree a -> RTree a
unionDistinctWith _ Empty{} t = t
unionDistinctWith _ t Empty{} = t
unionDistinctWith f t1@Leaf{} t2@Leaf{}
| on (==) getMBB t1 t2 = simpleMergeEqNode f t1 t2
| otherwise = createNodeWithChildren [t1, t2] -- root case
unionDistinctWith f left right
| depth left > depth right = unionDistinctWith f right left
| depth left == depth right = fromList' $ getChildren left ++ [right]
| L.length (getChildren newNode) > n = createNodeWithChildren $ splitNode newNode
| otherwise = newNode
where
newNode = addLeaf f left right
-- | Unifies left and right 'RTree'. Will create invalid trees if the tree is not a leaf and contains 'MBB's which
-- also exist in the left tree. Much faster than union, though.
unionDistinct :: RTree a -> RTree a -> RTree a
unionDistinct = unionDistinctWith const
addLeaf :: (a -> a -> a) -> RTree a -> RTree a -> RTree a
addLeaf f left right
| depth left + 1 == depth right = node (newNode `unionMBB'` right) (newNode : nonEq)
| otherwise = node (left `unionMBB'` right) newChildren
where
newChildren = findNodeWithMinimalAreaIncrease f left (getChildren right)
(eq, nonEq) = partition (on (==) getMBB left) $ getChildren right
newNode = case eq of
[] -> left
[x] -> simpleMergeEqNode f left x
_ -> error "addLeaf: invalid RTree"
findNodeWithMinimalAreaIncrease :: (a -> a -> a) -> RTree a -> [RTree a] -> [RTree a]
findNodeWithMinimalAreaIncrease f leaf children = splitMinimal xsAndIncrease
where
-- xsAndIncrease :: [(RTree a, Double)]
xsAndIncrease = zip children $ areaIncreasesWith leaf <$> children
minimalIncrease = minimum $ snd <$> xsAndIncrease
-- xsAndIncrease' :: [(RTree a, Double)]
splitMinimal [] = []
splitMinimal ((t,mbb):xs)
| mbb == minimalIncrease = unionDistinctSplit f leaf t ++ (fst <$> xs)
| otherwise = t : splitMinimal xs
unionDistinctSplit :: (a -> a -> a) -> RTree a -> RTree a -> [RTree a]
unionDistinctSplit f leaf e
| L.length (getChildren newLeaf) > n = splitNode newLeaf
| otherwise = [newLeaf]
where
newLeaf = addLeaf f leaf e
-- | /O(n²)/ solution
splitNode :: RTree a -> [RTree a]
splitNode Leaf{} = error "splitNode: Leaf"
splitNode e = [createNodeWithChildren x1, createNodeWithChildren x2]
where
(l, r) = findGreatestArea $ getChildren e
(x1, x2) = quadSplit [l] [r] unfinished
unfinished = filter (on (/=) getMBB l) $ filter (on (/=) getMBB r) $ getChildren e
findGreatestArea :: [RTree a] -> (RTree a, RTree a)
findGreatestArea xs = (x', y')
where
xs' = zip xs [(1::Int)..]
listOfTripels = [(fst x, fst y, on unionMBB' fst x y) | x <- xs', y <- xs', ((<) `on` snd) x y]
(x', y', _) = maximumBy (compare `on` (\(_,_,x) -> area x)) listOfTripels
quadSplit :: [RTree a] -> [RTree a] -> [RTree a] -> ([RTree a], [RTree a])
quadSplit left right [] = (left, right)
quadSplit left right unfinished
| L.length left + L.length unfinished <= m = (left ++ unfinished, right)
| L.length right + L.length unfinished <= m = (left, right ++ unfinished)
| isLeft'' = quadSplit (minimumElem : left) right newRest
| otherwise = quadSplit left (minimumElem : right) newRest
where
-- makeTripel :: RTree a -> (RTree a, Bool, Double)
makeTripel x = (x, isLeft, growth)
where
isLeft = areaIncreasesWithLeft < areaIncreasesWithRight
growth = if isLeft
then areaIncreasesWithLeft
else areaIncreasesWithRight
areaIncreasesWithLeft = areaIncreasesWith x $ createNodeWithChildren left
areaIncreasesWithRight = areaIncreasesWith x $ createNodeWithChildren right
(minimumElem, isLeft'', _) = minimumBy (compare `on` (\(_,_,g) -> g)) $ makeTripel <$> unfinished
newRest = filter (on (/=) getMBB minimumElem) unfinished
--mergeNodes :: RTree a -> RTree a -> RTree a
--mergeNodes x@Node{} y@Node{} = node (unionMBB' x y) (on (++) getChildren x y)
--mergeNodes _ _ = error "no merge for Leafs"
-- ------------
-- helpers
areaIncreasesWith :: RTree a -> (RTree a) -> Double
areaIncreasesWith newElem current = newArea - currentArea
where
currentArea = area $ getMBB current
newArea = area $ unionMBB' newElem current
-- -----------------
-- lookup
-- | returns the value if it exists in the tree
lookup :: MBB -> RTree a -> Maybe a
lookup _ Empty = Nothing
lookup mbb t@Leaf{}
| mbb == getMBB t = Just $ getElem t
| otherwise = Nothing
lookup mbb t = case founds of
[] -> Nothing
x:_ -> Just x
where
matches = filter (\x -> (getMBB x) `containsMBB` mbb) $ getChildren t
founds = catMaybes $ L.map (lookup mbb) matches
-- | returns all keys and values, which intersects with the given bounding box.
intersectWithKey :: MBB -> RTree a -> [(MBB, a)]
intersectWithKey _ Empty = []
intersectWithKey mbb t@Leaf{}
| isJust $ intersectMBB mbb (getMBB t) = [(getMBB t, getElem t)]
| otherwise = []
intersectWithKey mbb t = founds
where matches = filter intersectRTree $ getChildren t
founds = concatMap (intersectWithKey mbb) matches
intersectRTree x = isJust $ mbb `intersectMBB` (getMBB x)
-- | returns all values, which intersects with the given bounding box.
intersect :: MBB -> RTree a -> [a]
intersect mbb t = snd <$> intersectWithKey mbb t
-- | returns all keys and values, which are located in the given bounding box.
lookupRangeWithKey :: MBB -> RTree a -> [(MBB, a)]
lookupRangeWithKey _ Empty = []
lookupRangeWithKey mbb t@Leaf{}
| mbb `containsMBB` (getMBB t) = [(getMBB t, getElem t)]
| otherwise = []
lookupRangeWithKey mbb t = founds
where
matches = filter intersectRTree $ getChildren t
founds = concatMap (lookupRangeWithKey mbb) matches
intersectRTree x = isJust $ mbb `intersectMBB` (getMBB x)
-- | returns all values, which are located in the given bounding box.
lookupRange :: MBB -> RTree a -> [a]
lookupRange mbb t = snd <$> (lookupRangeWithKey mbb t)
-- | returns all keys and values containing the given bounding box
lookupContainsRangeWithKey :: MBB -> RTree a -> [(MBB, a)]
lookupContainsRangeWithKey _ Empty = []
lookupContainsRangeWithKey mbb t@Leaf{}
| (getMBB t) `containsMBB` mbb = [(getMBB t, getElem t)]
| otherwise = []
lookupContainsRangeWithKey mbb t = founds
where
matches = filter intersectRTree $ getChildren t
founds = concatMap (lookupContainsRangeWithKey mbb) matches
intersectRTree x = (getMBB x) `containsMBB` mbb
-- | returns all values containing the given bounding box
lookupContainsRange :: MBB -> RTree a -> [a]
lookupContainsRange mbb t = snd <$> (lookupContainsRangeWithKey mbb t)
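-- The two range queries ('lookupRange' and 'lookupContainsRange') differ in
-- the direction of containment. A sketch, again assuming the 'mbb'
-- constructor from "Data.RTree.MBB" is in scope:
--
-- >>> let t = fromList [(mbb 0 0 1 1, 'a'), (mbb 0 0 4 4, 'b')]
-- >>> lookupRange (mbb 0 0 2 2) t
-- "a"
-- >>> lookupContainsRange (mbb 0 0 2 2) t
-- "b"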
-- -----------
-- delete
-- | Delete a key and its value from the RTree. When the key is not a member of the tree, the original tree is returned.
delete :: MBB -> RTree a -> RTree a
delete _ Empty = Empty
delete mbb t@Leaf{}
| mbb == getMBB t = Empty
| otherwise = t
delete mbb root
| L.length (getChildren newRoot) == 1 = head $ getChildren newRoot
| otherwise = newRoot
where
newRoot = delete' mbb root
delete' :: MBB -> RTree a -> RTree a
delete' mbb t@Leaf{}
| mbb == getMBB t = Empty
| otherwise = t
delete' mbb t = fromList' $ orphans ++ [newValidNode]
where
(matches, noMatches) = partition (\x -> (getMBB x) `containsMBB` mbb) $ getChildren t
matches' = filter (not . null) $ L.map (delete' mbb) matches
(orphans, validMatches) = foldr handleInvalid ([], []) matches'
-- handleInvalid :: RTree a -> ([RTree a], [RTree a]) -> ([RTree a], [RTree a])
handleInvalid l@Leaf{} (orphans', validMatches') = (orphans', l:validMatches')
handleInvalid invalidNode (orphans', validMatches')
| L.length children < m = (children ++ orphans', validMatches')
| otherwise = (orphans', invalidNode:validMatches')
where
children = getChildren invalidNode
newValidNode = createNodeWithChildren $ validMatches ++ noMatches
-- ---------------
foldWithMBB :: (MBB -> a -> b) -> (MBB -> [b] -> b) -> b -> RTree a -> b
foldWithMBB _ _ n' Empty = n'
foldWithMBB f _ _ t@Leaf{} = f (getMBB t) (getElem t)
foldWithMBB f g n' t = g (getMBB t) $ foldWithMBB f g n' <$> (getChildren t)
-- | Unifies the first and the second tree into one. The combining function is used for elements which exist in both trees.
unionWith :: (a -> a -> a) -> RTree a -> RTree a -> RTree a
unionWith _ l Empty = l
unionWith _ Empty r = r
unionWith f t1 t2
| depth t1 <= depth t2 = foldr (uncurry (insertWith f)) t2 (toList t1)
| otherwise = unionWith f t2 t1
-- | Unifies the first and the second tree into one.
-- If an 'MBB' is a key in both trees, the value from the left tree is chosen.
--
-- prop> union = unionWith const
union :: RTree a -> RTree a -> RTree a
union = unionWith const
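-- A sketch of the left bias (assuming 'mbb' from "Data.RTree.MBB" is in scope):
--
-- >>> let l = singleton (mbb 0 0 1 1) "left"
-- >>> let r = fromList [(mbb 0 0 1 1, "right"), (mbb 2 2 3 3, "other")]
-- >>> lookup (mbb 0 0 1 1) (union l r)
-- Just "left"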
-- | map, which also filters Nothing values
mapMaybe :: (a -> Maybe b) -> RTree a -> RTree b
mapMaybe f t = fromList $ Maybe.mapMaybe func $ toList t
where
func (mbb,x) = case f x of
Nothing -> Nothing
Just x' -> Just (mbb, x')
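-- A sketch (assuming 'mbb' from "Data.RTree.MBB" is in scope) that keeps and
-- rescales only the even values:
--
-- >>> let t = fromList [(mbb 0 0 1 1, 1), (mbb 2 2 3 3, 2)]
-- >>> values (mapMaybe (\v -> if even v then Just (v * 10) else Nothing) t)
-- [20]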
-- ---------------
isValid :: Show b => b -> RTree a -> Bool
isValid _ Empty = True
isValid _ Leaf{} = True
isValid context x = case L.length c >= m && L.length c <= n && (and $ (isValid context) <$> c) && (isBalanced x) of
True -> True
False -> error ( "invalid " ++ show (L.length c) ++ " " ++ show context )
where
isBalanced :: RTree a -> Bool
isBalanced (Leaf _ _ ) = True
isBalanced x' = (and $ isBalanced <$> c') && (and $ (== depth (head c')) <$> (depth <$> c'))
where
c' = getChildren x'
c = getChildren x
-- ----------------------
i_ :: String
i_ = " "
pp :: (Show a) => RTree a -> IO ()
pp = pp' ""
pp' :: (Show a) => String -> RTree a -> IO ()
pp' i Empty = putStrLn $ i ++ "Empty"
pp' i (Leaf mbb x) = putStrLn $ i ++ "Leaf " ++ (show mbb) ++ " " ++ (show x)
pp' i (Node mbb cs) = do
putStrLn $ i ++ "Node " ++ (show mbb)
mapM_ (pp' (i ++ i_)) cs
pp' i (Node2 mbb c1 c2) = do
putStrLn $ i ++ "Node2 " ++ (show mbb)
mapM_ (pp' (i ++ i_)) [c1, c2]
pp' i (Node3 mbb c1 c2 c3) = do
putStrLn $ i ++ "Node3 " ++ (show mbb)
mapM_ (pp' (i ++ i_)) [c1, c2, c3]
pp' i (Node4 mbb c1 c2 c3 c4) = do
putStrLn $ i ++ "Node4 " ++ (show mbb)
mapM_ (pp' (i ++ i_)) [c1, c2, c3, c4]
-- ----------------------
depth :: RTree a -> Int
depth Empty = 0
depth (Leaf _ _ ) = 1
depth t = 1 + (depth $ head $ getChildren t)
-- | returns the number of elements in a tree
length :: RTree a -> Int
length Empty = 0
length (Leaf {}) = 1
length t = sum $ length <$> (getChildren t)
--delete' :: MBB -> RTree a -> Either (RTree a) [(MBB, a)]
instance NFData a => NFData (RTree a) where
rnf (Empty) = ()
rnf (Leaf _ e) = {-rnf m `seq`-} rnf e
rnf (Node _ cs) = {-rnf m `seq`-} rnf cs
rnf (Node2 _ c1 c2) = {-rnf m `seq`-} rnf c1 `seq` rnf c2
rnf (Node3 _ c1 c2 c3) = {-rnf m `seq`-} rnf c1 `seq` rnf c2 `seq` rnf c3
rnf (Node4 _ c1 c2 c3 c4) = {-rnf m `seq`-} rnf c1 `seq` rnf c2 `seq` rnf c3 `seq` rnf c4
instance (Binary a) => Binary (RTree a) where
put (Empty) = put (0::Word8)
put (Leaf mbb e) = put (1::Word8) >> put mbb >> put e
put t = put (2::Word8) >> put (getMBB t) >> put (getChildren t)
get = do
!tag <- getWord8
case tag of
0 -> return Empty
1 -> do
!mbb <- get
!e <- get
return $! Leaf mbb e
2 -> do
!mbb <- get
!c <- get
return $! node mbb c
_ -> fail "RTree.get: error while decoding RTree"
instance (Semigroup a) => Semigroup (RTree a) where
(<>) = unionWith (<>)
instance Monoid a => Monoid (RTree a) where
mempty = empty
mappend = (<>)
| sebastian-philipp/r-tree | Data/RTree/Base.hs | mit | 17,967 | 26 | 16 | 4,930 | 5,912 | 3,085 | 2,827 | 351 | 3 |
{-# LANGUAGE BangPatterns #-}
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as UV
import qualified Data.ByteString.Char8 as C
data LabelPixel = LabelPixel {_label :: !Int, _pixels :: !(UV.Vector Int)}
trim :: C.ByteString -> C.ByteString
trim = C.reverse . C.dropWhile (`elem` " \t") . C.reverse . C.dropWhile (`elem` " \t")
slurpFile :: FilePath -> IO (V.Vector LabelPixel)
slurpFile = fmap (V.fromList . map make . tail . C.lines) . C.readFile where
readInt' !bs = let Just (i, _) = C.readInt bs in i
labelPixel (lbl:pxls) = LabelPixel (readInt' lbl) (UV.fromList $ map readInt' pxls)
make = labelPixel . C.split ',' . trim
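-- | 1-nearest-neighbour classification: returns the label of the training
-- example whose pixel vector has the smallest squared Euclidean distance to
-- the given pixel vector.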
classify :: V.Vector LabelPixel -> UV.Vector Int -> Int
classify !training !pixels = _label mini where
dist !x !y = UV.sum . UV.map (^2) $ UV.zipWith (-) x y
comp !p1 !p2 = dist (_pixels p1) pixels `compare` dist (_pixels p2) pixels
mini = V.minimumBy comp training
main :: IO ()
main = do
trainingSet <- slurpFile "trainingsample.csv"
validationSample <- slurpFile "validationsample.csv"
let isCorrect x = classify trainingSet (_pixels x) == _label x
numCorrect = V.length $ V.filter isCorrect validationSample
flt = fromIntegral
percentCorrect = flt numCorrect / flt (V.length validationSample) * 100.0
putStrLn $ "Percentage correct: " ++ show percentCorrect
| Zomega/thesis | Wurm/Trajectory/RRT/KNN.hs | mit | 1,356 | 0 | 15 | 246 | 529 | 268 | 261 | 30 | 1 |
module ARD.Rendering where
data RenderContext
= RenderContext
{ pixelNumber :: !Int
, rayNumber :: !Int
, pixelRandom :: !Int
}
| crazymaik/ard-haskell | lib/ARD/Rendering.hs | mit | 140 | 0 | 9 | 32 | 36 | 21 | 15 | 12 | 0 |
module Y2018.D01Spec (spec) where
import Y2018
import Test.Hspec
spec :: Spec
spec = parallel $ do
describe "Day 1" $ do
describe "frequency" $ do
it "sums the first example" $
frequency (unlines ["+1", "-2", "+3", "+1"]) `shouldBe` Just 3
it "sums the second example" $
frequency (unlines ["+1", "+1", "+1"]) `shouldBe` Just 3
it "sums the third example" $
frequency (unlines ["+1", "+1", "-2"]) `shouldBe` Just 0
it "sums the fourth example" $
frequency (unlines ["-1", "-2", "-3"]) `shouldBe` Just (-6)
describe "twiceFrequency" $ do
it "finds the first example" $
twiceFrequency (unlines ["+1", "-2", "+3", "+1"]) `shouldBe` Just 2
it "finds the second example" $
twiceFrequency (unlines ["+1", "-1"]) `shouldBe` Just 0
it "finds the third example" $
twiceFrequency (unlines ["+3", "+3", "+4", "-2", "-4"]) `shouldBe` Just 10
it "finds the fourth example" $
twiceFrequency (unlines ["-6", "+3", "+8", "+5", "-6"]) `shouldBe` Just 5
it "finds the fifth example" $
twiceFrequency (unlines ["+7", "+7", "-2", "-7", "-4"]) `shouldBe` Just 14
| tylerjl/adventofcode | test/Y2018/D01Spec.hs | mit | 1,317 | 0 | 19 | 437 | 422 | 220 | 202 | 26 | 1 |
module Solidran.Prot.DetailSpec (spec) where
import Test.Hspec
import Solidran.Prot.Detail
spec :: Spec
spec = do
describe "Solidran.Prot.Detail" $ do
describe "encode" $ do
it "should work on the example" $ do
encode "AUGGCCAUGGCGCCCAGAACUGAGAUCAAUAGUACCCGUAUUAACGGGUGA"
`shouldBe`
"MAMAPRTEINSTRING\0"
it "should return empty string on empty string" $ do
encode "" `shouldBe` ""
it "should work on a null terminated codon" $ do
encode "UAA" `shouldBe` "\0"
it "should work on two null terminated codons" $ do
encode "UAAUAG" `shouldBe` "\0\0"
encode "CUUUAACCCUAACCA" `shouldBe` "L\0P\0P"
it "should work on other examples" $ do
encode "CUUUUCUCAGCU" `shouldBe` "LFSA"
encode "CCACCACCACCA" `shouldBe` "PPPP"
encode "UCGCCCAGAACC" `shouldBe` "SPRT"
| Jefffrey/Solidran | test/Solidran/Prot/DetailSpec.hs | mit | 997 | 0 | 17 | 346 | 199 | 97 | 102 | 22 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecs-cluster.html
module Stratosphere.Resources.ECSCluster where
import Stratosphere.ResourceImports
-- | Full data type definition for ECSCluster. See 'ecsCluster' for a more
-- convenient constructor.
data ECSCluster =
ECSCluster
{ _eCSClusterClusterName :: Maybe (Val Text)
} deriving (Show, Eq)
instance ToResourceProperties ECSCluster where
toResourceProperties ECSCluster{..} =
ResourceProperties
{ resourcePropertiesType = "AWS::ECS::Cluster"
, resourcePropertiesProperties =
hashMapFromList $ catMaybes
[ fmap (("ClusterName",) . toJSON) _eCSClusterClusterName
]
}
-- | Constructor for 'ECSCluster' containing required fields as arguments.
ecsCluster
:: ECSCluster
ecsCluster =
ECSCluster
{ _eCSClusterClusterName = Nothing
}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-ecs-cluster.html#cfn-ecs-cluster-clustername
ecscClusterName :: Lens' ECSCluster (Maybe (Val Text))
ecscClusterName = lens _eCSClusterClusterName (\s a -> s { _eCSClusterClusterName = a })
| frontrowed/stratosphere | library-gen/Stratosphere/Resources/ECSCluster.hs | mit | 1,274 | 0 | 14 | 189 | 186 | 108 | 78 | 24 | 1 |
{-
Copyright (C) 2012-2014 John MacFarlane <[email protected]>
Copyright (C) 2014 Tim T.Y. Lin <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Options
Copyright : Copyright (C) 2012-2014 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <[email protected]>
Stability : alpha
Portability : portable
Data structures and functions for representing parser and writer
options.
-}
module Text.Pandoc.Options ( Extension(..)
, pandocExtensions
, strictExtensions
, phpMarkdownExtraExtensions
, githubMarkdownExtensions
, multimarkdownExtensions
, scholarlyMarkdownExtensions
, ReaderOptions(..)
, HTMLMathMethod (..)
, CiteMethod (..)
, ObfuscationMethod (..)
, HTMLSlideVariant (..)
, EPUBVersion (..)
, WriterOptions (..)
, TrackChanges (..)
, def
, isEnabled
) where
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Default
import Text.Pandoc.Highlighting (Style, pygments)
import Text.Pandoc.MediaBag (MediaBag)
import Data.Monoid
-- | Individually selectable syntax extensions.
data Extension =
Ext_footnotes -- ^ Pandoc/PHP/MMD style footnotes
| Ext_inline_notes -- ^ Pandoc-style inline notes
| Ext_pandoc_title_block -- ^ Pandoc title block
| Ext_yaml_metadata_block -- ^ YAML metadata block
| Ext_mmd_title_block -- ^ Multimarkdown metadata block
| Ext_table_captions -- ^ Pandoc-style table captions
| Ext_implicit_figures -- ^ A paragraph with just an image is a figure
| Ext_simple_tables -- ^ Pandoc-style simple tables
| Ext_multiline_tables -- ^ Pandoc-style multiline tables
| Ext_grid_tables -- ^ Grid tables (pandoc, reST)
| Ext_pipe_tables -- ^ Pipe tables (as in PHP markdown extra)
| Ext_citations -- ^ Pandoc/citeproc citations
| Ext_raw_tex -- ^ Allow raw TeX (other than math)
| Ext_raw_html -- ^ Allow raw HTML
| Ext_tex_math_dollars -- ^ TeX math between $..$ or $$..$$
| Ext_tex_math_single_backslash -- ^ TeX math btw \(..\) \[..\]
| Ext_tex_math_double_backslash -- ^ TeX math btw \\(..\\) \\[..\\]
| Ext_latex_macros -- ^ Parse LaTeX macro definitions (for math only)
| Ext_fenced_code_blocks -- ^ Parse fenced code blocks
| Ext_fenced_code_attributes -- ^ Allow attributes on fenced code blocks
| Ext_backtick_code_blocks -- ^ Github style ``` code blocks
| Ext_inline_code_attributes -- ^ Allow attributes on inline code
| Ext_markdown_in_html_blocks -- ^ Interpret as markdown inside HTML blocks
| Ext_native_divs -- ^ Use Div blocks for contents of <div> tags
| Ext_native_spans -- ^ Use Span inlines for contents of <span>
| Ext_markdown_attribute -- ^ Interpret text inside HTML as markdown
-- iff container has attribute 'markdown'
| Ext_escaped_line_breaks -- ^ Treat a backslash at EOL as linebreak
| Ext_link_attributes -- ^ MMD style reference link attributes
| Ext_autolink_bare_uris -- ^ Make all absolute URIs into links
| Ext_fancy_lists -- ^ Enable fancy list numbers and delimiters
| Ext_lists_without_preceding_blankline -- ^ Allow lists without preceding blank
| Ext_startnum -- ^ Make start number of ordered list significant
| Ext_definition_lists -- ^ Definition lists as in pandoc, mmd, php
| Ext_compact_definition_lists -- ^ Definition lists without
-- space between items, and disallow laziness
| Ext_example_lists -- ^ Markdown-style numbered examples
| Ext_all_symbols_escapable -- ^ Make all non-alphanumerics escapable
| Ext_intraword_underscores -- ^ Treat underscore inside word as literal
| Ext_blank_before_blockquote -- ^ Require blank line before a blockquote
| Ext_blank_before_header -- ^ Require blank line before a header
| Ext_strikeout -- ^ Strikeout using ~~this~~ syntax
| Ext_superscript -- ^ Superscript using ^this^ syntax
| Ext_subscript -- ^ Subscript using ~this~ syntax
| Ext_hard_line_breaks -- ^ All newlines become hard line breaks
| Ext_ignore_line_breaks -- ^ Newlines in paragraphs are ignored
| Ext_literate_haskell -- ^ Enable literate Haskell conventions
| Ext_abbreviations -- ^ PHP markdown extra abbreviation definitions
| Ext_auto_identifiers -- ^ Automatic identifiers for headers
| Ext_ascii_identifiers -- ^ ascii-only identifiers for headers
| Ext_header_attributes -- ^ Explicit header attributes {#id .class k=v}
| Ext_mmd_header_identifiers -- ^ Multimarkdown style header identifiers [myid]
| Ext_implicit_header_references -- ^ Implicit reference links for headers
| Ext_line_blocks -- ^ RST style line blocks
| Ext_epub_html_exts -- ^ Recognise the EPUB extended version of HTML
| Ext_scholarly_markdown -- ^ Enables all Scholarly Markdown extensions
deriving (Show, Read, Enum, Eq, Ord, Bounded)
pandocExtensions :: Set Extension
pandocExtensions = Set.fromList
[ Ext_footnotes
, Ext_inline_notes
, Ext_pandoc_title_block
, Ext_yaml_metadata_block
, Ext_table_captions
, Ext_implicit_figures
, Ext_simple_tables
, Ext_multiline_tables
, Ext_grid_tables
, Ext_pipe_tables
, Ext_citations
, Ext_raw_tex
, Ext_raw_html
, Ext_tex_math_dollars
, Ext_latex_macros
, Ext_fenced_code_blocks
, Ext_fenced_code_attributes
, Ext_backtick_code_blocks
, Ext_inline_code_attributes
, Ext_markdown_in_html_blocks
, Ext_native_divs
, Ext_native_spans
, Ext_escaped_line_breaks
, Ext_fancy_lists
, Ext_startnum
, Ext_definition_lists
, Ext_example_lists
, Ext_all_symbols_escapable
, Ext_intraword_underscores
, Ext_blank_before_blockquote
, Ext_blank_before_header
, Ext_strikeout
, Ext_superscript
, Ext_subscript
, Ext_auto_identifiers
, Ext_header_attributes
, Ext_implicit_header_references
, Ext_line_blocks
]
phpMarkdownExtraExtensions :: Set Extension
phpMarkdownExtraExtensions = Set.fromList
[ Ext_footnotes
, Ext_pipe_tables
, Ext_raw_html
, Ext_markdown_attribute
, Ext_fenced_code_blocks
, Ext_definition_lists
, Ext_intraword_underscores
, Ext_header_attributes
, Ext_abbreviations
]
githubMarkdownExtensions :: Set Extension
githubMarkdownExtensions = Set.fromList
[ Ext_pipe_tables
, Ext_raw_html
, Ext_tex_math_single_backslash
, Ext_fenced_code_blocks
, Ext_auto_identifiers
, Ext_ascii_identifiers
, Ext_backtick_code_blocks
, Ext_autolink_bare_uris
, Ext_intraword_underscores
, Ext_strikeout
, Ext_hard_line_breaks
, Ext_lists_without_preceding_blankline
]
multimarkdownExtensions :: Set Extension
multimarkdownExtensions = Set.fromList
[ Ext_pipe_tables
, Ext_raw_html
, Ext_markdown_attribute
, Ext_link_attributes
, Ext_raw_tex
, Ext_tex_math_double_backslash
, Ext_intraword_underscores
, Ext_mmd_title_block
, Ext_footnotes
, Ext_definition_lists
, Ext_all_symbols_escapable
, Ext_implicit_header_references
, Ext_auto_identifiers
, Ext_mmd_header_identifiers
]
scholarlyMarkdownExtensions :: Set Extension
scholarlyMarkdownExtensions = Set.fromList
[ Ext_footnotes
, Ext_inline_notes
, Ext_pandoc_title_block
, Ext_yaml_metadata_block
, Ext_table_captions
, Ext_implicit_figures
, Ext_simple_tables
, Ext_multiline_tables
, Ext_grid_tables
, Ext_pipe_tables
, Ext_citations
, Ext_raw_tex
, Ext_raw_html
, Ext_tex_math_dollars
, Ext_latex_macros
, Ext_fenced_code_blocks
, Ext_fenced_code_attributes
, Ext_backtick_code_blocks
, Ext_inline_code_attributes
, Ext_markdown_in_html_blocks
, Ext_escaped_line_breaks
, Ext_fancy_lists
, Ext_startnum
, Ext_definition_lists
, Ext_example_lists
, Ext_all_symbols_escapable
, Ext_intraword_underscores
, Ext_blank_before_blockquote
, Ext_blank_before_header
, Ext_superscript
, Ext_subscript
, Ext_auto_identifiers
, Ext_header_attributes
, Ext_implicit_header_references
, Ext_line_blocks
, Ext_scholarly_markdown
]
strictExtensions :: Set Extension
strictExtensions = Set.fromList
[ Ext_raw_html ]
data ReaderOptions = ReaderOptions{
readerExtensions :: Set Extension -- ^ Syntax extensions
, readerSmart :: Bool -- ^ Smart punctuation
, readerStandalone :: Bool -- ^ Standalone document with header
, readerParseRaw :: Bool -- ^ Parse raw HTML, LaTeX
, readerColumns :: Int -- ^ Number of columns in terminal
, readerTabStop :: Int -- ^ Tab stop
, readerOldDashes :: Bool -- ^ Use pandoc <= 1.8.2.1 behavior
-- in parsing dashes; -- is em-dash;
                                   -- - before numeral is en-dash
, readerApplyMacros :: Bool -- ^ Apply macros to TeX math
, readerIndentedCodeClasses :: [String] -- ^ Default classes for
-- indented code blocks
, readerDefaultImageExtension :: String -- ^ Default extension for images
, readerAllowUndefinedXRef :: Bool -- ^ Allows x-refs to be undefined
, readerTrace :: Bool -- ^ Print debugging info
, readerTrackChanges :: TrackChanges
} deriving (Show, Read)
instance Default ReaderOptions
where def = ReaderOptions{
readerExtensions = pandocExtensions
, readerSmart = False
, readerStandalone = False
, readerParseRaw = False
, readerColumns = 80
, readerTabStop = 4
, readerOldDashes = False
, readerApplyMacros = True
, readerIndentedCodeClasses = []
, readerDefaultImageExtension = ""
, readerAllowUndefinedXRef = False
, readerTrace = False
, readerTrackChanges = AcceptChanges
}
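-- As an illustration (not part of the original module), a reader setup for
-- GitHub-flavoured input with smart punctuation can be built by overriding
-- the defaults:
--
-- > githubOpts :: ReaderOptions
-- > githubOpts = def { readerExtensions = githubMarkdownExtensions
-- >                  , readerSmart      = True
-- >                  }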
--
-- Writer options
--
data EPUBVersion = EPUB2 | EPUB3 deriving (Eq, Show, Read)
data HTMLMathMethod = PlainMath
| LaTeXMathML (Maybe String) -- url of LaTeXMathML.js
| JsMath (Maybe String) -- url of jsMath load script
| GladTeX
| WebTeX String -- url of TeX->image script.
| MathML (Maybe String) -- url of MathMLinHTML.js
| MathJax String -- url of MathJax.js
| KaTeX String String -- url of stylesheet and katex.js
deriving (Show, Read, Eq)
data CiteMethod = Citeproc -- use citeproc to render them
| Natbib -- output natbib cite commands
| Biblatex -- output biblatex cite commands
deriving (Show, Read, Eq)
-- | Methods for obfuscating email addresses in HTML.
data ObfuscationMethod = NoObfuscation
| ReferenceObfuscation
| JavascriptObfuscation
deriving (Show, Read, Eq)
-- | Varieties of HTML slide shows.
data HTMLSlideVariant = S5Slides
| SlidySlides
| SlideousSlides
| DZSlides
| RevealJsSlides
| NoSlides
deriving (Show, Read, Eq)
-- | Options for accepting or rejecting MS Word track-changes.
data TrackChanges = AcceptChanges
| RejectChanges
| AllChanges
deriving (Show, Read, Eq)
-- | Options for writers
data WriterOptions = WriterOptions
{ writerStandalone :: Bool -- ^ Include header and footer
, writerTemplate :: String -- ^ Template to use in standalone mode
, writerVariables :: [(String, String)] -- ^ Variables to set in template
, writerTabStop :: Int -- ^ Tabstop for conversion btw spaces and tabs
, writerTableOfContents :: Bool -- ^ Include table of contents
, writerSlideVariant :: HTMLSlideVariant -- ^ Are we writing S5, Slidy or Slideous?
, writerIncremental :: Bool -- ^ True if lists should be incremental
, writerHTMLMathMethod :: HTMLMathMethod -- ^ How to print math in HTML
, writerIgnoreNotes :: Bool -- ^ Ignore footnotes (used in making toc)
, writerNumberSections :: Bool -- ^ Number sections in LaTeX
, writerNumberOffset :: [Int] -- ^ Starting number for section, subsection, ...
, writerSectionDivs :: Bool -- ^ Put sections in div tags in HTML
, writerExtensions :: Set Extension -- ^ Markdown extensions that can be used
, writerReferenceLinks :: Bool -- ^ Use reference links in writing markdown, rst
, writerWrapText :: Bool -- ^ Wrap text to line length
, writerColumns :: Int -- ^ Characters in a line (for text wrapping)
, writerEmailObfuscation :: ObfuscationMethod -- ^ How to obfuscate emails
, writerIdentifierPrefix :: String -- ^ Prefix for section & note ids in HTML
-- and for footnote marks in markdown
, writerSourceURL :: Maybe String -- ^ Absolute URL + directory of 1st source file
, writerUserDataDir :: Maybe FilePath -- ^ Path of user data directory
, writerCiteMethod :: CiteMethod -- ^ How to print cites
, writerHtml5 :: Bool -- ^ Produce HTML5
, writerHtmlQTags :: Bool -- ^ Use @<q>@ tags for quotes in HTML
, writerBeamer :: Bool -- ^ Produce beamer LaTeX slide show
, writerSlideLevel :: Maybe Int -- ^ Force header level of slides
, writerChapters :: Bool -- ^ Use "chapter" for top-level sects
, writerBook :: Bool -- ^ The intended document is a book-type
, writerListings :: Bool -- ^ Use listings package for code
, writerHighlight :: Bool -- ^ Highlight source code
, writerHighlightStyle :: Style -- ^ Style to use for highlighting
, writerSetextHeaders :: Bool -- ^ Use setext headers for levels 1-2 in markdown
, writerTeXLigatures :: Bool -- ^ Use tex ligatures quotes, dashes in latex
, writerEpubVersion :: Maybe EPUBVersion -- ^ Nothing or EPUB version
, writerEpubMetadata :: String -- ^ Metadata to include in EPUB
, writerEpubStylesheet :: Maybe String -- ^ EPUB stylesheet specified at command line
, writerEpubFonts :: [FilePath] -- ^ Paths to fonts to embed
, writerEpubChapterLevel :: Int -- ^ Header level for chapters (separate files)
, writerTOCDepth :: Int -- ^ Number of levels to include in TOC
, writerReferenceODT :: Maybe FilePath -- ^ Path to reference ODT if specified
, writerReferenceDocx :: Maybe FilePath -- ^ Path to reference DOCX if specified
, writerMediaBag :: MediaBag -- ^ Media collected by docx or epub reader
, writerScholarly :: Bool -- ^ Rendering a ScholMD document
, writerVerbose :: Bool -- ^ Verbose debugging output
} deriving Show
instance Default WriterOptions where
def = WriterOptions { writerStandalone = False
, writerTemplate = ""
, writerVariables = []
, writerTabStop = 4
, writerTableOfContents = False
, writerSlideVariant = NoSlides
, writerIncremental = False
, writerHTMLMathMethod = PlainMath
, writerIgnoreNotes = False
, writerNumberSections = False
, writerNumberOffset = [0,0,0,0,0,0]
, writerSectionDivs = False
, writerExtensions = pandocExtensions
, writerReferenceLinks = False
, writerWrapText = True
, writerColumns = 72
, writerEmailObfuscation = JavascriptObfuscation
, writerIdentifierPrefix = ""
, writerSourceURL = Nothing
, writerUserDataDir = Nothing
, writerCiteMethod = Citeproc
, writerHtml5 = False
, writerHtmlQTags = False
, writerBeamer = False
, writerSlideLevel = Nothing
, writerChapters = False
, writerBook = False
, writerListings = False
, writerHighlight = False
, writerHighlightStyle = pygments
, writerSetextHeaders = True
, writerTeXLigatures = True
, writerEpubVersion = Nothing
, writerEpubMetadata = ""
, writerEpubStylesheet = Nothing
, writerEpubFonts = []
, writerEpubChapterLevel = 1
, writerTOCDepth = 3
, writerReferenceODT = Nothing
, writerReferenceDocx = Nothing
, writerMediaBag = mempty
, writerScholarly = False
, writerVerbose = False
}
-- | Returns True if the given extension is enabled.
isEnabled :: Extension -> WriterOptions -> Bool
isEnabled ext opts = ext `Set.member` (writerExtensions opts)
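-- For example, with the default 'WriterOptions' above:
--
-- >>> isEnabled Ext_footnotes def
-- True
-- >>> isEnabled Ext_hard_line_breaks def
-- False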
| timtylin/scholdoc | src/Text/Pandoc/Options.hs | gpl-2.0 | 19,237 | 0 | 10 | 6,282 | 1,976 | 1,298 | 678 | 351 | 1 |
module Report(Report,
report,
reportName,
writeReportHtml,
intBarPlotComp,
strListComp) where
import Control.Monad
import Text.Blaze.Html5 as H
import Text.Blaze.Html5.Attributes as A
import Plot
import Utils
data Report
= Report String [ReportComponent]
deriving (Eq, Ord, Show)
report n cs = Report n cs
reportName (Report n _) = n
data ReportComponent
= IntBarPlot String String [(Int, Int)]
| StrList String [String]
deriving (Eq, Ord, Show)
intBarPlotComp n seriesName sData = IntBarPlot n seriesName sData
strListComp n strs = StrList n strs
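-- A usage sketch with made-up data; chart components are rendered through the
-- Plot module's 'gChartToSVG', which writes SVG files under @<dir>/charts@:
--
-- > example :: IO ()
-- > example = writeReportHtml "out" $
-- >   report "Repository summary"
-- >     [ intBarPlotComp "Commits per week" "commits" [(1, 12), (2, 7)]
-- >     , strListComp "Contributors" ["alice", "bob"]
-- >     ]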
reportComponentToHtml :: String -> ReportComponent -> IO Html
reportComponentToHtml filePath (IntBarPlot pName pSeriesName pData) = do
gChartToSVG (filePath ++ "/charts") (barChart pName pSeriesName pData)
return $ chartHtml pName "alt tag"
reportComponentToHtml filePath (StrList lName lItems) =
return $ stringListToHtml lName lItems
compRCHtml :: String -> Html -> ReportComponent -> IO Html
compRCHtml filePath ht rc = do
nextHt <- reportComponentToHtml filePath rc
return $ do { ht ; nextHt }
reportToHtml :: String -> Report -> IO Html
reportToHtml topDirPath (Report n comps) =
let docFront = docTypeHtml $ do
H.head $ do
H.title $ toHtml n
body $ do
toHtml n in
foldM (compRCHtml topDirPath) docFront comps
writeReportHtml :: String -> Report -> IO ()
writeReportHtml topDirPath rep@(Report n _) = do
repHtml <- reportToHtml topDirPath rep
writeHtmlToFile (topDirPath ++ "/" ++ n) repHtml
| dillonhuff/GitVisualizer | src/Report.hs | gpl-2.0 | 1,596 | 5 | 15 | 363 | 519 | 265 | 254 | 44 | 1 |
import Core
import Functions
import System.Console.Haskeline
import System.Random
import Data.List
header :: IO ()
header = do
  putStrLn "MathMagic shell"
  putStrLn "A REPL calculator for non-conventional arithmetic (Vedic, Trachtenberg, ...)"
  putStrLn "GNU 3.0 Licence (TODO)"
  putStrLn ""
main :: IO ()
main = do
  header
  print $ calculate (Add (Number 1) (Subtract (Number 10) (Number 3)))
| oceanborn2/experiments | proglang/haskell/mathmagic/src/Main.hs | gpl-2.0 | 427 | 1 | 14 | 94 | 121 | 56 | 65 | 14 | 1 |
{-# LANGUAGE DeriveDataTypeable, MultiParamTypeClasses #-}
module Baum.Leftist
( make_fixed, make_quiz
)
where
-- $Id$
import Baum.Leftist.Type
import Baum.Leftist.Ops
import Baum.Leftist.Show
import qualified Baum.Heap.Class as C
import qualified Baum.Heap.Central
import qualified Tree as T
import Autolib.ToDoc
import Inter.Types
import Data.Typeable
instance Show a => T.ToTree ( LeftistTree a ) where
toTree = toTree . fmap show
data HeapbaumLeftist = HeapbaumLeftist
deriving ( Eq, Ord, Show, Read, Typeable )
instance C.Tag HeapbaumLeftist LeftistTree Int where
tag = HeapbaumLeftist
make_fixed :: Make
make_fixed = Baum.Heap.Central.make_fixed HeapbaumLeftist
make_quiz :: Make
make_quiz = Baum.Heap.Central.make_quiz HeapbaumLeftist
| Erdwolf/autotool-bonn | src/Baum/Leftist.hs | gpl-2.0 | 809 | 0 | 7 | 157 | 189 | 112 | 77 | 22 | 1 |
{- |
mtlstats
Copyright (C) 1984, 1985, 2019, 2020, 2021 Rhéal Lamothe
<[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at
your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
-}
{-# LANGUAGE TemplateHaskell #-}
module Mtlstats.Types.Menu (
-- * Types
Menu (..),
MenuItem (..),
-- * Lenses
-- ** Menu Lenses
menuTitle,
menuDefault,
menuItems,
-- ** MenuItem Lenses
miKey,
miDescription,
miAction,
) where
import Lens.Micro ((^.))
import Lens.Micro.TH (makeLenses)
import Mtlstats.Format
import Mtlstats.Types
-- | Defines a menu
data Menu a = Menu
{ _menuTitle :: String
-- ^ The menu title
, _menuDefault :: a
-- ^ The value to return on incorrect selection or other event
, _menuItems :: [MenuItem a]
-- ^ The list of items in the menu
}
-- | Defines a menu item
data MenuItem a = MenuItem
{ _miKey :: Char
-- ^ The key that selects the menu item
, _miDescription :: String
-- ^ The description of the menu item
, _miAction :: Action a
-- ^ The action to be performed when selected
}
makeLenses ''Menu
makeLenses ''MenuItem
instance Show (Menu a) where
show m = unlines
$ [ m^.menuTitle
, ""
]
++ body
where
body = map (left width) items
width = maximum $ map length items
items = map show $ m^.menuItems
instance Show (MenuItem a) where
show i = [i ^. miKey] ++ ": " ++ i ^. miDescription
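-- A rendering sketch ('act' stands for any value of the game's 'Action' type):
--
-- > show (MenuItem 'n' "New game" act) == "n: New game"
--
-- 'show' of a whole 'Menu' prints its title, a blank line, and then each item,
-- padded to the width of the longest one by 'left'.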
| mtlstats/mtlstats | src/Mtlstats/Types/Menu.hs | gpl-3.0 | 1,958 | 0 | 10 | 451 | 299 | 176 | 123 | 34 | 0 |
-- Print out the nth prime, where n is the 1st argument
module Main where
import ReinkePrimes (primes)
import System (getArgs)
printNthPrime :: Int -> IO ()
printNthPrime n = print (n, primes !! (n - 1))
main = do
args <- getArgs
printNthPrime $ read $ head args
| dkensinger/haskell | haskell-primes/ReinkePrimesTest.hs | gpl-3.0 | 276 | 0 | 9 | 62 | 89 | 48 | 41 | 8 | 1 |
-- sth-translit: transliterate characters on stdin
module Main where
import System.Environment (getArgs)
import System.Exit (exitSuccess, exitFailure)
import STH.Lib
(charFilter, applyListMap, padLast,
readCharSeq, reportErrorMsgs, bsUnEsc)
main :: IO ()
main = do
  args <- getArgs
  (from,to) <- case map (readCharSeq . bsUnEsc) args of
    [Just as]          -> return (as, "")
    [Just as, Just bs] -> return (as, bs)
    _                  -> argError
  let
    remove   = filter (not . (`elem` from))
    translit = map (applyListMap $ zip from (padLast to))
  case to of
    "" -> charFilter remove
    _  -> charFilter translit
  exitSuccess
argError :: IO a
argError = reportErrorMsgs
[ "usage:"
, " translit [FROM] [TO] -- replace chars in FROM by those in TO"
, " translit [REMOVE] -- remove chars in REMOVE"
] >> exitFailure
| nbloomf/st-haskell | src/STH/Translit/Main.hs | gpl-3.0 | 882 | 0 | 15 | 219 | 266 | 143 | 123 | 26 | 4 |
module ModelRender where
import Data.Foldable (for_, traverse_)
import Data.StateVar
import Graphics.Rendering.OpenGL.GL.VertexArrays (Capability(..))
import qualified Graphics.Rendering.OpenGL as GL
import Model
import Primitive
import Types
import qualified GLColor
--------------------------------------------------------------------------------
renderModel :: [PlacedPart] -> IO ()
renderModel = traverse_ (renderPart renderPrimBrick)
renderModelWireframe :: Foldable t => t PlacedPart -> IO ()
renderModelWireframe = traverse_ (renderPart renderPrimWireframe)
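-- | Renders each element filled with its own colour: the list is zipped with
-- successive 'Color3 GLubyte' values starting from zero, so every part gets a
-- distinct colour (useful, for example, for colour-based picking).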
renderWithUniqColors :: Renderer a -> [a] -> IO ()
renderWithUniqColors renderer xs =
for_ (zip [zero ..] xs) $ \(color, part) ->
renderer (const $ renderPrimShape color) part
where
zero = toEnum 0 :: Color3 GLubyte
--------------------------------------------------------------------------------
type Renderer a = PrimRenderer (Color3 GLfloat) -> a -> IO ()
renderPart :: Renderer PlacedPart
renderPart renderer = traverse_ (renderPlaced renderer)
renderPlaced :: Renderer (Placed Prim)
renderPlaced renderPrim (Placed p c a) =
GL.preservingMatrix $ do
renderPlacement p
-- This translation is a hack to make world coordinates refer to
-- the center of the position rather than its lower left
-- corner. Otherwise, rotations would never include the same
-- position.
GL.translate (vector3f (-1) (-1) 0)
renderPrim (toGLColor c) a
--------------------------------------------------------------------------------
type PrimRenderer color = color -> Prim -> IO ()
renderPrimBrick :: GL.Color c => PrimRenderer c
renderPrimBrick color a = do
renderPrimWireframe GLColor.wireframe a
renderPrimShape color a
renderPrimShape :: GL.Color c => PrimRenderer c
renderPrimShape color a = do
GL.polygonMode $= (GL.Fill, GL.Fill)
GL.polygonOffset $= (1,1)
GL.polygonOffsetFill $= Enabled
GL.color color
Primitive.render a
GL.polygonOffsetFill $= Disabled
renderPrimWireframe :: GL.Color c => PrimRenderer c
renderPrimWireframe color a = do
GL.polygonMode $= (GL.Line, GL.Line)
GL.color color
Primitive.render a
--------------------------------------------------------------------------------
renderPlacement :: Placement -> IO ()
renderPlacement (Placement p r) = do
renderPosition p
renderRotation r
renderPosition :: P3 -> IO ()
renderPosition (P3 x y z) =
GL.translate (vector3f (2 * fromIntegral x) (2 * fromIntegral y) (fromIntegral z))
renderRotation :: Rotation -> IO ()
renderRotation (Rotation r) = GL.rotate (90 * fromIntegral r) (vector3f 0 0 1)
--------------------------------------------------------------------------------
toGLColor :: Color -> Color3 GLfloat
toGLColor Black = GLColor.black
toGLColor Blue = GLColor.blue
toGLColor Brown = GLColor.brown
toGLColor DarkBlue = GLColor.darkBlue
toGLColor DarkGray = GLColor.darkGray
toGLColor DarkGreen = GLColor.darkGreen
toGLColor Gray = GLColor.gray
toGLColor Green = GLColor.green
toGLColor LightBlue = GLColor.lightBlue
toGLColor Red = GLColor.red
toGLColor Tan = GLColor.tan
toGLColor White = GLColor.white
toGLColor Yellow = GLColor.yellow
--------------------------------------------------------------------------------
vector3f x y z = Vector3 x y z :: Vector3 GLdouble
vertex3f x y z = Vertex3 x y z :: Vertex3 GLdouble
renderAxis :: GLdouble -> IO ()
renderAxis s =
GL.renderPrimitive GL.Lines $ do
GL.color (Color3 1 0 0 :: Color3 GLfloat)
GL.vertex $ vertex3f 0 0 0
GL.vertex $ vertex3f s 0 0
GL.color (Color3 0 1 0 :: Color3 GLfloat)
GL.vertex $ vertex3f 0 0 0
GL.vertex $ vertex3f 0 s 0
GL.color (Color3 0 0 1 :: Color3 GLfloat)
GL.vertex $ vertex3f 0 0 0
GL.vertex $ vertex3f 0 0 s
| holmisen/glbrix | src/ModelRender.hs | gpl-3.0 | 3,824 | 0 | 12 | 680 | 1,162 | 577 | 585 | 82 | 1 |
module Colouring
(fullRNG
)where
import DataTypes
-- | function to produce seemingly random ints
-- usage: rng (min int, max int) seed => pseudo-random int
rng :: (Int,Int) -> Int -> Int
rng (min,max) seed = ((seed ^ 20) `mod` 2339 `mod` max) + min
-- | A function which turns a Figure into a FullFigure, using a seed to produce the colours pseudo-randomly
-- usage: fullRNG seed figureToChange => randomColourFigure
fullRNG :: Int -> Figure -> FullFigure
fullRNG seed [] = []
fullRNG seed (x:xs) = ((genFill seed),(genOutline (seed + 10)),x):(fullRNG (seed + 11) xs)
genFill seed = (r1,r2,r3,r4,r5,r6) where
[r1,r2,r3,r4,r5,r6] = genList (0,15) 6 seed
genOutline seed = (r1,r2,r3) where
[r1,r2,r3] = genList (0,255) 3 seed
genList :: (Int,Int) -> Int -> Int -> [Int]
genList mm len seed = if len == 0
then []
else (rng mm seed):(genList mm (len - 1) (seed + 1))
| Lexer747/Haskell-Fractals | Core/Colouring.hs | gpl-3.0 | 885 | 0 | 10 | 182 | 368 | 211 | 157 | 16 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.TargetSSLProxies.SetSSLCertificates
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Changes SslCertificates for TargetSslProxy.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.targetSslProxies.setSslCertificates@.
module Network.Google.Resource.Compute.TargetSSLProxies.SetSSLCertificates
(
-- * REST Resource
TargetSSLProxiesSetSSLCertificatesResource
-- * Creating a Request
, targetSSLProxiesSetSSLCertificates
, TargetSSLProxiesSetSSLCertificates
-- * Request Lenses
, tspsscProject
, tspsscPayload
, tspsscTargetSSLProxy
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.targetSslProxies.setSslCertificates@ method which the
-- 'TargetSSLProxiesSetSSLCertificates' request conforms to.
type TargetSSLProxiesSetSSLCertificatesResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"global" :>
"targetSslProxies" :>
Capture "targetSslProxy" Text :>
"setSslCertificates" :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON]
TargetSSLProxiesSetSSLCertificatesRequest
:> Post '[JSON] Operation
-- | Changes SslCertificates for TargetSslProxy.
--
-- /See:/ 'targetSSLProxiesSetSSLCertificates' smart constructor.
data TargetSSLProxiesSetSSLCertificates = TargetSSLProxiesSetSSLCertificates'
{ _tspsscProject :: !Text
, _tspsscPayload :: !TargetSSLProxiesSetSSLCertificatesRequest
, _tspsscTargetSSLProxy :: !Text
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'TargetSSLProxiesSetSSLCertificates' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tspsscProject'
--
-- * 'tspsscPayload'
--
-- * 'tspsscTargetSSLProxy'
targetSSLProxiesSetSSLCertificates
:: Text -- ^ 'tspsscProject'
-> TargetSSLProxiesSetSSLCertificatesRequest -- ^ 'tspsscPayload'
-> Text -- ^ 'tspsscTargetSSLProxy'
-> TargetSSLProxiesSetSSLCertificates
targetSSLProxiesSetSSLCertificates pTspsscProject_ pTspsscPayload_ pTspsscTargetSSLProxy_ =
TargetSSLProxiesSetSSLCertificates'
{ _tspsscProject = pTspsscProject_
, _tspsscPayload = pTspsscPayload_
, _tspsscTargetSSLProxy = pTspsscTargetSSLProxy_
}
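-- For instance (a sketch; the project, payload and proxy name are placeholders),
-- a request value is built with the smart constructor and its fields can then
-- be adjusted through the lenses below:
--
-- > targetSSLProxiesSetSSLCertificates "my-project" certsRequest "my-proxy"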
-- | Project ID for this request.
tspsscProject :: Lens' TargetSSLProxiesSetSSLCertificates Text
tspsscProject
= lens _tspsscProject
(\ s a -> s{_tspsscProject = a})
-- | Multipart request metadata.
tspsscPayload :: Lens' TargetSSLProxiesSetSSLCertificates TargetSSLProxiesSetSSLCertificatesRequest
tspsscPayload
= lens _tspsscPayload
(\ s a -> s{_tspsscPayload = a})
-- | Name of the TargetSslProxy resource whose SslCertificate resource is to
-- be set.
tspsscTargetSSLProxy :: Lens' TargetSSLProxiesSetSSLCertificates Text
tspsscTargetSSLProxy
= lens _tspsscTargetSSLProxy
(\ s a -> s{_tspsscTargetSSLProxy = a})
instance GoogleRequest
TargetSSLProxiesSetSSLCertificates where
type Rs TargetSSLProxiesSetSSLCertificates =
Operation
type Scopes TargetSSLProxiesSetSSLCertificates =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient TargetSSLProxiesSetSSLCertificates'{..}
= go _tspsscProject _tspsscTargetSSLProxy
(Just AltJSON)
_tspsscPayload
computeService
where go
= buildClient
(Proxy ::
Proxy TargetSSLProxiesSetSSLCertificatesResource)
mempty
| rueshyna/gogol | gogol-compute/gen/Network/Google/Resource/Compute/TargetSSLProxies/SetSSLCertificates.hs | mpl-2.0 | 4,610 | 0 | 17 | 1,036 | 473 | 282 | 191 | 85 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.TargetPools.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves a list of target pools available to the specified project and
-- region.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.targetPools.list@.
module Network.Google.Resource.Compute.TargetPools.List
(
-- * REST Resource
TargetPoolsListResource
-- * Creating a Request
, targetPoolsList
, TargetPoolsList
-- * Request Lenses
, tplReturnPartialSuccess
, tplOrderBy
, tplProject
, tplFilter
, tplRegion
, tplPageToken
, tplMaxResults
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.targetPools.list@ method which the
-- 'TargetPoolsList' request conforms to.
type TargetPoolsListResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"regions" :>
Capture "region" Text :>
"targetPools" :>
QueryParam "returnPartialSuccess" Bool :>
QueryParam "orderBy" Text :>
QueryParam "filter" Text :>
QueryParam "pageToken" Text :>
QueryParam "maxResults" (Textual Word32) :>
QueryParam "alt" AltJSON :>
Get '[JSON] TargetPoolList
-- | Retrieves a list of target pools available to the specified project and
-- region.
--
-- /See:/ 'targetPoolsList' smart constructor.
data TargetPoolsList =
TargetPoolsList'
{ _tplReturnPartialSuccess :: !(Maybe Bool)
, _tplOrderBy :: !(Maybe Text)
, _tplProject :: !Text
, _tplFilter :: !(Maybe Text)
, _tplRegion :: !Text
, _tplPageToken :: !(Maybe Text)
, _tplMaxResults :: !(Textual Word32)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'TargetPoolsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tplReturnPartialSuccess'
--
-- * 'tplOrderBy'
--
-- * 'tplProject'
--
-- * 'tplFilter'
--
-- * 'tplRegion'
--
-- * 'tplPageToken'
--
-- * 'tplMaxResults'
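--
-- A hypothetical usage sketch (the project, region and filter values are
-- assumptions for illustration), assuming the lens operators ('&') and ('.~')
-- are in scope:
--
-- > targetPoolsList "my-project" "us-central1"
-- >   & tplMaxResults .~ 50
-- >   & tplFilter .~ Just "name != example-pool"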
targetPoolsList
:: Text -- ^ 'tplProject'
-> Text -- ^ 'tplRegion'
-> TargetPoolsList
targetPoolsList pTplProject_ pTplRegion_ =
TargetPoolsList'
{ _tplReturnPartialSuccess = Nothing
, _tplOrderBy = Nothing
, _tplProject = pTplProject_
, _tplFilter = Nothing
, _tplRegion = pTplRegion_
, _tplPageToken = Nothing
, _tplMaxResults = 500
}
-- | Opt-in for partial success behavior which provides partial results in
-- case of failure. The default value is false.
tplReturnPartialSuccess :: Lens' TargetPoolsList (Maybe Bool)
tplReturnPartialSuccess
= lens _tplReturnPartialSuccess
(\ s a -> s{_tplReturnPartialSuccess = a})
-- | Sorts list results by a certain order. By default, results are returned
-- in alphanumerical order based on the resource name. You can also sort
-- results in descending order based on the creation timestamp using
-- \`orderBy=\"creationTimestamp desc\"\`. This sorts results based on the
-- \`creationTimestamp\` field in reverse chronological order (newest
-- result first). Use this to sort resources like operations so that the
-- newest operation is returned first. Currently, only sorting by \`name\`
-- or \`creationTimestamp desc\` is supported.
tplOrderBy :: Lens' TargetPoolsList (Maybe Text)
tplOrderBy
= lens _tplOrderBy (\ s a -> s{_tplOrderBy = a})
-- | Project ID for this request.
tplProject :: Lens' TargetPoolsList Text
tplProject
= lens _tplProject (\ s a -> s{_tplProject = a})
-- | A filter expression that filters resources listed in the response. The
-- expression must specify the field name, a comparison operator, and the
-- value that you want to use for filtering. The value must be a string, a
-- number, or a boolean. The comparison operator must be either \`=\`,
-- \`!=\`, \`>\`, or \`\<\`. For example, if you are filtering Compute
-- Engine instances, you can exclude instances named \`example-instance\`
-- by specifying \`name != example-instance\`. You can also filter nested
-- fields. For example, you could specify \`scheduling.automaticRestart =
-- false\` to include instances only if they are not scheduled for
-- automatic restarts. You can use filtering on nested fields to filter
-- based on resource labels. To filter on multiple expressions, provide
-- each separate expression within parentheses. For example: \`\`\`
-- (scheduling.automaticRestart = true) (cpuPlatform = \"Intel Skylake\")
-- \`\`\` By default, each expression is an \`AND\` expression. However,
-- you can include \`AND\` and \`OR\` expressions explicitly. For example:
-- \`\`\` (cpuPlatform = \"Intel Skylake\") OR (cpuPlatform = \"Intel
-- Broadwell\") AND (scheduling.automaticRestart = true) \`\`\`
tplFilter :: Lens' TargetPoolsList (Maybe Text)
tplFilter
= lens _tplFilter (\ s a -> s{_tplFilter = a})
-- | Name of the region scoping this request.
tplRegion :: Lens' TargetPoolsList Text
tplRegion
= lens _tplRegion (\ s a -> s{_tplRegion = a})
-- | Specifies a page token to use. Set \`pageToken\` to the
-- \`nextPageToken\` returned by a previous list request to get the next
-- page of results.
tplPageToken :: Lens' TargetPoolsList (Maybe Text)
tplPageToken
= lens _tplPageToken (\ s a -> s{_tplPageToken = a})
-- | The maximum number of results per page that should be returned. If the
-- number of available results is larger than \`maxResults\`, Compute
-- Engine returns a \`nextPageToken\` that can be used to get the next page
-- of results in subsequent list requests. Acceptable values are \`0\` to
-- \`500\`, inclusive. (Default: \`500\`)
tplMaxResults :: Lens' TargetPoolsList Word32
tplMaxResults
= lens _tplMaxResults
(\ s a -> s{_tplMaxResults = a})
. _Coerce
instance GoogleRequest TargetPoolsList where
type Rs TargetPoolsList = TargetPoolList
type Scopes TargetPoolsList =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/compute.readonly"]
requestClient TargetPoolsList'{..}
= go _tplProject _tplRegion _tplReturnPartialSuccess
_tplOrderBy
_tplFilter
_tplPageToken
(Just _tplMaxResults)
(Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy TargetPoolsListResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/TargetPools/List.hs | mpl-2.0 | 7,437 | 0 | 20 | 1,668 | 833 | 497 | 336 | 120 | 1 |
module Handler.HonorPledge where
import Import
import Model.User (establishUserDB, curUserIsEligibleEstablish)
getHonorPledgeR :: Handler Html
getHonorPledgeR = do
is_elig <- curUserIsEligibleEstablish
defaultLayout $ do
snowdriftTitle "Honor Pledge"
$(widgetFile "honor-pledge")
postHonorPledgeR :: Handler Html
postHonorPledgeR = do
Entity user_id user <- requireAuth
case userEstablished user of
EstEligible elig_time reason -> do
runDB $ establishUserDB user_id elig_time reason
setMessage "Congratulations, you are now a fully established user!"
redirect HomeR
EstEstablished{} -> error "You're already an established user."
_ -> error "You're not eligible for establishment."
| chreekat/snowdrift | Handler/HonorPledge.hs | agpl-3.0 | 778 | 0 | 13 | 177 | 159 | 74 | 85 | 19 | 3 |
filter' :: (a -> Bool) -> [a] -> [a]
filter' _ [] = []
filter' p (x:xs)
| p x = x : filter' p xs
| otherwise = filter' p xs
-- filter' (>=0) [-2,-1,0,1,2] -- [0,1,2]
-- [x | x <- [-2..2], x >= 0] -- [0,1,2]
largestDivisible :: Integer
largestDivisible = head (filter p [100000,99999..])
where p x = x `mod` 3829 == 0
-- largestDivisible -- 99554
| mikoim/musor | haskell/Learn You a Haskell for Great Good!/filter.hs | unlicense | 362 | 0 | 8 | 87 | 140 | 73 | 67 | 8 | 1 |
module ReplaceExperiment where
replaceWithP :: b -> Char
replaceWithP = const 'p'
lms :: [Maybe [Char]]
lms = [Just "Ave", Nothing, Just "woohoo"]
-- Just making the argument more specific
replaceWithP' :: [Maybe [Char]] -> Char
replaceWithP' = replaceWithP
-- Prelude> :t fmap replaceWithP
-- fmap replaceWithP :: Functor f => f a -> f Char
liftedReplace :: Functor f => f a -> f Char
liftedReplace = fmap replaceWithP
liftedReplace' :: [Maybe [Char]] -> [Char]
liftedReplace' = liftedReplace
-- Prelude> :t (fmap . fmap) replaceWithP
-- (fmap . fmap) replaceWithP
-- :: (Functor f1, Functor f)
-- => f (f1 a) -> f (f1 Char)
twiceLifted :: (Functor f1, Functor f) =>
f (f1 a) -> f (f1 Char)
twiceLifted = (fmap . fmap) replaceWithP
-- Making it more specific
twiceLifted' :: [Maybe [Char]] -> [Maybe Char]
twiceLifted' = twiceLifted
-- f ~ []
-- f1 ~ Maybe
-- Prelude> let rWP = replaceWithP
-- Prelude> :t (fmap . fmap . fmap) rWP
-- (fmap . fmap . fmap) replaceWithP
-- :: (Functor f2, Functor f1, Functor f)
-- => f (f1 (f2 a)) -> f (f1 (f2 Char))
thriceLifted :: (Functor f2, Functor f1, Functor f)
=> f (f1 (f2 a)) -> f (f1 (f2 Char))
thriceLifted = (fmap . fmap . fmap) replaceWithP
-- More specific or "concrete"
thriceLifted' :: [Maybe [Char]] -> [Maybe [Char]]
thriceLifted' = thriceLifted
-- f ~ []
-- f1 ~ Maybe
-- f2 ~ []
main :: IO ()
main = do
putStr "replaceWithP' lms: "
print (replaceWithP' lms)
putStr "liftedReplace lms: "
print (liftedReplace lms)
putStr "liftedReplace' lms: "
print (liftedReplace' lms)
putStr "twiceLifted lms: "
print (twiceLifted lms)
putStr "twiceLifted' lms: "
print (twiceLifted' lms)
putStr "thriceLifted lms: "
print (thriceLifted lms)
putStr "thriceLifted' lms: "
print (thriceLifted' lms)
| dmp1ce/Haskell-Programming-Exercises | Chapter 16/ReplaceExperiment.hs | unlicense | 1,843 | 0 | 11 | 403 | 491 | 252 | 239 | 37 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QPrinter_h.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:25
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Gui.QPrinter_h where
import Foreign.C.Types
import Qtc.Enums.Base
import Qtc.Enums.Gui.QPaintDevice
import Qtc.Classes.Base
import Qtc.Classes.Qccs_h
import Qtc.Classes.Core_h
import Qtc.ClassTypes.Core
import Qth.ClassTypes.Core
import Qtc.Classes.Gui_h
import Qtc.ClassTypes.Gui
import Foreign.Marshal.Array
instance QunSetUserMethod (QPrinter ()) where
unSetUserMethod qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QPrinter_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)
foreign import ccall "qtc_QPrinter_unSetUserMethod" qtc_QPrinter_unSetUserMethod :: Ptr (TQPrinter a) -> CInt -> CInt -> IO (CBool)
instance QunSetUserMethod (QPrinterSc a) where
unSetUserMethod qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QPrinter_unSetUserMethod cobj_qobj (toCInt 0) (toCInt evid)
instance QunSetUserMethodVariant (QPrinter ()) where
unSetUserMethodVariant qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QPrinter_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)
instance QunSetUserMethodVariant (QPrinterSc a) where
unSetUserMethodVariant qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QPrinter_unSetUserMethod cobj_qobj (toCInt 1) (toCInt evid)
instance QunSetUserMethodVariantList (QPrinter ()) where
unSetUserMethodVariantList qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QPrinter_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
instance QunSetUserMethodVariantList (QPrinterSc a) where
unSetUserMethodVariantList qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
qtc_QPrinter_unSetUserMethod cobj_qobj (toCInt 2) (toCInt evid)
instance QsetUserMethod (QPrinter ()) (QPrinter x0 -> IO ()) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethod_QPrinter setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethod_QPrinter_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QPrinter_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQPrinter x0) -> IO ()
setHandlerWrapper x0
= do
x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QPrinter_setUserMethod" qtc_QPrinter_setUserMethod :: Ptr (TQPrinter a) -> CInt -> Ptr (Ptr (TQPrinter x0) -> IO ()) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethod_QPrinter :: (Ptr (TQPrinter x0) -> IO ()) -> IO (FunPtr (Ptr (TQPrinter x0) -> IO ()))
foreign import ccall "wrapper" wrapSetUserMethod_QPrinter_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetUserMethod (QPrinterSc a) (QPrinter x0 -> IO ()) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethod_QPrinter setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethod_QPrinter_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QPrinter_setUserMethod cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQPrinter x0) -> IO ()
setHandlerWrapper x0
= do
x0obj <- objectFromPtr_nf x0
if (objectIsNull x0obj)
then return ()
else _handler x0obj
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QsetUserMethod (QPrinter ()) (QPrinter x0 -> QVariant () -> IO (QVariant ())) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethodVariant_QPrinter setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethodVariant_QPrinter_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QPrinter_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQPrinter x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
setHandlerWrapper x0 x1
= do
x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
rv <- if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
withObjectPtr rv $ \cobj_rv -> return cobj_rv
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QPrinter_setUserMethodVariant" qtc_QPrinter_setUserMethodVariant :: Ptr (TQPrinter a) -> CInt -> Ptr (Ptr (TQPrinter x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetUserMethodVariant_QPrinter :: (Ptr (TQPrinter x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))) -> IO (FunPtr (Ptr (TQPrinter x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))))
foreign import ccall "wrapper" wrapSetUserMethodVariant_QPrinter_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetUserMethod (QPrinterSc a) (QPrinter x0 -> QVariant () -> IO (QVariant ())) where
setUserMethod _eobj _eid _handler
= do
funptr <- wrapSetUserMethodVariant_QPrinter setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetUserMethodVariant_QPrinter_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
qtc_QPrinter_setUserMethodVariant cobj_eobj (toCInt _eid) (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return ()
where
setHandlerWrapper :: Ptr (TQPrinter x0) -> Ptr (TQVariant ()) -> IO (Ptr (TQVariant ()))
setHandlerWrapper x0 x1
= do
x0obj <- objectFromPtr_nf x0
x1obj <- objectFromPtr_nf x1
rv <- if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj x1obj
withObjectPtr rv $ \cobj_rv -> return cobj_rv
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QunSetHandler (QPrinter ()) where
unSetHandler qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
withCWString evid $ \cstr_evid ->
qtc_QPrinter_unSetHandler cobj_qobj cstr_evid
foreign import ccall "qtc_QPrinter_unSetHandler" qtc_QPrinter_unSetHandler :: Ptr (TQPrinter a) -> CWString -> IO (CBool)
instance QunSetHandler (QPrinterSc a) where
unSetHandler qobj evid
= withBoolResult $
withObjectPtr qobj $ \cobj_qobj ->
withCWString evid $ \cstr_evid ->
qtc_QPrinter_unSetHandler cobj_qobj cstr_evid
instance QsetHandler (QPrinter ()) (QPrinter x0 -> IO (Int)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QPrinter1 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QPrinter1_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QPrinter_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQPrinter x0) -> IO (CInt)
setHandlerWrapper x0
= do x0obj <- objectFromPtr_nf x0
let rv =
if (objectIsNull x0obj)
then return 0
else _handler x0obj
rvf <- rv
return (toCInt rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QPrinter_setHandler1" qtc_QPrinter_setHandler1 :: Ptr (TQPrinter a) -> CWString -> Ptr (Ptr (TQPrinter x0) -> IO (CInt)) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QPrinter1 :: (Ptr (TQPrinter x0) -> IO (CInt)) -> IO (FunPtr (Ptr (TQPrinter x0) -> IO (CInt)))
foreign import ccall "wrapper" wrapSetHandler_QPrinter1_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QPrinterSc a) (QPrinter x0 -> IO (Int)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QPrinter1 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QPrinter1_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QPrinter_setHandler1 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQPrinter x0) -> IO (CInt)
setHandlerWrapper x0
= do x0obj <- objectFromPtr_nf x0
let rv =
if (objectIsNull x0obj)
then return 0
else _handler x0obj
rvf <- rv
return (toCInt rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QdevType_h (QPrinter ()) (()) where
devType_h x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QPrinter_devType cobj_x0
foreign import ccall "qtc_QPrinter_devType" qtc_QPrinter_devType :: Ptr (TQPrinter a) -> IO CInt
instance QdevType_h (QPrinterSc a) (()) where
devType_h x0 ()
= withIntResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QPrinter_devType cobj_x0
instance QsetHandler (QPrinter ()) (QPrinter x0 -> IO (QPaintEngine t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QPrinter2 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QPrinter2_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QPrinter_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQPrinter x0) -> IO (Ptr (TQPaintEngine t0))
setHandlerWrapper x0
= do x0obj <- objectFromPtr_nf x0
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
foreign import ccall "qtc_QPrinter_setHandler2" qtc_QPrinter_setHandler2 :: Ptr (TQPrinter a) -> CWString -> Ptr (Ptr (TQPrinter x0) -> IO (Ptr (TQPaintEngine t0))) -> Ptr () -> Ptr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO ()
foreign import ccall "wrapper" wrapSetHandler_QPrinter2 :: (Ptr (TQPrinter x0) -> IO (Ptr (TQPaintEngine t0))) -> IO (FunPtr (Ptr (TQPrinter x0) -> IO (Ptr (TQPaintEngine t0))))
foreign import ccall "wrapper" wrapSetHandler_QPrinter2_d :: (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()) -> IO (FunPtr (Ptr fun -> Ptr state -> Ptr fun_d -> IO ()))
instance QsetHandler (QPrinterSc a) (QPrinter x0 -> IO (QPaintEngine t0)) where
setHandler _eobj _eid _handler
= do
funptr <- wrapSetHandler_QPrinter2 setHandlerWrapper
stptr <- newStablePtr (Wrap _handler)
funptr_d <- wrapSetHandler_QPrinter2_d setHandlerWrapper_d
withObjectPtr _eobj $ \cobj_eobj ->
withCWString _eid $ \cstr_eid ->
qtc_QPrinter_setHandler2 cobj_eobj cstr_eid (toCFunPtr funptr) (castStablePtrToPtr stptr) (toCFunPtr funptr_d)
return()
where
setHandlerWrapper :: Ptr (TQPrinter x0) -> IO (Ptr (TQPaintEngine t0))
setHandlerWrapper x0
= do x0obj <- objectFromPtr_nf x0
let rv =
if (objectIsNull x0obj)
then return $ objectCast x0obj
else _handler x0obj
rvf <- rv
withObjectPtr rvf $ \cobj_rvf -> return (cobj_rvf)
setHandlerWrapper_d :: Ptr fun -> Ptr () -> Ptr fun_d -> IO ()
setHandlerWrapper_d funptr stptr funptr_d
= do when (stptr/=ptrNull)
(freeStablePtr (castPtrToStablePtr stptr))
when (funptr/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr))
when (funptr_d/=ptrNull)
(freeHaskellFunPtr (castPtrToFunPtr funptr_d))
return ()
instance QpaintEngine_h (QPrinter ()) (()) where
paintEngine_h x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QPrinter_paintEngine cobj_x0
foreign import ccall "qtc_QPrinter_paintEngine" qtc_QPrinter_paintEngine :: Ptr (TQPrinter a) -> IO (Ptr (TQPaintEngine ()))
instance QpaintEngine_h (QPrinterSc a) (()) where
paintEngine_h x0 ()
= withObjectRefResult $
withObjectPtr x0 $ \cobj_x0 ->
qtc_QPrinter_paintEngine cobj_x0
| keera-studios/hsQt | Qtc/Gui/QPrinter_h.hs | bsd-2-clause | 15,633 | 0 | 18 | 3,572 | 5,106 | 2,438 | 2,668 | -1 | -1 |
module Sodium.Haskell.Render (render) where
import qualified Data.Char as C
import qualified Text.PrettyPrint as P
import Sodium.Haskell.Program
vsep = foldr (P.$+$) P.empty
render :: Program -> String
render (Program defs imports exts)
= P.render
$ vsep
$ [renderExts exts]
++ map renderImport imports
++ map (\(Def def) -> renderDef def) defs
renderExts cs
= P.hsep
[ P.text "{-#"
, P.text "LANGUAGE"
, P.hsep
$ P.punctuate P.comma
$ map P.text cs
, P.text "#-}"
]
renderImport cs
= P.text "import"
P.<+> P.text cs
renderDef (ValueDef pat expr)
= P.hsep
[ renderPattern pat
, P.text "="
, renderExpression expr
]
renderDef (GuardDef pat leafs)
= renderPattern pat
P.$+$
( P.nest 4
$ vsep
$ map renderLeaf
$ leafs
)
where
renderLeaf (expr1, expr2)
= P.hsep
[ P.text "|"
, renderExpression expr1
, P.text "="
, renderExpression expr2
]
data Fixity
= LFix
| RFix
| NFix
deriving Eq
data Level
= ALevel Integer Fixity
| HLevel
| SLevel
hlevel = (, HLevel)
renderExpr :: Expression -> (P.Doc, Level)
renderExpr (Access name)
= hlevel (renderName name)
renderExpr (Tuple [expr])
= renderExpr expr
renderExpr (Tuple exprs)
= hlevel
$ P.parens
$ P.hsep
$ P.punctuate P.comma
$ map renderExpression
$ exprs
renderExpr (Beta (Beta (Access "enumFromTo") exprFrom) exprTo)
= let
(operand1, level1) = renderExpr exprFrom
(operand2, level2) = renderExpr exprTo
wrap = \case
SLevel -> P.parens
_ -> id
doc = wrap level1 operand1 P.<> P.text ".." P.<> wrap level2 operand2
in hlevel (P.brackets doc)
renderExpr (Primary lit)
= hlevel
$ case lit of
Quote cs -> P.text (show cs)
INumber intSection -> P.text intSection
FNumber intSection fracSection -> P.hcat
[ P.text intSection
, P.text "."
, P.text fracSection
]
ENumber intSection fracSection eSign eSection -> P.hcat
[ P.text intSection
, P.text "."
, P.text fracSection
, P.text "e"
, if eSign
then P.text "+"
else P.text "-"
, P.text eSection
]
renderExpr (Beta (Beta (Access op) expr1) expr2)
| isInfix op = renderBinary op expr1 expr2
renderExpr (Beta expr1 expr2) = renderBinary "" expr1 expr2
renderExpr expr = (renderExpression expr, SLevel)
isLetter c = C.isAlpha c || c == '_'
isInfix op
= op `elem` ["elem", "div", "mod"]
|| null op
|| (not . isLetter) (head op)
renderOp op lhs rhs = lhs P.<+> rOp P.<+> rhs
where rOp
| null op = P.empty
| isLetter (head op) = P.char '`' P.<> P.text op P.<> P.char '`'
| otherwise = P.text op
renderBinary op expr1 expr2
= (renderOp op lhs rhs, uncurry ALevel opfix)
where
opfix = whatfix op
(operand1, level1) = renderExpr expr1
(operand2, level2) = renderExpr expr2
defaultHandler n m _
| m > n = id
| otherwise = P.parens
advHandler n f m g
| m > n = id
| f == g && m == n = id
| otherwise = P.parens
(handler1, handler2) = case opfix of
(n, NFix) -> (\a -> (a, a)) (defaultHandler n)
(n, LFix) -> (advHandler n LFix, defaultHandler n)
(n, RFix) -> (defaultHandler n, advHandler n RFix)
wrap handler = \case
SLevel -> P.parens
HLevel -> id
ALevel n fix -> handler n fix
lhs = wrap handler1 level1 operand1
rhs = wrap handler2 level2 operand2
whatfix op
= maybe (9, LFix) id (lookup op fixtable)
where fixtable =
[ ("", (10, LFix))
, ("+", (6, LFix))
, ("-", (6, LFix))
, ("*", (7, LFix))
, ("/", (7, LFix))
]
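-- A small worked example of the parenthesisation rules above (added as an
-- illustration of intent, not taken from the original source): "-" is left
-- associative at level 6, so a left-nested subtraction needs no parentheses
-- while a right-nested one keeps them:
--   (1 - 2) - 3  renders as  1 - 2 - 3
--   1 - (2 - 3)  renders as  1 - (2 - 3)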
renderExpression (Lambda pats expr)
= P.hsep
[ P.hcat
[ P.text "\\"
, P.hsep $ map renderPattern pats
]
, P.text "->"
, renderExpression expr
]
renderExpression (Typed expr t)
= P.hsep
[ case renderExpr expr of
(r, HLevel) -> r
(r, _) -> P.parens r
, P.text "::"
, renderType t
]
renderExpression (DoExpression [DoExecute expr])
= renderExpression expr
renderExpression (DoExpression statements)
= P.text "do"
P.$+$ (P.nest 4 $ vsep $ map renderStatement statements)
renderExpression (PureLet valueDefs expr)
= P.text "let"
P.$+$ (P.nest 4 $ vsep $ map renderDef valueDefs)
P.$+$ (P.text "in" P.<+> renderExpression expr)
renderExpression (IfExpression expr bodyThen bodyElse)
= (P.text "if" P.<+> renderExpression expr)
P.$+$ (P.text "then" P.<+> renderExpression bodyThen)
P.$+$ (P.text "else" P.<+> renderExpression bodyElse)
renderExpression expr = fst (renderExpr expr)
renderName name
| null name = P.empty
| (not . isLetter) (head name) = P.parens (P.text name)
| otherwise = P.text name
renderType = \case
HsType cs -> P.text cs
HsUnit -> P.text "()"
HsIO t -> renderName "IO" P.<+> renderType t
renderStatement (DoBind pat expr)
= P.hsep
[ renderPattern pat
, P.text "<-"
, renderExpression expr
]
renderStatement (DoLet pat expr)
= P.hsep
[ P.text "let"
, renderPattern pat
, P.text "="
, renderExpression expr
]
renderStatement (DoExecute expr)
= renderExpression expr
renderPattern (PatFunc name names)
= renderName name P.<+> P.hsep (map renderName names)
renderPattern (PatTuple [name])
= renderName name
renderPattern (PatTuple names)
= P.parens
$ P.hsep
$ P.punctuate P.comma
$ map renderName
$ names
| kirbyfan64/sodium | src/Sodium/Haskell/Render.hs | bsd-3-clause | 5,123 | 126 | 14 | 1,108 | 2,335 | 1,171 | 1,164 | -1 | -1 |
-- | Like Control.Concurrent.MVar.Strict but reduce to HNF, not NF
{-# LANGUAGE CPP, MagicHash, UnboxedTuples #-}
module Control.Distributed.Process.Internal.StrictMVar
( StrictMVar(StrictMVar)
, newEmptyMVar
, newMVar
, takeMVar
, putMVar
, readMVar
, withMVar
, modifyMVar_
, modifyMVar
, modifyMVarMasked
, mkWeakMVar
) where
import Control.Applicative ((<$>))
import Control.Monad ((>=>))
#if MIN_VERSION_base(4,6,0)
import Control.Exception (evaluate)
#else
import Control.Exception (evaluate, mask_, onException)
#endif
import qualified Control.Concurrent.MVar as MVar
( MVar
, newEmptyMVar
, newMVar
, takeMVar
, putMVar
, readMVar
, withMVar
, modifyMVar_
, modifyMVar
#if MIN_VERSION_base(4,6,0)
, modifyMVarMasked
#endif
)
import GHC.MVar (MVar(MVar))
import GHC.IO (IO(IO), unIO)
import GHC.Exts (mkWeak#)
import GHC.Weak (Weak(Weak))
newtype StrictMVar a = StrictMVar (MVar.MVar a)
newEmptyMVar :: IO (StrictMVar a)
newEmptyMVar = StrictMVar <$> MVar.newEmptyMVar
newMVar :: a -> IO (StrictMVar a)
newMVar x = evaluate x >> StrictMVar <$> MVar.newMVar x
takeMVar :: StrictMVar a -> IO a
takeMVar (StrictMVar v) = MVar.takeMVar v
putMVar :: StrictMVar a -> a -> IO ()
putMVar (StrictMVar v) x = evaluate x >> MVar.putMVar v x
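-- A small illustration of the head-normal-form behaviour (the values below are
-- assumptions for illustration): 'putMVar' forces only the outermost
-- constructor of the stored value, so
--
-- > putMVar v (1 + 2)          -- stores the evaluated 3
-- > putMVar v (Just undefined) -- succeeds; only the 'Just' is forced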
readMVar :: StrictMVar a -> IO a
readMVar (StrictMVar v) = MVar.readMVar v
withMVar :: StrictMVar a -> (a -> IO b) -> IO b
withMVar (StrictMVar v) = MVar.withMVar v
modifyMVar_ :: StrictMVar a -> (a -> IO a) -> IO ()
modifyMVar_ (StrictMVar v) f = MVar.modifyMVar_ v (f >=> evaluate)
modifyMVar :: StrictMVar a -> (a -> IO (a, b)) -> IO b
modifyMVar (StrictMVar v) f = MVar.modifyMVar v (f >=> evaluateFst)
where
evaluateFst :: (a, b) -> IO (a, b)
evaluateFst (x, y) = evaluate x >> return (x, y)
modifyMVarMasked :: StrictMVar a -> (a -> IO (a, b)) -> IO b
modifyMVarMasked (StrictMVar v) f =
#if MIN_VERSION_base(4,6,0)
MVar.modifyMVarMasked v (f >=> evaluateFst)
#else
mask_ $ do
a <- MVar.takeMVar v
(a',b) <- (f a >>= evaluate) `onException` MVar.putMVar v a
MVar.putMVar v a'
return b
#endif
where
evaluateFst :: (a, b) -> IO (a, b)
evaluateFst (x, y) = evaluate x >> return (x, y)
mkWeakMVar :: StrictMVar a -> IO () -> IO (Weak (StrictMVar a))
mkWeakMVar q@(StrictMVar (MVar m#)) f = IO $ \s ->
#if MIN_VERSION_base(4,9,0)
case mkWeak# m# q (unIO f) s of (# s', w #) -> (# s', Weak w #)
#else
case mkWeak# m# q f s of (# s', w #) -> (# s', Weak w #)
#endif
| haskell-distributed/distributed-process | src/Control/Distributed/Process/Internal/StrictMVar.hs | bsd-3-clause | 2,511 | 0 | 11 | 502 | 853 | 465 | 388 | 64 | 1 |
module Database.PropertyGraph.Neo4jChunked where
{-
import Database.PropertyGraph.Internal (
PropertyGraphT,PropertyGraphF(NewVertex,NewEdge),
VertexId(VertexId,unVertexId))
import Database.PropertyGraph.Neo4jBatch (add,vertexRequest,edgeRequest)
import Control.Proxy (Proxy,Producer,Consumer,liftP,request,respond,(>->),(>=>),toListD,unitU)
import Control.Proxy.Trans.State (StateP,modify,get)
import Control.Monad (forever)
import Control.Monad.IO.Class (MonadIO,liftIO)
import Control.Monad.Trans (lift)
import Control.Monad.Trans.Free (FreeF(Pure,Free),runFreeT)
import Control.Error.Util (note,hush)
import Control.Error.Safe (lastErr,readErr)
import Data.Map (Map)
import Data.List.Split (splitOn)
import qualified Data.Map as Map (lookup,union,empty,insert)
import qualified Data.HashMap.Strict as HashMap (lookup)
import qualified Data.Vector as Vector (foldM')
import Data.Text (pack,unpack)
import qualified Data.Aeson as JSON (Value(Array,Number,String,Object),json,toJSON)
import qualified Data.Attoparsec.Number as Attoparsec (Number(I))
import Network.URI (parseURI,uriPath)
-- | Insert a property graph into the given neo4j database in chunks of the given size.
runPropertyGraphT :: (MonadIO m) => Hostname -> Port -> Int -> PropertyGraphT m r -> m r
runPropertyGraphT hostname port n propertygraph = runProxy $ evalStateK Map.empty $ evalStateK 0 $
interpretPropertyGraphT propertygraph >->
chunk n >->
liftP . (insertPropertyGraphT hostname port)
-- | Interpret a property graph as the list of neo4j requests and a running unique Id.
interpretPropertyGraphT :: (Proxy p,Monad m) => PropertyGraphT m r -> () ->
Producer (StateP Integer (StateP (Map Integer Integer) p)) JSON.Value m r
interpretPropertyGraphT propertygraph () = do
temporaryid <- modify (+1) >> get
next <- lift (runFreeT propertygraph)
case next of
Pure x -> return x
Free (NewVertex properties continue) -> do
respond (vertexRequest properties temporaryid)
interpretPropertyGraphT (continue (VertexId temporaryid)) ()
Free (NewEdge properties label fromid toid continue) -> do
vertexidmap <- liftP get
let temporaryOrExisting tempid = case Map.lookup tempid vertexidmap of
Nothing -> pack ("{"++show tempid++"}")
Just existingid -> pack ("/node/"++show existingid)
fromuri = temporaryOrExisting (unVertexId fromid)
touri = temporaryOrExisting (unVertexId toid)
respond (edgeRequest properties label fromuri touri temporaryid)
interpretPropertyGraphT continue ()
-- | Insert chunks of batch requests into a neo4j database...
insertPropertyGraphT :: (Proxy p,MonadIO m) => Hostname -> Port -> () -> Consumer (StateP (Map Integer Integer) p) [JSON.Value] m r
insertPropertyGraphT hostname port () = forever (do
requestvalues <- request ()
result <- liftIO (add hostname port (JSON.toJSON requestvalues))
case result of
Left e -> liftIO (print e)
Right resultvalue -> do
case matchTemporaryIds resultvalue of
Left e -> liftIO (print e)
Right vertexidmap -> modify (Map.union vertexidmap))
-- | In a neo4j batch insertion response find a match between temporary
-- ids and new ids.
matchTemporaryIds :: JSON.Value -> Either String (Map Integer Integer)
matchTemporaryIds (JSON.Array array) = note "Extracting neo4j response info failed" (Vector.foldM' insertIds Map.empty array)
matchTemporaryIds _ = Left "Neo4j batch response is not an array"
-- | Extract the tempid existingid pair from the given json object and insert it
-- into the given map.
insertIds :: Map Integer Integer -> JSON.Value -> Maybe (Map Integer Integer)
insertIds vertexidmap object = do
(temporaryid,existingid) <- extractIds object
return (Map.insert temporaryid existingid vertexidmap)
-- | From a part of a neo4j batch insertion response extract the id and
-- the id part of the location.
extractIds :: JSON.Value -> Maybe (Integer,Integer)
extractIds (JSON.Object object) = do
tempidvalue <- HashMap.lookup (pack "id") object
tempid <- case tempidvalue of
JSON.Number (Attoparsec.I tempid) -> Just tempid
_ -> Nothing
existingidvalue <- HashMap.lookup (pack "location") object
existingiduriststring <- case existingidvalue of
JSON.String existingiduriststring -> Just existingiduriststring
_ -> Nothing
existingiduri <- parseURI (unpack existingiduriststring)
existingidstring <- hush (lastErr "last of empty list" (splitOn "/" (uriPath existingiduri)))
existingid <- hush (readErr "cannot read id" existingidstring)
return (tempid,existingid)
chunk p = (liftP . p >-> toListD >-> unitU) >=> respond
-} | phischu/property-graph-dsl | src/Database/PropertyGraph/Neo4jChunked.hs | bsd-3-clause | 4,898 | 0 | 3 | 997 | 8 | 6 | 2 | 1 | 0 |
{-# LANGUAGE ForeignFunctionInterface, TypeFamilies, FlexibleInstances,
MultiParamTypeClasses, OverloadedStrings, Rank2Types, EmptyDataDecls,
JavaScriptFFI, RecursiveDo #-}
module FRP.Sodium.GameEngine2D.WebGL where
import FRP.Sodium.GameEngine2D.Cache
import FRP.Sodium.GameEngine2D.CleanMouse
import FRP.Sodium.GameEngine2D.Geometry
import FRP.Sodium.GameEngine2D.Platform
import Control.Applicative
import Control.Concurrent
import Control.Exception
import Control.Monad.Reader
import qualified Data.ByteString.Char8 as C
import Data.IORef
import Data.Monoid
import Data.String
import qualified Data.Text as T
import Data.Time.Clock
import FRP.Sodium
import GHCJS.Foreign
import GHCJS.Types
import GHCJS.Marshal
import Numeric
import System.FilePath
import System.Random (newStdGen)
data WebGL
appendCache :: Maybe (ReaderT SpriteState IO ()) -> Maybe (ReaderT SpriteState IO ()) -> Maybe (ReaderT SpriteState IO ())
appendCache (Just a) (Just b) = Just (a >> b)
appendCache ja@(Just _) _ = ja
appendCache _ mb = mb
instance Monoid (Sprite WebGL) where
mempty = Sprite NullKey ((0,0),(0,0)) Nothing $ return ()
Sprite ka ra ca a `mappend` Sprite kb rb cb b =
Sprite (ka `appendKey` kb) (ra `appendRect` rb) (ca `appendCache` cb) (a >> b)
data GL_
type GL = JSRef GL_
foreign import javascript unsafe "initGL($1,$2,$3)"
initGL :: JSFun (JSRef Float -> JSRef Float -> IO ())
-> JSFun (JSRef Float -> JSRef Float -> IO ())
-> JSFun (JSRef Float -> JSRef Float -> IO ())
-> IO GL
foreign import javascript unsafe "startRendering()"
startRendering :: IO ()
foreign import javascript unsafe "endRendering()"
endRendering :: IO ()
foreign import javascript unsafe "toRedraw()"
toRedraw :: IO Bool
foreign import javascript unsafe "requestAnimFrame2($1)"
requestAnimFrame :: JSFun (IO ()) -> IO ()
foreign import javascript unsafe "window.addEventListener('resize',$1);"
onWindowResize :: JSFun (IO ()) -> IO ()
foreign import javascript unsafe "canvas.offsetWidth"
canvasWidth :: IO Float
foreign import javascript unsafe "canvas.offsetHeight"
canvasHeight :: IO Float
foreign import javascript unsafe "resizeViewport($1,$2)"
resizeViewport :: Float -> Float -> IO ()
data Texture_
type Texture = JSRef Texture_
foreign import javascript unsafe "loadImage"
loadImage :: JSString -> Bool -> IO Texture
foreign import javascript unsafe "drawImage"
drawImage :: Texture -> Float -> Float -> Float -> Float -> Bool -> Float -> IO ()
foreign import javascript unsafe "destroyImage"
destroyImage :: Texture -> IO ()
animate :: IO () -> IO ()
animate drawScene = do
rec
let tick = do
drawScene
requestAnimFrame tick'
tick' <- syncCallback True False tick
requestAnimFrame tick'
data SpriteState = SpriteState {
ssInternals :: Internals WebGL
}
webGLImage :: Bool -> FilePath -> IO (Drawable WebGL)
webGLImage background path = do
let key = ByteStringKey $ C.pack $ takeFileName path
return $ \rect ->
let cacheIt = Just $ do
resPath <- asks (inResPath . ssInternals)
cache <- asks (inCache . ssInternals)
liftIO $ writeCache cache key $ do
--putStrLn $ "loading "++path
tex <- loadImage (fromString (resPath </> path)) background
let draw' ((x,y),(w,h)) = drawImage tex x y w h background 0
cleanup' = destroyImage tex
return (draw', cleanup')
drawIt = do
cache <- asks (inCache . ssInternals)
liftIO $ do
mDraw <- readCache cache key
case mDraw of
Just draw -> draw rect
Nothing -> return ()
in Sprite key rect cacheIt drawIt
-- | Get the time in seconds elapsed since the given reference time.
getTime :: UTCTime -> IO Double
getTime t0 = do
t <- getCurrentTime
return $ realToFrac (t `diffUTCTime` t0)
instance Platform WebGL where
data Args WebGL = WebGLArgs FilePath
data Internals WebGL = WebGLInternals {
inResPath :: FilePath,
inCache :: Cache Rect
}
data Sprite WebGL = Sprite {
spKey :: Key,
spRect :: Rect, -- Bounding box
spCache :: Maybe (ReaderT SpriteState IO ()),
spDraw :: ReaderT SpriteState IO ()
}
data Font WebGL = Font
data Sound WebGL = Sound
type Touch WebGL = ()
engine' (WebGLArgs resPath) game = do
width0 <- canvasWidth
height0 <- canvasHeight
(viewport, sendViewport) <- sync $ newBehavior (width0, height0)
let aspect = uncurry (/) <$> viewport
(time, sendTime) <- sync $ newBehavior 0
(realTime, sendRealTime) <- sync $ newBehavior 0
rng0 <- newStdGen
(eMouse, sendMouse) <- sync newEvent
eCleanMouse <- sync $ cleanMouse eMouse
game GameInput {
giAspect = aspect,
giMouse = eCleanMouse,
giTime = time,
giRNG0 = rng0
} $ \GameOutput { goSprite = bSprite, goMusic = bMusic, goEffects = eEffects } -> do
spriteRef <- newIORef =<< sync (sample bSprite)
updatedRef <- newIORef True
kill <- sync $ listen (updates bSprite) $ \sprite -> do
writeIORef spriteRef sprite
writeIORef updatedRef True
let scaleClick (xx, yy) = do
(width, height) <- sync $ sample viewport
let xscale = 2000 * width / height
x = xscale * (xx / width - 0.5)
y = (-2000) * (yy / height - 0.5)
return (x, y)
blockedRef <- newIORef False
queuedRef <- newIORef Nothing
md <- syncCallback2 True False $ \jx jy -> do
Just xx <- fromJSRef jx :: IO (Maybe Float)
Just yy <- fromJSRef jy :: IO (Maybe Float)
pt <- scaleClick (xx, yy)
writeIORef queuedRef Nothing
sync $ sendMouse $ MouseDown () pt
mu <- syncCallback2 True False $ \jx jy -> do
Just xx <- fromJSRef jx :: IO (Maybe Float)
Just yy <- fromJSRef jy :: IO (Maybe Float)
pt <- scaleClick (xx, yy)
writeIORef queuedRef Nothing
sync $ sendMouse $ MouseUp () pt
mm <- syncCallback2 True False $ \jx jy -> do
Just xx <- fromJSRef jx :: IO (Maybe Float)
Just yy <- fromJSRef jy :: IO (Maybe Float)
pt <- scaleClick (xx, yy)
let mm = MouseMove () pt
blocked <- readIORef blockedRef
if blocked then
writeIORef queuedRef $ Just mm
else do
writeIORef blockedRef True
sync $ sendMouse $ MouseMove () pt
or <- syncCallback True False $ do
w <- canvasWidth
h <- canvasHeight
resizeViewport w h
sync $ sendViewport (w,h)
onWindowResize or
gl <- initGL md mu mm
resizeViewport width0 height0
t0 <- getCurrentTime
tLastEndRef <- newIORef =<< getTime t0
timeLostRef <- newIORef 0
tLastGC <- newIORef 0
cache <- newCache
let internals = WebGLInternals {
inResPath = resPath,
inCache = cache
}
animate $ do
t <- readIORef tLastEndRef
tStart <- getTime t0
lost <- readIORef timeLostRef
iHeight <- sync $ do
sendTime (t - lost)
sendRealTime t
round . snd <$> sample viewport
sprite <- readIORef spriteRef
updated <- readIORef updatedRef
jtor <- toRedraw
let toDraw = updated || jtor
when toDraw $ do
preRunSprite internals iHeight sprite
writeIORef updatedRef False
tEnd <- getTime t0
let lost = tEnd - t
when (lost >= 0.1) $ do
since <- (\last -> tEnd - last) <$> readIORef tLastGC
if lost >= 0.25 && since >= 3 then do
tEnd' <- getTime t0
writeIORef tLastGC tEnd'
let lost' = tEnd' - t
modifyIORef timeLostRef (+lost')
else
modifyIORef timeLostRef (+lost)
when toDraw $ do
startRendering
runSprite internals iHeight sprite True
endRendering
tFinal <- getTime t0
writeIORef tLastEndRef tFinal
{-
putStrLn $ showFFloat (Just 3) (tStart -t) "" ++ " " ++
showFFloat (Just 3) (tEnd - tStart) "" ++ " " ++
showFFloat (Just 3) (tFinal - tEnd) ""
-}
writeIORef blockedRef False
mQueued <- readIORef queuedRef
case mQueued of
Just mm -> do
sync $ sendMouse mm
writeIORef queuedRef Nothing
Nothing -> return ()
return ()
-- Keep callbacks alive
forM_ [(0::Int)..] $ \_ -> threadDelay 60000000
putStrLn "kill everything!"
kill
nullDrawable rect = Sprite NullKey rect Nothing (return ())
sound file = return Sound
image file = webGLImage False file
backgroundImage file = ($ ((0,0),(0,0))) <$> webGLImage True file -- give it a dummy rectangle
retainSound _ _ = return ()
translateSprite v@(vx, vy) (Sprite key rect cache action) = error "WebGL.translateSprite undefined"
createFont resPath ycorr = error "WebGL.createFont undefined"
--uncachedLabel rect@((posX, posY), _) (Color4 r g b _) text = error "WebGL.uncachedLabel undefined"
key k s = s { spKey = k }
keyOf d = spKey $ d ((0,0),(0,0))
setBoundingBox r s = s { spRect = r }
cache toMultisample (Sprite key rect@(pos, _) cache1 action) = error "WebGL.cache undefined"
fade brightness (Sprite key rect cache action) = error "WebGL.fade undefined"
shrink factor (Sprite key rect@((posX,posY),_) cache action) = error "WebGL.shrink undefined"
preRunSprite internals brightness (Sprite _ _ mCache action) = do
let ss = SpriteState {
ssInternals = internals
}
case mCache of
Just cache -> runReaderT cache ss
Nothing -> return ()
runSprite internals brightness (Sprite _ _ mCache action) flip = do
let ss = SpriteState {
ssInternals = internals
}
runReaderT action ss
when flip $ flipCache (inCache internals)
audioThread bSounds = error "WebGL.audioThread undefined"
clockwiseSprite (Sprite key rect cache action) = error "WebGL.clockwiseSprite undefined"
anticlockwiseSprite (Sprite key rect cache action) = error "WebGL.anticlockwiseSprite undefined"
rotateSprite theta (Sprite key rect@((posX, posY), _) cache action) = error "WebGL.rotateSprite undefined"
-- Make this sprite invisible, but allow it to cache anything in the invisible sprite
invisible (Sprite key rect cache _) = Sprite key rect cache (return ())
launchURL _ url = error "WebGL.launchURL undefined"
getSystemLanguage _ = return "en"
| the-real-blackh/sodium-2d-game-engine | FRP/Sodium/GameEngine2D/WebGL.hs | bsd-3-clause | 12,505 | 39 | 24 | 4,770 | 3,397 | 1,676 | 1,721 | -1 | -1 |
module Main where
import Test.QuickCheck
import Test.Framework
import Test.Framework.Providers.QuickCheck2(testProperty)
import TestCompare
import TestArithmetic
main = do defaultMainWithOpts tests opts
opts = RunnerOptions (Just 3) (Just testopts) Nothing
testopts = TestOptions (Just RandomSeed) (Just 10000) Nothing Nothing
tests = [testGroup "Comparison functions"
[testProperty "cmp" prop_cmp,
testProperty "isNaN" prop_isNaN,
testProperty "isNumber" prop_isNumber,
testProperty "isZero" prop_isZero,
testProperty "isInfinite" prop_isInfinite,
testProperty "sgn" prop_sgn],
testGroup "Basic arithmetic functions"
[testProperty "add" prop_add,
testProperty "sub" prop_sub,
testProperty "mul" prop_mul,
testProperty "div" prop_div,
testProperty "addw" prop_addw,
testProperty "addi" prop_addi,
testProperty "addd" prop_addd,
testProperty "subw" prop_subw,
testProperty "subi" prop_subi,
testProperty "subd" prop_subd,
testProperty "wsub" prop_wsub,
testProperty "isub" prop_isub,
testProperty "dsub" prop_dsub,
testProperty "mulw" prop_mulw,
testProperty "muli" prop_muli,
testProperty "muld" prop_muld,
testProperty "divw" prop_divw,
testProperty "divi" prop_divi,
testProperty "divd" prop_divd,
testProperty "wdiv" prop_wdiv,
testProperty "idiv" prop_idiv,
testProperty "ddiv" prop_ddiv
]
]
| ekmett/hmpfr | tests/RunTests.hs | bsd-3-clause | 2,114 | 0 | 8 | 949 | 340 | 175 | 165 | 39 | 1 |
module Network.Spata
(
module Network.Spata.DSL
) where
import Network.Spata.DSL hiding (p, predicate) | nfjinjing/spata | src/Network/Spata.hs | bsd-3-clause | 106 | 0 | 5 | 15 | 30 | 20 | 10 | 4 | 0 |
module Rad.QL
( module Rad.QL.Types
, module Rad.QL.Define
, Schema
, runQuery
) where
import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as BC8
import Data.Monoid ((<>))
import qualified Data.Aeson as JSON
import Rad.QL.Internal.Builders
import Rad.QL.Internal.Types
import Rad.QL.AST
import Rad.QL.Parser (parseDocument)
import Rad.QL.Types
import Rad.QL.Query (Schema, execute)
import Rad.QL.Define
runQuery :: (Monad m) => Schema m -> B.ByteString -> JSON.Value -> m Builder
runQuery s q v = collect
<$> either (\e -> return (byteString "null", [ BC8.pack e ]))
(execute s v)
(parseDocument q)
where collect (res, []) = byteString "{\"data\":" <> res <> byteString "}"
collect (res, es) = byteString "{\"data\":"
<> res
<> byteString ", \"errors\": "
<> joinList [ buildString e | e <- es ]
<> byteString "}"
| jqyu/bustle-chi | src/Rad/QL.hs | bsd-3-clause | 1,068 | 0 | 13 | 352 | 311 | 178 | 133 | 27 | 2 |
module Helpers where
import Control.Monad.Trans
import Data.Char
import qualified Data.List as List
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import System.IO
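-- | Drop the given prefix and lower-case the first remaining character; calls
-- 'error' if the prefix is absent (or nothing follows it). For example (an
-- illustrative note, not from the original source):
--
-- > dropPrefix "get" "getName" == "name"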
dropPrefix :: [Char] -> [Char] -> [Char]
dropPrefix pre str =
case pre `List.stripPrefix` str of
Just (n:ns) -> toLower n : ns
    _ -> error "prefix not found"
debug :: MonadIO m => Text -> m ()
debug = liftIO . Text.hPutStrLn stderr
showText :: Show a => a -> Text
showText = Text.pack . show
| nejla/auth-service | auth-service-core/src/Helpers.hs | bsd-3-clause | 564 | 0 | 10 | 143 | 193 | 109 | 84 | 17 | 2 |
module ThriftUtils (remoteMesh,sblToV,vToSB) where
import Control.Applicative
import Control.Monad
import Data.ByteString.Char8 (ByteString)
import Data.Int
import Foreign.Marshal.Utils
import Foreign.Ptr
import Foreign.Storable
import Network
import qualified Data.ByteString.Char8 as SB
import qualified Data.ByteString.Lazy as LB
import qualified Data.Vector.Storable as V
import qualified Data.Vector.Storable.Mutable as MV
import qualified GraphicsPipeline as GFX
import Thrift.Protocol.Binary
import Thrift.Transport.Handle
import Thrift.ContentProvider_Client
import Thrift.Content_Types
import System.IO.Unsafe (unsafePerformIO)
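-- | Copy a list of strict ByteStrings into a single storable vector. The total
-- byte length is assumed to be a whole multiple of the element size, which the
-- 'div' below relies on.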
sblToV :: Storable a => [SB.ByteString] -> V.Vector a
sblToV ls = v
where
offs o (s:xs) = (o,s):offs (o + SB.length s) xs
offs _ [] = []
cnt = sum (map SB.length ls) `div` (sizeOf $ V.head v)
v = unsafePerformIO $ do
mv <- MV.new cnt
MV.unsafeWith mv $ \dst -> forM_ (offs 0 ls) $ \(o,s) ->
SB.useAsCStringLen s $ \(src,len) -> moveBytes (plusPtr dst o) src len
V.unsafeFreeze mv
vToSB :: Storable a => V.Vector a -> SB.ByteString
vToSB v = unsafePerformIO $ do
let len = V.length v * sizeOf (V.head v)
V.unsafeWith v $ \p -> SB.packCStringLen (castPtr p,len)
toV :: Storable a => [LB.ByteString] -> V.Vector a
toV lb = sblToV $ concatMap LB.toChunks lb
unpackAttribute :: VertexAttribute -> (ByteString,GFX.Attribute)
unpackAttribute (VertexAttribute (Just an) (Just at) (Just ad)) = (,) (SB.pack an) $ case at of
AT_Float -> GFX.A_Float $ toV ad
AT_Vec2 -> GFX.A_Vec2 $ toV ad
AT_Vec3 -> GFX.A_Vec3 $ toV ad
AT_Vec4 -> GFX.A_Vec4 $ toV ad
AT_Mat2 -> GFX.A_Mat2 $ toV ad
AT_Mat3 -> GFX.A_Mat3 $ toV ad
AT_Mat4 -> GFX.A_Mat4 $ toV ad
AT_Int -> GFX.A_Int $ toV ad
AT_Word -> GFX.A_Word $ toV ad
remoteMesh :: ByteString -> IO GFX.Mesh
remoteMesh name = do
let toVInt :: V.Vector Int32 -> V.Vector Int
toVInt = V.map fromIntegral
p <- BinaryProtocol <$> hOpen ("localhost", PortNumber 9090)
Mesh (Just attrs) (Just prim) idx <- downloadMesh (p,p) $ SB.unpack name
return $ GFX.Mesh (map unpackAttribute attrs) $ case (prim,idx) of
(PT_Points,Nothing) -> GFX.Points
(PT_TriangleStrip,Nothing) -> GFX.TriangleStrip
(PT_Triangles,Nothing) -> GFX.Triangles
(PT_TriangleStrip,Just i) -> GFX.TriangleStripI $ toVInt $ toV i
(PT_Triangles,Just i) -> GFX.TrianglesI $ toVInt $ toV i
_ -> error "Invalid primitive!"
| csabahruska/GFXDemo | ThriftUtils.hs | bsd-3-clause | 2,606 | 0 | 18 | 603 | 986 | 513 | 473 | 59 | 9 |
{-# LANGUAGE PatternGuards #-}
-- | Class for an imperative ABC runtime
module ABC.Imperative.Runtime
( Runtime(..)
, invokeDefault
) where
import Control.Applicative
import ABC.Imperative.Value
-- | an ABC environment must handle capability tokens.
-- (Though, it is free to fail if a token is unrecognized.)
-- todo: consider switching to `String -> cx (Prog cx)` for staging.
class (Monad cx, Applicative cx) => Runtime cx where
invoke :: String -> Prog cx
invoke = invokeDefault
instance Runtime IO
-- | invokeDefault is a suitable call for unrecognized tokens
--
-- This will pass unrecognized annotations, handle some standard
-- annotations, and also handle discretionary seal and unseal
-- actions. Otherwise, it will fail with an appropriate message.
invokeDefault :: (Monad cx) => String -> Prog cx
invokeDefault ('&':anno) = invokeAnno anno
invokeDefault s@(':':_) = seal s
invokeDefault ('.':s) = unseal s
invokeDefault tok = fail . emsg where
emsg v = "{" ++ tok ++ "} token not recognized @ " ++ show v
seal :: (Monad cx) => String -> V cx -> cx (V cx)
seal s (P v e) = return (P (S s v) e)
seal s v = fail $ "{" ++ s ++ "} (seal) @ " ++ show v
unseal :: (Monad cx) => String -> V cx -> cx (V cx)
unseal s (P (S (':':s') v) e) | (s == s') = return (P v e)
unseal s v = fail $ "{." ++ s ++ "} (unseal) @ " ++ show v
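-- A small illustration of the discretionary sealing behaviour (the values are
-- assumptions for illustration): sealing wraps the first element of the pair,
-- and unsealing with the matching token removes the wrapper again, so
--
-- > seal ":foo" (P v e)              -- behaves like: return (P (S ":foo" v) e)
-- > unseal "foo" (P (S ":foo" v) e)  -- behaves like: return (P v e)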
-- default annotations... only one right now
invokeAnno :: (Monad cx) => String -> Prog cx
invokeAnno ('≡':[]) = assertEQ
invokeAnno _ = return
| dmbarbour/awelon | hsrc/ABC/Imperative/Runtime.hs | bsd-3-clause | 1,502 | 0 | 11 | 310 | 470 | 245 | 225 | 25 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE Trustworthy #-} -- can't use Safe due to IsList instance
{-# LANGUAGE TypeFamilies #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.List.NonEmpty
-- Copyright : (C) 2011-2015 Edward Kmett,
-- (C) 2010 Tony Morris, Oliver Taylor, Eelis van der Weegen
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : [email protected]
-- Stability : provisional
-- Portability : portable
--
-- A 'NonEmpty' list is one which always has at least one element, but
-- is otherwise identical to the traditional list type in complexity
-- and in terms of API. You will almost certainly want to import this
-- module @qualified@.
--
-- @since 4.8.2.0
----------------------------------------------------------------------------
module Data.List.NonEmpty (
-- * The type of non-empty streams
NonEmpty(..)
-- * Non-empty stream transformations
, map -- :: (a -> b) -> NonEmpty a -> NonEmpty b
, intersperse -- :: a -> NonEmpty a -> NonEmpty a
, scanl -- :: Foldable f => (b -> a -> b) -> b -> f a -> NonEmpty b
, scanr -- :: Foldable f => (a -> b -> b) -> b -> f a -> NonEmpty b
, scanl1 -- :: (a -> a -> a) -> NonEmpty a -> NonEmpty a
, scanr1 -- :: (a -> a -> a) -> NonEmpty a -> NonEmpty a
, transpose -- :: NonEmpty (NonEmpty a) -> NonEmpty (NonEmpty a)
, sortBy -- :: (a -> a -> Ordering) -> NonEmpty a -> NonEmpty a
, sortWith -- :: Ord o => (a -> o) -> NonEmpty a -> NonEmpty a
-- * Basic functions
, length -- :: NonEmpty a -> Int
, head -- :: NonEmpty a -> a
, tail -- :: NonEmpty a -> [a]
, last -- :: NonEmpty a -> a
, init -- :: NonEmpty a -> [a]
, (<|), cons -- :: a -> NonEmpty a -> NonEmpty a
, uncons -- :: NonEmpty a -> (a, Maybe (NonEmpty a))
, unfoldr -- :: (a -> (b, Maybe a)) -> a -> NonEmpty b
, sort -- :: NonEmpty a -> NonEmpty a
, reverse -- :: NonEmpty a -> NonEmpty a
, inits -- :: Foldable f => f a -> NonEmpty a
, tails -- :: Foldable f => f a -> NonEmpty a
-- * Building streams
, iterate -- :: (a -> a) -> a -> NonEmpty a
, repeat -- :: a -> NonEmpty a
, cycle -- :: NonEmpty a -> NonEmpty a
, unfold -- :: (a -> (b, Maybe a) -> a -> NonEmpty b
, insert -- :: (Foldable f, Ord a) => a -> f a -> NonEmpty a
, some1 -- :: Alternative f => f a -> f (NonEmpty a)
-- * Extracting sublists
, take -- :: Int -> NonEmpty a -> [a]
, drop -- :: Int -> NonEmpty a -> [a]
, splitAt -- :: Int -> NonEmpty a -> ([a], [a])
, takeWhile -- :: Int -> NonEmpty a -> [a]
, dropWhile -- :: Int -> NonEmpty a -> [a]
, span -- :: Int -> NonEmpty a -> ([a],[a])
, break -- :: Int -> NonEmpty a -> ([a],[a])
, filter -- :: (a -> Bool) -> NonEmpty a -> [a]
, partition -- :: (a -> Bool) -> NonEmpty a -> ([a],[a])
, group -- :: Foldable f => Eq a => f a -> [NonEmpty a]
, groupBy -- :: Foldable f => (a -> a -> Bool) -> f a -> [NonEmpty a]
, groupWith -- :: (Foldable f, Eq b) => (a -> b) -> f a -> [NonEmpty a]
, groupAllWith -- :: (Foldable f, Ord b) => (a -> b) -> f a -> [NonEmpty a]
, group1 -- :: Eq a => NonEmpty a -> NonEmpty (NonEmpty a)
, groupBy1 -- :: (a -> a -> Bool) -> NonEmpty a -> NonEmpty (NonEmpty a)
, groupWith1 -- :: (Foldable f, Eq b) => (a -> b) -> f a -> NonEmpty (NonEmpty a)
, groupAllWith1 -- :: (Foldable f, Ord b) => (a -> b) -> f a -> NonEmpty (NonEmpty a)
-- * Sublist predicates
, isPrefixOf -- :: Foldable f => f a -> NonEmpty a -> Bool
-- * \"Set\" operations
, nub -- :: Eq a => NonEmpty a -> NonEmpty a
, nubBy -- :: (a -> a -> Bool) -> NonEmpty a -> NonEmpty a
-- * Indexing streams
, (!!) -- :: NonEmpty a -> Int -> a
-- * Zipping and unzipping streams
, zip -- :: NonEmpty a -> NonEmpty b -> NonEmpty (a,b)
, zipWith -- :: (a -> b -> c) -> NonEmpty a -> NonEmpty b -> NonEmpty c
, unzip -- :: NonEmpty (a, b) -> (NonEmpty a, NonEmpty b)
-- * Functions on streams of characters
, words -- :: NonEmpty Char -> NonEmpty String
, unwords -- :: NonEmpty String -> NonEmpty Char
, lines -- :: NonEmpty Char -> NonEmpty String
, unlines -- :: NonEmpty String -> NonEmpty Char
-- * Converting to and from a list
, fromList -- :: [a] -> NonEmpty a
, toList -- :: NonEmpty a -> [a]
, nonEmpty -- :: [a] -> Maybe (NonEmpty a)
  , xor          -- :: NonEmpty Bool -> Bool
) where
import Prelude hiding (break, cycle, drop, dropWhile,
filter, foldl, foldr, head, init, iterate,
last, length, lines, map, repeat, reverse,
scanl, scanl1, scanr, scanr1, span,
splitAt, tail, take, takeWhile, unlines,
unwords, unzip, words, zip, zipWith, (!!))
import qualified Prelude
import Control.Applicative (Alternative, many)
import Control.Monad (ap)
import Control.Monad.Fix
import Control.Monad.Zip (MonadZip(..))
import Data.Data (Data)
import Data.Foldable hiding (length, toList)
import qualified Data.Foldable as Foldable
import Data.Function (on)
import qualified Data.List as List
import Data.Ord (comparing)
import qualified GHC.Exts as Exts (IsList(..))
import GHC.Generics (Generic, Generic1)
infixr 5 :|, <|
-- | Non-empty (and non-strict) list type.
--
-- @since 4.8.2.0
data NonEmpty a = a :| [a]
deriving ( Eq, Ord, Show, Read, Data, Generic, Generic1 )
instance Exts.IsList (NonEmpty a) where
type Item (NonEmpty a) = a
fromList = fromList
toList = toList
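-- With the @OverloadedLists@ extension enabled in client code, list literals
-- can be used directly at this type; for instance (illustrative):
--
-- > [1,2,3] :: NonEmpty Int  -- 1 :| [2,3]
--
-- An empty literal goes through 'fromList' below and therefore raises an error.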
instance MonadFix NonEmpty where
mfix f = case fix (f . head) of
~(x :| _) -> x :| mfix (tail . f)
instance MonadZip NonEmpty where
mzip = zip
mzipWith = zipWith
munzip = unzip
-- | Number of elements in 'NonEmpty' list.
length :: NonEmpty a -> Int
length (_ :| xs) = 1 + Prelude.length xs
-- | Compute the n-ary logical exclusive OR of a 'NonEmpty' list of Booleans.
xor :: NonEmpty Bool -> Bool
xor (x :| xs) = foldr xor' x xs
where xor' True y = not y
xor' False y = y
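-- The result is 'True' exactly when an odd number of elements are 'True';
-- a couple of illustrative checks:
--
-- > xor (True :| [False, False]) == True
-- > xor (True :| [True, False]) == False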
-- | 'unfold' produces a new stream by repeatedly applying the unfolding
-- function to the seed value to produce an element of type @b@ and a new
-- seed value. When the unfolding function returns 'Nothing' instead of
-- a new seed value, the stream ends.
unfold :: (a -> (b, Maybe a)) -> a -> NonEmpty b
unfold f a = case f a of
(b, Nothing) -> b :| []
(b, Just c) -> b <| unfold f c
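-- An illustrative use, counting down from a seed until the unfolding function
-- stops producing a new seed:
--
-- > unfold (\n -> (n, if n > 1 then Just (n - 1) else Nothing)) 3 == 3 :| [2, 1]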
-- | 'nonEmpty' efficiently turns a normal list into a 'NonEmpty' stream,
-- producing 'Nothing' if the input is empty.
nonEmpty :: [a] -> Maybe (NonEmpty a)
nonEmpty [] = Nothing
nonEmpty (a:as) = Just (a :| as)
-- | 'uncons' produces the first element of the stream, and a stream of the
-- remaining elements, if any.
uncons :: NonEmpty a -> (a, Maybe (NonEmpty a))
uncons ~(a :| as) = (a, nonEmpty as)
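-- Illustrative examples:
--
-- > uncons (1 :| [2, 3]) == (1, Just (2 :| [3]))
-- > uncons (1 :| []) == (1, Nothing)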
-- | The 'unfoldr' function is analogous to "Data.List"'s
-- 'Data.List.unfoldr' operation.
unfoldr :: (a -> (b, Maybe a)) -> a -> NonEmpty b
unfoldr f a = case f a of
(b, mc) -> b :| maybe [] go mc
where
go c = case f c of
(d, me) -> d : maybe [] go me
instance Functor NonEmpty where
fmap f ~(a :| as) = f a :| fmap f as
b <$ ~(_ :| as) = b :| (b <$ as)
instance Applicative NonEmpty where
pure a = a :| []
(<*>) = ap
instance Monad NonEmpty where
~(a :| as) >>= f = b :| (bs ++ bs')
where b :| bs = f a
bs' = as >>= toList . f
instance Traversable NonEmpty where
traverse f ~(a :| as) = (:|) <$> f a <*> traverse f as
instance Foldable NonEmpty where
foldr f z ~(a :| as) = f a (foldr f z as)
foldl f z ~(a :| as) = foldl f (f z a) as
foldl1 f ~(a :| as) = foldl f a as
foldMap f ~(a :| as) = f a `mappend` foldMap f as
fold ~(m :| ms) = m `mappend` fold ms
-- | Extract the first element of the stream.
head :: NonEmpty a -> a
head ~(a :| _) = a
-- | Extract the possibly-empty tail of the stream.
tail :: NonEmpty a -> [a]
tail ~(_ :| as) = as
-- | Extract the last element of the stream.
last :: NonEmpty a -> a
last ~(a :| as) = List.last (a : as)
-- | Extract everything except the last element of the stream.
init :: NonEmpty a -> [a]
init ~(a :| as) = List.init (a : as)
-- | Prepend an element to the stream.
(<|) :: a -> NonEmpty a -> NonEmpty a
a <| ~(b :| bs) = a :| b : bs
-- | Synonym for '<|'.
cons :: a -> NonEmpty a -> NonEmpty a
cons = (<|)
-- | Sort a stream.
sort :: Ord a => NonEmpty a -> NonEmpty a
sort = lift List.sort
-- | Converts a normal list to a 'NonEmpty' stream.
--
-- Raises an error if given an empty list.
fromList :: [a] -> NonEmpty a
fromList (a:as) = a :| as
fromList [] = error "NonEmpty.fromList: empty list"
-- | Convert a stream to a normal list efficiently.
toList :: NonEmpty a -> [a]
toList ~(a :| as) = a : as
-- | Lift list operations to work on a 'NonEmpty' stream.
--
-- /Beware/: If the provided function returns an empty list,
-- this will raise an error.
lift :: Foldable f => ([a] -> [b]) -> f a -> NonEmpty b
lift f = fromList . f . Foldable.toList
-- | Map a function over a 'NonEmpty' stream.
map :: (a -> b) -> NonEmpty a -> NonEmpty b
map f ~(a :| as) = f a :| fmap f as
-- | The 'inits' function takes a stream @xs@ and returns all the
-- finite prefixes of @xs@.
inits :: Foldable f => f a -> NonEmpty [a]
inits = fromList . List.inits . Foldable.toList
-- | The 'tails' function takes a stream @xs@ and returns all the
-- suffixes of @xs@.
tails :: Foldable f => f a -> NonEmpty [a]
tails = fromList . List.tails . Foldable.toList
-- | @'insert' x xs@ inserts @x@ into the last position in @xs@ where it
-- is still less than or equal to the next element. In particular, if the
-- list is sorted beforehand, the result will also be sorted.
insert :: (Foldable f, Ord a) => a -> f a -> NonEmpty a
insert a = fromList . List.insert a . Foldable.toList
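-- An illustrative example on an already sorted input (an ordinary list, used
-- through its 'Foldable' instance):
--
-- > insert 3 [1, 2, 4] == 1 :| [2, 3, 4]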
-- | @'some1' x@ sequences @x@ one or more times.
some1 :: Alternative f => f a -> f (NonEmpty a)
some1 x = (:|) <$> x <*> many x
-- | 'scanl' is similar to 'foldl', but returns a stream of successive
-- reduced values from the left:
--
-- > scanl f z [x1, x2, ...] == z :| [z `f` x1, (z `f` x1) `f` x2, ...]
--
-- Note that
--
-- > last (scanl f z xs) == foldl f z xs.
scanl :: Foldable f => (b -> a -> b) -> b -> f a -> NonEmpty b
scanl f z = fromList . List.scanl f z . Foldable.toList
-- | 'scanr' is the right-to-left dual of 'scanl'.
-- Note that
--
-- > head (scanr f z xs) == foldr f z xs.
scanr :: Foldable f => (a -> b -> b) -> b -> f a -> NonEmpty b
scanr f z = fromList . List.scanr f z . Foldable.toList
-- | 'scanl1' is a variant of 'scanl' that has no starting value argument:
--
-- > scanl1 f [x1, x2, ...] == x1 :| [x1 `f` x2, x1 `f` (x2 `f` x3), ...]
scanl1 :: (a -> a -> a) -> NonEmpty a -> NonEmpty a
scanl1 f ~(a :| as) = fromList (List.scanl f a as)
-- | 'scanr1' is a variant of 'scanr' that has no starting value argument.
scanr1 :: (a -> a -> a) -> NonEmpty a -> NonEmpty a
scanr1 f ~(a :| as) = fromList (List.scanr1 f (a:as))
-- | @'intersperse' x xs@ alternates elements of the list with copies of @x@.
--
-- > intersperse 0 (1 :| [2,3]) == 1 :| [0,2,0,3]
intersperse :: a -> NonEmpty a -> NonEmpty a
intersperse a ~(b :| bs) = b :| case bs of
[] -> []
_ -> a : List.intersperse a bs
-- | @'iterate' f x@ produces the infinite sequence
-- of repeated applications of @f@ to @x@.
--
-- > iterate f x = x :| [f x, f (f x), ..]
iterate :: (a -> a) -> a -> NonEmpty a
iterate f a = a :| List.iterate f (f a)
-- | @'cycle' xs@ returns the infinite repetition of @xs@:
--
-- > cycle [1,2,3] = 1 :| [2,3,1,2,3,...]
cycle :: NonEmpty a -> NonEmpty a
cycle = fromList . List.cycle . toList
-- | 'reverse' a finite NonEmpty stream.
reverse :: NonEmpty a -> NonEmpty a
reverse = lift List.reverse
-- | @'repeat' x@ returns a constant stream, where all elements are
-- equal to @x@.
repeat :: a -> NonEmpty a
repeat a = a :| List.repeat a
-- | @'take' n xs@ returns the first @n@ elements of @xs@.
take :: Int -> NonEmpty a -> [a]
take n = List.take n . toList
-- | @'drop' n xs@ drops the first @n@ elements off the front of
-- the sequence @xs@.
drop :: Int -> NonEmpty a -> [a]
drop n = List.drop n . toList
-- | @'splitAt' n xs@ returns a pair consisting of the prefix of @xs@
-- of length @n@ and the remaining stream immediately following this prefix.
--
-- > 'splitAt' n xs == ('take' n xs, 'drop' n xs)
-- > xs == ys ++ zs where (ys, zs) = 'splitAt' n xs
splitAt :: Int -> NonEmpty a -> ([a],[a])
splitAt n = List.splitAt n . toList
-- | @'takeWhile' p xs@ returns the longest prefix of the stream
-- @xs@ for which the predicate @p@ holds.
takeWhile :: (a -> Bool) -> NonEmpty a -> [a]
takeWhile p = List.takeWhile p . toList
-- | @'dropWhile' p xs@ returns the suffix remaining after
-- @'takeWhile' p xs@.
dropWhile :: (a -> Bool) -> NonEmpty a -> [a]
dropWhile p = List.dropWhile p . toList
-- | @'span' p xs@ returns the longest prefix of @xs@ that satisfies
-- @p@, together with the remainder of the stream.
--
-- > 'span' p xs == ('takeWhile' p xs, 'dropWhile' p xs)
-- > xs == ys ++ zs where (ys, zs) = 'span' p xs
span :: (a -> Bool) -> NonEmpty a -> ([a], [a])
span p = List.span p . toList
-- | The @'break' p@ function is equivalent to @'span' (not . p)@.
break :: (a -> Bool) -> NonEmpty a -> ([a], [a])
break p = span (not . p)
-- | @'filter' p xs@ removes any elements from @xs@ that do not satisfy @p@.
filter :: (a -> Bool) -> NonEmpty a -> [a]
filter p = List.filter p . toList
-- | The 'partition' function takes a predicate @p@ and a stream
-- @xs@, and returns a pair of lists. The first list corresponds to the
-- elements of @xs@ for which @p@ holds; the second corresponds to the
-- elements of @xs@ for which @p@ does not hold.
--
-- > 'partition' p xs = ('filter' p xs, 'filter' (not . p) xs)
partition :: (a -> Bool) -> NonEmpty a -> ([a], [a])
partition p = List.partition p . toList
-- | The 'group' function takes a stream and returns a list of
-- streams such that flattening the resulting list is equal to the
-- argument. Moreover, each stream in the resulting list
-- contains only equal elements. For example, in list notation:
--
-- > 'group' $ 'cycle' "Mississippi"
-- > = "M" : "i" : "ss" : "i" : "ss" : "i" : "pp" : "i" : "M" : "i" : ...
group :: (Foldable f, Eq a) => f a -> [NonEmpty a]
group = groupBy (==)
-- | 'groupBy' operates like 'group', but uses the provided equality
-- predicate instead of `==`.
groupBy :: Foldable f => (a -> a -> Bool) -> f a -> [NonEmpty a]
groupBy eq0 = go eq0 . Foldable.toList
where
go _ [] = []
go eq (x : xs) = (x :| ys) : groupBy eq zs
where (ys, zs) = List.span (eq x) xs
-- | 'groupWith' operates like 'group', but uses the provided projection when
-- comparing for equality.
groupWith :: (Foldable f, Eq b) => (a -> b) -> f a -> [NonEmpty a]
groupWith f = groupBy ((==) `on` f)
-- | 'groupAllWith' operates like 'groupWith', but sorts the list
-- first so that each equivalence class has, at most, one list in the
-- output.
groupAllWith :: (Ord b) => (a -> b) -> [a] -> [NonEmpty a]
groupAllWith f = groupWith f . List.sortBy (compare `on` f)
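-- For instance (illustrative), grouping by remainder modulo 3; the sort is
-- stable, so the original relative order is preserved within each group:
--
-- > groupAllWith (`mod` 3) [1..6] == [3 :| [6], 1 :| [4], 2 :| [5]]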
-- | 'group1' operates like 'group', but uses the knowledge that its
-- input is non-empty to produce guaranteed non-empty output.
group1 :: Eq a => NonEmpty a -> NonEmpty (NonEmpty a)
group1 = groupBy1 (==)
-- | 'groupBy1' is to 'group1' as 'groupBy' is to 'group'.
groupBy1 :: (a -> a -> Bool) -> NonEmpty a -> NonEmpty (NonEmpty a)
groupBy1 eq (x :| xs) = (x :| ys) :| groupBy eq zs
where (ys, zs) = List.span (eq x) xs
-- | 'groupWith1' is to 'group1' as 'groupWith' is to 'group'.
groupWith1 :: (Eq b) => (a -> b) -> NonEmpty a -> NonEmpty (NonEmpty a)
groupWith1 f = groupBy1 ((==) `on` f)
-- | 'groupAllWith1' is to 'groupWith1' as 'groupAllWith' is to 'groupWith'.
groupAllWith1 :: (Ord b) => (a -> b) -> NonEmpty a -> NonEmpty (NonEmpty a)
groupAllWith1 f = groupWith1 f . sortWith f
-- | The 'isPrefixOf' function returns @True@ if the first argument is
-- a prefix of the second.
isPrefixOf :: Eq a => [a] -> NonEmpty a -> Bool
isPrefixOf [] _ = True
isPrefixOf (y:ys) (x :| xs) = (y == x) && List.isPrefixOf ys xs
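-- Illustrative examples:
--
-- > [1, 2] `isPrefixOf` (1 :| [2, 3]) == True
-- > [2] `isPrefixOf` (1 :| [2, 3]) == False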
-- | @xs !! n@ returns the element of the stream @xs@ at index
-- @n@. Note that the head of the stream has index 0.
--
-- /Beware/: a negative or out-of-bounds index will cause an error.
(!!) :: NonEmpty a -> Int -> a
(!!) ~(x :| xs) n
| n == 0 = x
| n > 0 = xs List.!! (n - 1)
| otherwise = error "NonEmpty.!! negative argument"
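-- Illustrative examples:
--
-- > (1 :| [2, 3]) !! 0 == 1
-- > (1 :| [2, 3]) !! 2 == 3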
-- | The 'zip' function takes two streams and returns a stream of
-- corresponding pairs.
zip :: NonEmpty a -> NonEmpty b -> NonEmpty (a,b)
zip ~(x :| xs) ~(y :| ys) = (x, y) :| List.zip xs ys
-- | The 'zipWith' function generalizes 'zip'. Rather than tupling
-- the elements, the elements are combined using the function
-- passed as the first argument.
zipWith :: (a -> b -> c) -> NonEmpty a -> NonEmpty b -> NonEmpty c
zipWith f ~(x :| xs) ~(y :| ys) = f x y :| List.zipWith f xs ys
-- | The 'unzip' function is the inverse of the 'zip' function.
unzip :: Functor f => f (a,b) -> (f a, f b)
unzip xs = (fst <$> xs, snd <$> xs)
-- | The 'words' function breaks a stream of characters into a
-- stream of words, which were delimited by white space.
--
-- /Beware/: if the input contains no words (i.e. is entirely
-- whitespace), this will cause an error.
words :: NonEmpty Char -> NonEmpty String
words = lift List.words
-- | The 'unwords' function is an inverse operation to 'words'. It
-- joins words with separating spaces.
--
-- /Beware/: the input @(\"\" :| [])@ will cause an error.
unwords :: NonEmpty String -> NonEmpty Char
unwords = lift List.unwords
-- | The 'lines' function breaks a stream of characters into a stream
-- of strings at newline characters. The resulting strings do not
-- contain newlines.
lines :: NonEmpty Char -> NonEmpty String
lines = lift List.lines
-- | The 'unlines' function is an inverse operation to 'lines'. It
-- joins lines, after appending a terminating newline to each.
unlines :: NonEmpty String -> NonEmpty Char
unlines = lift List.unlines
-- | The 'nub' function removes duplicate elements from a list. In
-- particular, it keeps only the first occurrence of each element.
-- (The name 'nub' means \'essence\'.)
-- It is a special case of 'nubBy', which allows the programmer to
-- supply their own inequality test.
nub :: Eq a => NonEmpty a -> NonEmpty a
nub = nubBy (==)
-- | The 'nubBy' function behaves just like 'nub', except it uses a
-- user-supplied equality predicate instead of the overloaded '=='
-- function.
nubBy :: (a -> a -> Bool) -> NonEmpty a -> NonEmpty a
nubBy eq (a :| as) = a :| List.nubBy eq (List.filter (\b -> not (eq a b)) as)
-- | 'transpose' for 'NonEmpty' behaves the same as 'Data.List.transpose'.
-- The rows/columns need not be the same length, in which case
-- > transpose . transpose /= id
transpose :: NonEmpty (NonEmpty a) -> NonEmpty (NonEmpty a)
transpose = fmap fromList
. fromList . List.transpose . Foldable.toList
. fmap Foldable.toList
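-- An illustrative example on a rectangular input:
--
-- > transpose ((1 :| [2]) :| [3 :| [4]]) == (1 :| [3]) :| [2 :| [4]]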
-- | 'sortBy' for 'NonEmpty' behaves the same as 'Data.List.sortBy'.
sortBy :: (a -> a -> Ordering) -> NonEmpty a -> NonEmpty a
sortBy f = lift (List.sortBy f)
-- | 'sortWith' for 'NonEmpty' behaves the same as:
--
-- > sortBy . comparing
sortWith :: Ord o => (a -> o) -> NonEmpty a -> NonEmpty a
sortWith = sortBy . comparing
| siddhanathan/ghc | libraries/base/Data/List/NonEmpty.hs | bsd-3-clause | 20,028 | 0 | 13 | 5,032 | 4,463 | 2,453 | 2,010 | 259 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Dummy.Api.Docs where
import Control.Monad.Reader (ReaderT, lift, runReaderT)
import Data.ByteString.Lazy (ByteString)
import Data.Int (Int64)
import Data.Proxy
import Data.Text.Lazy (pack)
import Data.Text.Lazy.Encoding (encodeUtf8)
import Database.Persist.Sql (toSqlKey)
import Network.HTTP.Types
import Network.Wai
import Servant.API
import Servant.Docs hiding (List)
import Servant.Server
import Dummy.Api.Api
import Dummy.Api.Config
import Dummy.Api.Models
instance ToCapture (Capture "id" Int64) where
toCapture _ =
DocCapture "id" -- name
"(integer) ID of the Resource" -- description
instance ToSample () () where
toSample _ = Nothing -- example of output
john = User "John Doe" "[email protected]"
jane = User "Jane Doe" "[email protected]"
board = Board "Sample Board" (toSqlKey 1)
list = List "Sample List" (toSqlKey 1)
card = Card "Sample title" "Sample description" (toSqlKey 1) (toSqlKey 1)
instance ToSample [User] [User] where
toSample _ = Just [john, jane]
instance ToSample User User where
toSample _ = Just john
instance ToSample [Board] [Board] where
toSample _ = Just [board]
instance ToSample Board Board where
toSample _ = Just (Board "Board Name" (toSqlKey 1)) -- example of output
instance ToSample [List] [List] where
toSample _ = Just [list]
instance ToSample List List where
toSample _ = Just list
instance ToSample [Card] [Card] where
toSample _ = Just [card]
instance ToSample Card Card where
toSample _ = Just card
apiDocs :: API
apiDocs = docs dummyAPI
main :: IO ()
main = putStrLn $ markdown apiDocs
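-- The rendered Markdown could just as well be written to a file; an
-- illustrative variant: writeFile "docs.md" (markdown apiDocs)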
| denibertovic/dummy-api | src/Dummy/Api/Docs.hs | bsd-3-clause | 2,081 | 0 | 10 | 556 | 520 | 287 | 233 | 53 | 1 |
--
-- Copyright © 2013-2015 Anchor Systems, Pty Ltd and Others
--
-- The code in this file, and the program it is a part of, is
-- made available to you by its authors as open source software:
-- you can redistribute it and/or modify it under the terms of
-- the 3-clause BSD licence.
--
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
module Git.Vogue where
import Control.Applicative
import Control.Monad
import Control.Monad.IO.Class
import Data.Foldable
import Data.Maybe
import Data.Text.Lazy (Text)
import qualified Data.Text.Lazy.IO as T
import Data.Traversable hiding (sequence)
import Formatting
import Prelude hiding (elem, maximum)
import System.Directory
import System.Exit
import Git.Vogue.Types
-- | Execute a git-vogue command.
runCommand
:: forall m. (Applicative m, MonadIO m, Functor m)
=> VogueCommand
-> SearchMode
-> VCS m
-> PluginDiscoverer m
-> m ()
runCommand cmd search_mode VCS{..} PluginDiscoverer{..} = go cmd
where
cd = getTopLevel >>= liftIO . setCurrentDirectory
go CmdInit = do
cd
already_there <- checkHook
if already_there
then success "Pre-commit hook is already installed"
else do
installHook
installed <- checkHook
if installed
then success "Successfully installed hook"
else failure "Hook failed to install"
go CmdVerify = do
cd
installed <- checkHook
if installed
then success "Pre-commit hook currently installed"
else failure "Pre-commit hook not installed"
go CmdPlugins = do
liftIO $ T.putStrLn "git-vogue knows about the following plugins:\n"
discoverPlugins >>= liftIO . traverse_ print
go (CmdDisable plugin) = do
plugins <- discoverPlugins
if plugin `elem` fmap pluginName (filter (not . enabled) plugins)
then success "Plugin already disabled"
else
if plugin `elem` fmap pluginName plugins
then do
disablePlugin plugin
success "Disabled plugin"
else
failure "Unknown plugin"
go (CmdEnable plugin) = do
ps <- discoverPlugins
if plugin `elem` fmap pluginName ps
then
if plugin `elem` (pluginName <$> filter (not . enabled) ps)
then do
enablePlugin plugin
success "Enabled plugin"
else
success "Plugin already enabled"
else
failure "Unknown plugin"
go CmdRunCheck = do
(check_fs, all_fs, plugins) <- things
rs <- for plugins $ \p -> do
r <- runCheck p check_fs all_fs
liftIO . T.putStrLn $ colorize p r
return r
exitWithWorst rs
go CmdRunFix = do
(check_fs, all_fs, plugins) <- things
rs <- for plugins $ \p -> do
r <- runCheck p check_fs all_fs
case r of
Failure{} -> do
r' <- runFix p check_fs all_fs
liftIO . T.putStrLn $ colorize p r'
return $ Just r'
_ -> return Nothing
exitWithWorst (catMaybes rs)
things = do
cd
check_fs <- getFiles search_mode
when (null check_fs) (success "Vacuous success - Nothing to check")
plugins <- filter enabled <$> discoverPlugins
when (null check_fs) (success "Vacuous success - No plugins enabled")
all_fs <- getFiles FindAll
return (check_fs, all_fs, plugins)
success, failure :: MonadIO m => Text -> m a
success msg = liftIO (T.putStrLn msg >> exitSuccess)
failure msg = liftIO (T.putStrLn msg >> exitFailure)
-- | Output the results of a run and exit with an appropriate return code
exitWithWorst
:: MonadIO m
=> [Result]
-> m ()
exitWithWorst [] = liftIO exitSuccess
exitWithWorst rs = liftIO $
case maximum rs of
Success{} -> exitSuccess
Failure{} -> exitWith $ ExitFailure 1
Catastrophe{} -> exitWith $ ExitFailure 2
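-- For example (illustrative; this relies on the 'Ord' instance for 'Result'
-- ranking 'Catastrophe' above 'Failure' above 'Success', as the case analysis
-- above assumes):
--
-- > exitWithWorst [Success "ok", Failure "style check failed"] -- exits with code 1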
colorize
:: Plugin a
-> Result
-> Text
colorize Plugin{..} (Success txt) =
format ("\x1b[32m"
% text
% " succeeded\x1b[0m with:\n"
% text) (unPluginName pluginName) txt
colorize Plugin{..} (Failure txt) =
format ("\x1b[31m"
% text
% " failed\x1b[0m with:\n"
% text) (unPluginName pluginName) txt
colorize Plugin{..} (Catastrophe txt ret) =
format ("\x1b[31m"
% text
% " exploded \x1b[0m with exit code "
% int
%":\n"
% text) (unPluginName pluginName) txt ret
| olorin/git-vogue | lib/Git/Vogue.hs | bsd-3-clause | 5,300 | 0 | 21 | 1,893 | 1,200 | 602 | 598 | 130 | 15 |
{-| This module contains the top-level entrypoint and options parsing for the
@dhall@ executable
-}
{-# LANGUAGE ApplicativeDo #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Dhall.Main
( -- * Options
Options(..)
, Mode(..)
, ResolveMode(..)
, parseOptions
, parserInfoOptions
-- * Execution
, Dhall.Main.command
, main
) where
import Control.Applicative (optional, (<|>))
import Control.Exception (Handler (..), SomeException)
import Control.Monad (when)
import Data.Foldable (for_)
import Data.List.NonEmpty (NonEmpty (..), nonEmpty)
import Data.Maybe (fromMaybe)
import Data.Text (Text)
import Data.Void (Void)
import Dhall.Freeze (Intent (..), Scope (..))
import Dhall.Import
( Depends (..)
, Imported (..)
, SemanticCacheMode (..)
, _semanticCacheMode
)
import Dhall.Parser (Src)
import Dhall.Pretty
( Ann
, CharacterSet (..)
, annToAnsiStyle
, detectCharacterSet
)
import Dhall.Schemas (Schemas (..))
import Dhall.TypeCheck (Censored (..), DetailedTypeError (..), TypeError)
import Dhall.Version (dhallVersionString)
import Options.Applicative (Parser, ParserInfo)
import Prettyprinter (Doc, Pretty)
import System.Exit (ExitCode, exitFailure)
import System.IO (Handle)
import Text.Dot ((.->.))
import Dhall.Core
( Expr (Annot)
, Import (..)
, ImportHashed (..)
, ImportType (..)
, URL (..)
, pretty
)
import Dhall.Util
( Censor (..)
, CheckFailed (..)
, Header (..)
, Input (..)
, Output (..)
, OutputMode (..)
, Transitivity (..)
, handleMultipleChecksFailed
)
import qualified Codec.CBOR.JSON
import qualified Codec.CBOR.Read
import qualified Codec.CBOR.Write
import qualified Control.Exception
import qualified Control.Monad.Trans.State.Strict as State
import qualified Data.Aeson
import qualified Data.Aeson.Encode.Pretty
import qualified Data.ByteString.Lazy
import qualified Data.ByteString.Lazy.Char8
import qualified Data.Map
import qualified Data.Text
import qualified Data.Text.IO
import qualified Dhall
import qualified Dhall.Binary
import qualified Dhall.Core
import qualified Dhall.Diff
import qualified Dhall.DirectoryTree as DirectoryTree
import qualified Dhall.Format
import qualified Dhall.Freeze
import qualified Dhall.Import
import qualified Dhall.Import.Types
import qualified Dhall.Lint
import qualified Dhall.Map
import qualified Dhall.Pretty
import qualified Dhall.Repl
import qualified Dhall.Schemas
import qualified Dhall.Tags
import qualified Dhall.TypeCheck
import qualified Dhall.Util
import qualified GHC.IO.Encoding
import qualified Options.Applicative
import qualified Prettyprinter as Pretty
import qualified Prettyprinter.Render.Terminal as Pretty
import qualified Prettyprinter.Render.Text as Pretty.Text
import qualified System.AtomicWrite.Writer.LazyText as AtomicWrite.LazyText
import qualified System.Console.ANSI
import qualified System.Exit as Exit
import qualified System.FilePath
import qualified System.IO
import qualified Text.Dot
import qualified Text.Pretty.Simple
-- | Top-level program options
data Options = Options
{ mode :: Mode
, explain :: Bool
, plain :: Bool
, chosenCharacterSet :: Maybe CharacterSet
, censor :: Censor
}
-- | The subcommands for the @dhall@ executable
data Mode
= Default
{ file :: Input
, output :: Output
, annotate :: Bool
, alpha :: Bool
, semanticCacheMode :: SemanticCacheMode
, version :: Bool
}
| Version
| Resolve
{ file :: Input
, resolveMode :: Maybe ResolveMode
, semanticCacheMode :: SemanticCacheMode
}
| Type
{ file :: Input
, quiet :: Bool
, semanticCacheMode :: SemanticCacheMode
}
| Normalize { file :: Input , alpha :: Bool }
| Repl
| Format { deprecatedInPlace :: Bool, transitivity :: Transitivity, outputMode :: OutputMode, inputs :: NonEmpty Input }
| Freeze { deprecatedInPlace :: Bool, transitivity :: Transitivity, all_ :: Bool, cache :: Bool, outputMode :: OutputMode, inputs :: NonEmpty Input }
| Hash { file :: Input, cache :: Bool }
| Diff { expr1 :: Text, expr2 :: Text }
| Lint { deprecatedInPlace :: Bool, transitivity :: Transitivity, outputMode :: OutputMode, inputs :: NonEmpty Input }
| Tags
{ input :: Input
, output :: Output
, suffixes :: Maybe [Text]
, followSymlinks :: Bool
}
| Encode { file :: Input, json :: Bool }
| Decode { file :: Input, json :: Bool, quiet :: Bool }
| Text { file :: Input, output :: Output }
| DirectoryTree { file :: Input, path :: FilePath }
| Schemas { file :: Input, outputMode :: OutputMode, schemas :: Text }
| SyntaxTree { file :: Input, noted :: Bool }
-- | This specifies how to resolve transitive dependencies
data ResolveMode
= Dot
-- ^ Generate a DOT file for @graphviz@
| ListTransitiveDependencies
-- ^ List all transitive dependencies as text, one per line
| ListImmediateDependencies
-- ^ List immediate dependencies as text, one per line
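-- For instance (illustrative; the flag names come from 'parseResolveMode'
-- below), the three modes correspond to invocations such as:
--
-- > dhall resolve --dot --file ./package.dhall
-- > dhall resolve --immediate-dependencies --file ./package.dhall
-- > dhall resolve --transitive-dependencies --file ./package.dhall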
-- | Groups of subcommands
data Group
= Manipulate
| Generate
| Interpret
| Convert
| Miscellaneous
| Debugging
groupDescription :: Group -> String
groupDescription group = case group of
Manipulate -> "Manipulate Dhall code"
Generate -> "Generate other formats from Dhall"
Interpret -> "Interpret Dhall"
Convert -> "Convert Dhall to and from its binary representation"
Miscellaneous -> "Miscellaneous"
Debugging -> "Debugging this interpreter"
-- | `Parser` for the `Options` type
parseOptions :: Parser Options
parseOptions =
Options
<$> parseMode
<*> switch "explain" "Explain error messages in more detail"
<*> switch "plain" "Disable syntax highlighting"
<*> parseCharacterSet
<*> parseCensor
where
switch name description =
Options.Applicative.switch
( Options.Applicative.long name
<> Options.Applicative.help description
)
parseCensor = fmap f (switch "censor" "Hide source code in error messages")
where
f True = Censor
f False = NoCensor
parseCharacterSet =
Options.Applicative.flag'
(Just Unicode)
( Options.Applicative.long "unicode"
<> Options.Applicative.help "Format code using only Unicode syntax"
)
<|> Options.Applicative.flag'
(Just ASCII)
( Options.Applicative.long "ascii"
<> Options.Applicative.help "Format code using only ASCII syntax"
)
<|> pure Nothing
subcommand :: Group -> String -> String -> Parser a -> Parser a
subcommand group name description parser =
Options.Applicative.hsubparser
( Options.Applicative.command name parserInfo
<> Options.Applicative.metavar name
<> Options.Applicative.commandGroup (groupDescription group)
)
where
parserInfo =
Options.Applicative.info parser
( Options.Applicative.fullDesc
<> Options.Applicative.progDesc description
)
parseMode :: Parser Mode
parseMode =
subcommand
Manipulate
"format"
"Standard code formatter for the Dhall language"
(Format <$> deprecatedInPlace <*> parseTransitiveSwitch <*> parseCheck "formatted" <*> parseFiles)
<|> subcommand
Manipulate
"freeze"
"Add integrity checks to remote import statements of an expression"
(Freeze <$> deprecatedInPlace <*> parseTransitiveSwitch <*> parseAllFlag <*> parseCacheFlag <*> parseCheck "frozen" <*> parseFiles)
<|> subcommand
Manipulate
"lint"
"Improve Dhall code by using newer language features and removing dead code"
(Lint <$> deprecatedInPlace <*> parseTransitiveSwitch <*> parseCheck "linted" <*> parseFiles)
<|> subcommand
Manipulate
"rewrite-with-schemas"
"Simplify Dhall code using a schemas record"
(Dhall.Main.Schemas <$> parseInplaceNonTransitive <*> parseCheck "rewritten" <*> parseSchemasRecord)
<|> subcommand
Generate
"text"
"Render a Dhall expression that evaluates to a Text literal"
(Text <$> parseFile <*> parseOutput)
<|> subcommand
Generate
"to-directory-tree"
"Convert nested records of Text literals into a directory tree"
(DirectoryTree <$> parseFile <*> parseDirectoryTreeOutput)
<|> subcommand
Interpret
"resolve"
"Resolve an expression's imports"
(Resolve <$> parseFile <*> parseResolveMode <*> parseSemanticCacheMode)
<|> subcommand
Interpret
"type"
"Infer an expression's type"
(Type <$> parseFile <*> parseQuiet <*> parseSemanticCacheMode)
<|> subcommand
Interpret
"normalize"
"Normalize an expression"
(Normalize <$> parseFile <*> parseAlpha)
<|> subcommand
Convert
"encode"
"Encode a Dhall expression to binary"
(Encode <$> parseFile <*> parseJSONFlag)
<|> subcommand
Convert
"decode"
"Decode a Dhall expression from binary"
(Decode <$> parseFile <*> parseJSONFlag <*> parseQuiet)
<|> subcommand
Miscellaneous
"repl"
"Interpret expressions in a REPL"
(pure Repl)
<|> subcommand
Miscellaneous
"diff"
"Render the difference between the normal form of two expressions"
(Diff <$> argument "expr1" <*> argument "expr2")
<|> subcommand
Miscellaneous
"hash"
"Compute semantic hashes for Dhall expressions"
(Hash <$> parseFile <*> parseCache)
<|> subcommand
Miscellaneous
"tags"
"Generate etags file"
(Tags <$> parseInput <*> parseTagsOutput <*> parseSuffixes <*> parseFollowSymlinks)
<|> subcommand
Miscellaneous
"version"
"Display version"
(pure Version)
<|> subcommand
Debugging
"haskell-syntax-tree"
"Output the parsed syntax tree (for debugging)"
(SyntaxTree <$> parseFile <*> parseNoted)
<|> ( Default
<$> parseFile
<*> parseOutput
<*> parseAnnotate
<*> parseAlpha
<*> parseSemanticCacheMode
<*> parseVersion
)
where
deprecatedInPlace =
Options.Applicative.switch
( Options.Applicative.long "inplace"
<> Options.Applicative.internal -- completely hidden from help
)
argument =
fmap Data.Text.pack
. Options.Applicative.strArgument
. Options.Applicative.metavar
parseFile = fmap f (optional p)
where
f Nothing = StandardInput
f (Just file) = InputFile file
p = Options.Applicative.strOption
( Options.Applicative.long "file"
<> Options.Applicative.help "Read expression from a file instead of standard input"
<> Options.Applicative.metavar "FILE"
<> Options.Applicative.action "file"
)
parseFiles = fmap f (Options.Applicative.many p)
where
-- Parse explicit stdin in the input filepaths
parseStdin inputs
| InputFile "-" `elem` inputs = StandardInput : filter (/= InputFile "-") inputs
| otherwise = inputs
f = fromMaybe (pure StandardInput) . nonEmpty . parseStdin . fmap InputFile
p = Options.Applicative.strArgument
( Options.Applicative.help "Read expression from files instead of standard input"
<> Options.Applicative.metavar "FILES"
<> Options.Applicative.action "file"
)
parseOutput = fmap f (optional p)
where
f Nothing = StandardOutput
f (Just file) = OutputFile file
p = Options.Applicative.strOption
( Options.Applicative.long "output"
<> Options.Applicative.help "Write result to a file instead of standard output"
<> Options.Applicative.metavar "FILE"
<> Options.Applicative.action "file"
)
parseAlpha =
Options.Applicative.switch
( Options.Applicative.long "alpha"
<> Options.Applicative.help "α-normalize expression"
)
parseAnnotate =
Options.Applicative.switch
( Options.Applicative.long "annotate"
<> Options.Applicative.help "Add a type annotation to the output"
)
parseSemanticCacheMode =
Options.Applicative.flag
UseSemanticCache
IgnoreSemanticCache
( Options.Applicative.long "no-cache"
<> Options.Applicative.help
"Handle protected imports as if the cache was empty"
)
parseVersion =
Options.Applicative.switch
( Options.Applicative.long "version"
<> Options.Applicative.help "Display version"
)
parseResolveMode =
Options.Applicative.flag' (Just Dot)
( Options.Applicative.long "dot"
<> Options.Applicative.help
"Output import dependency graph in dot format"
)
<|>
Options.Applicative.flag' (Just ListImmediateDependencies)
( Options.Applicative.long "immediate-dependencies"
<> Options.Applicative.help
"List immediate import dependencies"
)
<|>
Options.Applicative.flag' (Just ListTransitiveDependencies)
( Options.Applicative.long "transitive-dependencies"
<> Options.Applicative.help
"List transitive import dependencies in post-order"
)
<|> pure Nothing
parseQuiet =
Options.Applicative.switch
( Options.Applicative.long "quiet"
<> Options.Applicative.help "Don't print the result"
)
parseInplace =
Options.Applicative.strOption
( Options.Applicative.long "inplace"
<> Options.Applicative.help "Modify the specified file in-place"
<> Options.Applicative.metavar "FILE"
<> Options.Applicative.action "file"
)
parseTransitiveSwitch = Options.Applicative.flag NonTransitive Transitive
( Options.Applicative.long "transitive"
<> Options.Applicative.help "Modify the input and its transitive relative imports in-place"
)
parseInplaceNonTransitive =
fmap InputFile parseInplace
<|> pure StandardInput
parseInput = fmap f (optional p)
where
f Nothing = StandardInput
f (Just path) = InputFile path
p = Options.Applicative.strOption
( Options.Applicative.long "path"
<> Options.Applicative.help "Index all files in path recursively. Will get list of files from STDIN if omitted."
<> Options.Applicative.metavar "PATH"
<> Options.Applicative.action "file"
<> Options.Applicative.action "directory"
)
parseTagsOutput = fmap f (optional p)
where
f Nothing = OutputFile "tags"
f (Just file) = OutputFile file
p = Options.Applicative.strOption
( Options.Applicative.long "output"
<> Options.Applicative.help "The name of the file that the tags are written to. Defaults to \"tags\""
<> Options.Applicative.metavar "FILENAME"
<> Options.Applicative.action "file"
)
parseSuffixes = fmap f (optional p)
where
f Nothing = Just [".dhall"]
f (Just "") = Nothing
f (Just line) = Just (Data.Text.splitOn " " line)
p = Options.Applicative.strOption
( Options.Applicative.long "suffixes"
<> Options.Applicative.help "Index only files with suffixes. \"\" to index all files."
<> Options.Applicative.metavar "SUFFIXES"
)
parseFollowSymlinks =
Options.Applicative.switch
( Options.Applicative.long "follow-symlinks"
<> Options.Applicative.help "Follow symlinks when recursing directories"
)
parseJSONFlag =
Options.Applicative.switch
( Options.Applicative.long "json"
<> Options.Applicative.help "Use JSON representation of CBOR"
)
parseAllFlag =
Options.Applicative.switch
( Options.Applicative.long "all"
<> Options.Applicative.help "Add integrity checks to all imports (not just remote imports)"
)
parseCacheFlag =
Options.Applicative.switch
( Options.Applicative.long "cache"
<> Options.Applicative.help "Add fallback unprotected imports when using integrity checks purely for caching purposes"
)
parseCheck processed = fmap adapt switch
where
adapt True = Check
adapt False = Write
switch =
Options.Applicative.switch
( Options.Applicative.long "check"
<> Options.Applicative.help ("Only check if the input is " <> processed)
)
parseSchemasRecord =
Options.Applicative.strOption
( Options.Applicative.long "schemas"
<> Options.Applicative.help "A record of schemas"
<> Options.Applicative.metavar "EXPR"
)
parseDirectoryTreeOutput =
Options.Applicative.strOption
( Options.Applicative.long "output"
<> Options.Applicative.help "The destination path to create"
<> Options.Applicative.metavar "PATH"
<> Options.Applicative.action "directory"
)
parseNoted =
Options.Applicative.switch
( Options.Applicative.long "noted"
<> Options.Applicative.help "Print `Note` constructors"
)
parseCache =
Options.Applicative.switch
( Options.Applicative.long "cache"
<> Options.Applicative.help "Cache the hashed expression"
)
-- | `ParserInfo` for the `Options` type
parserInfoOptions :: ParserInfo Options
parserInfoOptions =
Options.Applicative.info
(Options.Applicative.helper <*> parseOptions)
( Options.Applicative.progDesc "Interpreter for the Dhall language"
<> Options.Applicative.fullDesc
)
noHeaders :: Import -> Import
noHeaders
(Import { importHashed = ImportHashed { importType = Remote URL{ .. }, ..}, .. }) =
Import { importHashed = ImportHashed { importType = Remote URL{ headers = Nothing, .. }, .. }, .. }
noHeaders i =
i
-- | Run the command specified by the `Options` type
command :: Options -> IO ()
command (Options {..}) = do
GHC.IO.Encoding.setLocaleEncoding System.IO.utf8
let rootDirectory = \case
InputFile f -> System.FilePath.takeDirectory f
StandardInput -> "."
let toStatus = Dhall.Import.emptyStatus . rootDirectory
let getExpression = Dhall.Util.getExpression censor
-- The characterSet detection used here only works on the source
    -- expression, before any transformation is applied. This helper is there to
    -- make sure the detection is done on the correct expression.
let getExpressionAndCharacterSet file = do
expr <- getExpression file
let characterSet = fromMaybe (detectCharacterSet expr) chosenCharacterSet
return (expr, characterSet)
let handle io =
Control.Exception.catches io
[ Handler handleTypeError
, Handler handleImported
, Handler handleExitCode
]
where
handleAll e = do
let string = show (e :: SomeException)
if not (null string)
then System.IO.hPutStrLn System.IO.stderr string
else return ()
System.Exit.exitFailure
handleTypeError e = Control.Exception.handle handleAll $ do
let _ = e :: TypeError Src Void
System.IO.hPutStrLn System.IO.stderr ""
if explain
then
case censor of
Censor -> Control.Exception.throwIO (CensoredDetailed (DetailedTypeError e))
NoCensor -> Control.Exception.throwIO (DetailedTypeError e)
else do
Data.Text.IO.hPutStrLn System.IO.stderr "\ESC[2mUse \"dhall --explain\" for detailed errors\ESC[0m"
case censor of
Censor -> Control.Exception.throwIO (Censored e)
NoCensor -> Control.Exception.throwIO e
handleImported (Imported ps e) = Control.Exception.handle handleAll $ do
let _ = e :: TypeError Src Void
System.IO.hPutStrLn System.IO.stderr ""
if explain
then Control.Exception.throwIO (Imported ps (DetailedTypeError e))
else do
Data.Text.IO.hPutStrLn System.IO.stderr "\ESC[2mUse \"dhall --explain\" for detailed errors\ESC[0m"
Control.Exception.throwIO (Imported ps e)
handleExitCode e =
Control.Exception.throwIO (e :: ExitCode)
let renderDoc :: Handle -> Doc Ann -> IO ()
renderDoc h doc = do
let stream = Dhall.Pretty.layout doc
supportsANSI <- System.Console.ANSI.hSupportsANSI h
let ansiStream =
if supportsANSI && not plain
then fmap annToAnsiStyle stream
else Pretty.unAnnotateS stream
Pretty.renderIO h ansiStream
Data.Text.IO.hPutStrLn h ""
let render :: Pretty a => Handle -> CharacterSet -> Expr Src a -> IO ()
render h characterSet expression = do
let doc = Dhall.Pretty.prettyCharacterSet characterSet expression
renderDoc h doc
let writeDocToFile :: FilePath -> Doc ann -> IO ()
writeDocToFile file doc = do
let stream = Dhall.Pretty.layout (doc <> "\n")
AtomicWrite.LazyText.atomicWriteFile file (Pretty.Text.renderLazy stream)
handle $ case mode of
Version ->
putStrLn dhallVersionString
Default {..} -> do
if version
then do
putStrLn dhallVersionString
Exit.exitSuccess
else return ()
(expression, characterSet) <- getExpressionAndCharacterSet file
resolvedExpression <-
Dhall.Import.loadRelativeTo (rootDirectory file) semanticCacheMode expression
inferredType <- Dhall.Core.throws (Dhall.TypeCheck.typeOf resolvedExpression)
let normalizedExpression = Dhall.Core.normalize resolvedExpression
let alphaNormalizedExpression =
if alpha
then Dhall.Core.alphaNormalize normalizedExpression
else normalizedExpression
let annotatedExpression =
if annotate
then Annot alphaNormalizedExpression inferredType
else alphaNormalizedExpression
case output of
StandardOutput -> render System.IO.stdout characterSet annotatedExpression
OutputFile file_ ->
writeDocToFile
file_
(Dhall.Pretty.prettyCharacterSet characterSet annotatedExpression)
Resolve { resolveMode = Just Dot, ..} -> do
expression <- getExpression file
(Dhall.Import.Types.Status { _graph, _stack }) <-
State.execStateT (Dhall.Import.loadWith expression) (toStatus file) { _semanticCacheMode = semanticCacheMode }
let (rootImport :| _) = _stack
imports = rootImport : map parent _graph ++ map child _graph
importIds = Data.Map.fromList (zip imports [Text.Dot.userNodeId i | i <- [0..]])
let dotNode (i, nodeId) =
Text.Dot.userNode
nodeId
[ ("label", Data.Text.unpack $ pretty (convert i))
, ("shape", "box")
, ("style", "rounded")
]
where
convert = noHeaders . Dhall.Import.chainedImport
let dotEdge (Depends parent child) =
case (Data.Map.lookup parent importIds, Data.Map.lookup child importIds) of
(Just from, Just to) -> from .->. to
_ -> pure ()
let dot = do Text.Dot.attribute ("rankdir", "LR")
mapM_ dotNode (Data.Map.assocs importIds)
mapM_ dotEdge _graph
putStr . ("strict " <>) . Text.Dot.showDot $ dot
Resolve { resolveMode = Just ListImmediateDependencies, ..} -> do
expression <- getExpression file
mapM_ (print . Pretty.pretty . noHeaders) expression
Resolve { resolveMode = Just ListTransitiveDependencies, ..} -> do
expression <- getExpression file
(Dhall.Import.Types.Status { _cache }) <-
State.execStateT (Dhall.Import.loadWith expression) (toStatus file) { _semanticCacheMode = semanticCacheMode }
mapM_ print
. fmap ( Pretty.pretty
. noHeaders
. Dhall.Import.chainedImport
)
. reverse
. Dhall.Map.keys
$ _cache
Resolve { resolveMode = Nothing, ..} -> do
(expression, characterSet) <- getExpressionAndCharacterSet file
resolvedExpression <-
Dhall.Import.loadRelativeTo (rootDirectory file) semanticCacheMode expression
render System.IO.stdout characterSet resolvedExpression
Normalize {..} -> do
(expression, characterSet) <- getExpressionAndCharacterSet file
resolvedExpression <- Dhall.Import.assertNoImports expression
_ <- Dhall.Core.throws (Dhall.TypeCheck.typeOf resolvedExpression)
let normalizedExpression = Dhall.Core.normalize resolvedExpression
let alphaNormalizedExpression =
if alpha
then Dhall.Core.alphaNormalize normalizedExpression
else normalizedExpression
render System.IO.stdout characterSet alphaNormalizedExpression
Type {..} -> do
(expression, characterSet) <- getExpressionAndCharacterSet file
resolvedExpression <-
Dhall.Import.loadRelativeTo (rootDirectory file) semanticCacheMode expression
inferredType <- Dhall.Core.throws (Dhall.TypeCheck.typeOf resolvedExpression)
if quiet
then return ()
else render System.IO.stdout characterSet inferredType
Repl ->
Dhall.Repl.repl
(fromMaybe Unicode chosenCharacterSet) -- Default to Unicode if no characterSet specified
explain
Diff {..} -> do
expression1 <- Dhall.inputExpr expr1
expression2 <- Dhall.inputExpr expr2
let diff = Dhall.Diff.diffNormalized expression1 expression2
renderDoc System.IO.stdout (Dhall.Diff.doc diff)
if Dhall.Diff.same diff
then return ()
else Exit.exitFailure
Format {..} -> do
when deprecatedInPlace $
System.IO.hPutStrLn System.IO.stderr "Warning: the flag \"--inplace\" is deprecated"
Dhall.Format.format Dhall.Format.Format{..}
Freeze {..} -> do
when deprecatedInPlace $
System.IO.hPutStrLn System.IO.stderr "Warning: the flag \"--inplace\" is deprecated"
let scope = if all_ then AllImports else OnlyRemoteImports
let intent = if cache then Cache else Secure
Dhall.Freeze.freeze outputMode transitivity inputs scope intent chosenCharacterSet censor
Hash {..} -> do
expression <- getExpression file
resolvedExpression <-
Dhall.Import.loadRelativeTo (rootDirectory file) UseSemanticCache expression
_ <- Dhall.Core.throws (Dhall.TypeCheck.typeOf resolvedExpression)
let normalizedExpression =
Dhall.Core.alphaNormalize (Dhall.Core.normalize resolvedExpression)
if cache
then Dhall.Import.writeExpressionToSemanticCache normalizedExpression
else return ()
Data.Text.IO.putStrLn (Dhall.Import.hashExpressionToCode normalizedExpression)
Lint { transitivity = transitivity0, ..} -> do
when deprecatedInPlace $
System.IO.hPutStrLn System.IO.stderr "Warning: the flag \"--inplace\" is deprecated"
handleMultipleChecksFailed "lint" "linted" go inputs
where
go input = do
let directory = case input of
StandardInput -> "."
InputFile file -> System.FilePath.takeDirectory file
let status = Dhall.Import.emptyStatus directory
(inputName, originalText, transitivity) <- case input of
InputFile file -> do
text <- Data.Text.IO.readFile file
return (file, text, transitivity0)
StandardInput -> do
text <- Data.Text.IO.getContents
return ("(input)", text, NonTransitive)
(Header header, parsedExpression) <-
Dhall.Util.getExpressionAndHeaderFromStdinText censor inputName originalText
let characterSet = fromMaybe (detectCharacterSet parsedExpression) chosenCharacterSet
case transitivity of
Transitive ->
for_ parsedExpression $ \import_ -> do
maybeFilepath <- Dhall.Import.dependencyToFile status import_
for_ maybeFilepath $ \filepath ->
go (InputFile filepath)
NonTransitive ->
return ()
let lintedExpression = Dhall.Lint.lint parsedExpression
let doc = Pretty.pretty header
<> Dhall.Pretty.prettyCharacterSet characterSet lintedExpression
let stream = Dhall.Pretty.layout doc
let modifiedText = Pretty.Text.renderStrict stream <> "\n"
case outputMode of
Write -> do
case input of
InputFile file ->
if originalText == modifiedText
then return ()
else writeDocToFile file doc
StandardInput ->
renderDoc System.IO.stdout doc
return (Right ())
Check ->
return $
if originalText == modifiedText
then Right ()
else Left CheckFailed{..}
Encode {..} -> do
expression <- getExpression file
let bytes = Dhall.Binary.encodeExpression (Dhall.Core.denote expression)
if json
then do
let decoder = Codec.CBOR.JSON.decodeValue False
(_, value) <- Dhall.Core.throws (Codec.CBOR.Read.deserialiseFromBytes decoder bytes)
let jsonBytes = Data.Aeson.Encode.Pretty.encodePretty value
Data.ByteString.Lazy.Char8.putStrLn jsonBytes
else
Data.ByteString.Lazy.putStr bytes
Decode {..} -> do
bytes <-
case file of
InputFile f -> Data.ByteString.Lazy.readFile f
StandardInput -> Data.ByteString.Lazy.getContents
expression <-
if json
then do
value <- case Data.Aeson.eitherDecode' bytes of
Left string -> fail string
Right value -> return value
let encoding = Codec.CBOR.JSON.encodeValue value
let cborgBytes = Codec.CBOR.Write.toLazyByteString encoding
Dhall.Core.throws (Dhall.Binary.decodeExpression cborgBytes)
else
Dhall.Core.throws (Dhall.Binary.decodeExpression bytes)
if quiet
then return ()
else do
let doc =
Dhall.Pretty.prettyCharacterSet
(fromMaybe Unicode chosenCharacterSet) -- default to Unicode
(Dhall.Core.renote expression :: Expr Src Import)
renderDoc System.IO.stdout doc
Text {..} -> do
expression <- getExpression file
resolvedExpression <-
Dhall.Import.loadRelativeTo (rootDirectory file) UseSemanticCache expression
_ <- Dhall.Core.throws (Dhall.TypeCheck.typeOf (Annot resolvedExpression Dhall.Core.Text))
let normalizedExpression = Dhall.Core.normalize resolvedExpression
case normalizedExpression of
Dhall.Core.TextLit (Dhall.Core.Chunks [] text) ->
let write = case output of
StandardOutput -> Data.Text.IO.putStr
OutputFile file_ -> Data.Text.IO.writeFile file_
in write text
_ -> do
let invalidDecoderExpected :: Expr Void Void
invalidDecoderExpected = Dhall.Core.Text
let invalidDecoderExpression :: Expr Void Void
invalidDecoderExpression = normalizedExpression
Control.Exception.throwIO (Dhall.InvalidDecoder {..})
Tags {..} -> do
tags <- Dhall.Tags.generate input suffixes followSymlinks
case output of
OutputFile file ->
System.IO.withFile file System.IO.WriteMode (`Data.Text.IO.hPutStr` tags)
StandardOutput -> Data.Text.IO.putStrLn tags
DirectoryTree {..} -> do
expression <- getExpression file
resolvedExpression <-
Dhall.Import.loadRelativeTo (rootDirectory file) UseSemanticCache expression
_ <- Dhall.Core.throws (Dhall.TypeCheck.typeOf resolvedExpression)
let normalizedExpression = Dhall.Core.normalize resolvedExpression
DirectoryTree.toDirectoryTree path normalizedExpression
Dhall.Main.Schemas{..} ->
Dhall.Schemas.schemasCommand Dhall.Schemas.Schemas{ input = file, ..}
SyntaxTree {..} -> do
expression <- getExpression file
if noted then
Text.Pretty.Simple.pPrintNoColor expression
else
let denoted :: Expr Void Import
denoted = Dhall.Core.denote expression
in Text.Pretty.Simple.pPrintNoColor denoted
-- | Entry point for the @dhall@ executable
main :: IO ()
main = do
options <- Options.Applicative.execParser parserInfoOptions
Dhall.Main.command options
| Gabriel439/Haskell-Dhall-Library | dhall/src/Dhall/Main.hs | bsd-3-clause | 36,923 | 9 | 29 | 12,704 | 7,382 | 3,924 | 3,458 | 758 | 55 |
module Language.Fw.Lexer where
import Text.Parsec
import Text.Parsec.Language (emptyDef)
import qualified Text.Parsec.Token as P
lambdaDef = emptyDef
{ P.commentStart = "{-"
, P.commentEnd = "-}"
, P.commentLine = "--"
, P.nestedComments = True
, P.identStart = letter
, P.identLetter = alphaNum <|> oneOf "_'"
, P.opStart = P.opLetter lambdaDef
-- , P.opLetter = oneOf "=+/*-~><&|\\"
, P.opLetter = oneOf "=\\.->:*+/"
, P.reservedNames =
[ "pack", "as"
, "unpack"
, "let", "in"
, "True", "False"
, "*"
, "forall"
, "exists"
, "String", "Int", "Bool"
]
}
lexer :: P.TokenParser ()
lexer = P.makeTokenParser lambdaDef
parens = P.parens lexer
identifier = P.identifier lexer
reserved = P.reserved lexer
integer = P.integer lexer
strLiteral = P.stringLiteral lexer
operator = P.operator lexer
reservedOp = P.reservedOp lexer
whitespace = P.whiteSpace lexer
dot = P.dot lexer
semi = P.semi lexer
braces = P.braces lexer
brackets = P.brackets lexer
angles = P.angles lexer
commaSep = P.commaSep lexer
colon = P.colon lexer
comma = P.comma lexer
lambda = reservedOp "\\"
equals = reservedOp "="
arrow = reservedOp "->"
allOf p = do
whitespace
r <- p
eof
return r
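-- Illustrative usage of the combinators above, running a whole-input parser
-- over a comma-separated identifier list:
--
-- > parse (allOf (commaSep identifier)) "<example>" "foo, bar, baz"
-- > -- Right ["foo","bar","baz"]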
| WraithM/fingmodules | src/Language/Fw/Lexer.hs | bsd-3-clause | 1,364 | 0 | 8 | 387 | 399 | 218 | 181 | 48 | 1 |
{-# LANGUAGE CPP, GADTs #-}
{-# OPTIONS_GHC -fno-warn-type-defaults #-}
-- ----------------------------------------------------------------------------
-- | Handle conversion of CmmProc to LLVM code.
--
module LlvmCodeGen.CodeGen ( genLlvmProc ) where
#include "HsVersions.h"
import GhcPrelude
import Llvm
import LlvmCodeGen.Base
import LlvmCodeGen.Regs
import BlockId
import CodeGen.Platform ( activeStgRegs, callerSaves )
import CLabel
import Cmm
import PprCmm
import CmmUtils
import CmmSwitch
import Hoopl.Block
import Hoopl.Graph
import Hoopl.Collections
import DynFlags
import FastString
import ForeignCall
import Outputable hiding (panic, pprPanic)
import qualified Outputable
import Platform
import OrdList
import UniqSupply
import Unique
import Util
import Control.Monad.Trans.Class
import Control.Monad.Trans.Writer
import Data.Semigroup ( Semigroup )
import qualified Data.Semigroup as Semigroup
import Data.List ( nub )
import Data.Maybe ( catMaybes )
type Atomic = Bool
type LlvmStatements = OrdList LlvmStatement
-- -----------------------------------------------------------------------------
-- | Top-level of the LLVM proc Code generator
--
genLlvmProc :: RawCmmDecl -> LlvmM [LlvmCmmDecl]
genLlvmProc (CmmProc infos lbl live graph) = do
let blocks = toBlockListEntryFirstFalseFallthrough graph
(lmblocks, lmdata) <- basicBlocksCodeGen live blocks
let info = mapLookup (g_entry graph) infos
proc = CmmProc info lbl live (ListGraph lmblocks)
return (proc:lmdata)
genLlvmProc _ = panic "genLlvmProc: case that shouldn't reach here!"
-- -----------------------------------------------------------------------------
-- * Block code generation
--
-- | Generate code for a list of blocks that make up a complete
-- procedure. The first block in the list is expected to be the entry
-- point and will get the prologue.
basicBlocksCodeGen :: LiveGlobalRegs -> [CmmBlock]
-> LlvmM ([LlvmBasicBlock], [LlvmCmmDecl])
basicBlocksCodeGen _ [] = panic "no entry block!"
basicBlocksCodeGen live (entryBlock:cmmBlocks)
= do (prologue, prologueTops) <- funPrologue live (entryBlock:cmmBlocks)
-- Generate code
(BasicBlock bid entry, entryTops) <- basicBlockCodeGen entryBlock
(blocks, topss) <- fmap unzip $ mapM basicBlockCodeGen cmmBlocks
-- Compose
let entryBlock = BasicBlock bid (fromOL prologue ++ entry)
return (entryBlock : blocks, prologueTops ++ entryTops ++ concat topss)
-- | Generate code for one block
basicBlockCodeGen :: CmmBlock -> LlvmM ( LlvmBasicBlock, [LlvmCmmDecl] )
basicBlockCodeGen block
= do let (_, nodes, tail) = blockSplit block
id = entryLabel block
(mid_instrs, top) <- stmtsToInstrs $ blockToList nodes
(tail_instrs, top') <- stmtToInstrs tail
let instrs = fromOL (mid_instrs `appOL` tail_instrs)
return (BasicBlock id instrs, top' ++ top)
-- -----------------------------------------------------------------------------
-- * CmmNode code generation
--
-- A statement conversion return data.
-- * LlvmStatements: The compiled LLVM statements.
-- * LlvmCmmDecl: Any global data needed.
type StmtData = (LlvmStatements, [LlvmCmmDecl])
-- | Convert a list of CmmNode's to LlvmStatement's
stmtsToInstrs :: [CmmNode e x] -> LlvmM StmtData
stmtsToInstrs stmts
= do (instrss, topss) <- fmap unzip $ mapM stmtToInstrs stmts
return (concatOL instrss, concat topss)
-- | Convert a CmmStmt to a list of LlvmStatement's
stmtToInstrs :: CmmNode e x -> LlvmM StmtData
stmtToInstrs stmt = case stmt of
CmmComment _ -> return (nilOL, []) -- nuke comments
CmmTick _ -> return (nilOL, [])
CmmUnwind {} -> return (nilOL, [])
CmmAssign reg src -> genAssign reg src
CmmStore addr src -> genStore addr src
CmmBranch id -> genBranch id
CmmCondBranch arg true false likely
-> genCondBranch arg true false likely
CmmSwitch arg ids -> genSwitch arg ids
-- Foreign Call
CmmUnsafeForeignCall target res args
-> genCall target res args
-- Tail call
CmmCall { cml_target = arg,
cml_args_regs = live } -> genJump arg live
_ -> panic "Llvm.CodeGen.stmtToInstrs"
-- | Wrapper function to declare an intrinsic function by function type
getInstrinct2 :: LMString -> LlvmType -> LlvmM ExprData
getInstrinct2 fname fty@(LMFunction funSig) = do
let fv = LMGlobalVar fname fty (funcLinkage funSig) Nothing Nothing Constant
fn <- funLookup fname
tops <- case fn of
Just _ ->
return []
Nothing -> do
funInsert fname fty
un <- getUniqueM
let lbl = mkAsmTempLabel un
return [CmmData (Section Data lbl) [([],[fty])]]
return (fv, nilOL, tops)
getInstrinct2 _ _ = error "getInstrinct2: Non-function type!"
-- | Declares an intrinsic function by return and parameter types
getInstrinct :: LMString -> LlvmType -> [LlvmType] -> LlvmM ExprData
getInstrinct fname retTy parTys =
let funSig = LlvmFunctionDecl fname ExternallyVisible CC_Ccc retTy
FixedArgs (tysToParams parTys) Nothing
fty = LMFunction funSig
in getInstrinct2 fname fty
-- | Memory barrier instruction for LLVM >= 3.0
barrier :: LlvmM StmtData
barrier = do
let s = Fence False SyncSeqCst
return (unitOL s, [])
-- | Foreign Calls
genCall :: ForeignTarget -> [CmmFormal] -> [CmmActual]
-> LlvmM StmtData
-- Write barrier needs to be handled specially as it is implemented as an LLVM
-- intrinsic function.
genCall (PrimTarget MO_WriteBarrier) _ _ = do
platform <- getLlvmPlatform
if platformArch platform `elem` [ArchX86, ArchX86_64, ArchSPARC]
then return (nilOL, [])
else barrier
genCall (PrimTarget MO_Touch) _ _
= return (nilOL, [])
genCall (PrimTarget (MO_UF_Conv w)) [dst] [e] = runStmtsDecls $ do
dstV <- getCmmRegW (CmmLocal dst)
let ty = cmmToLlvmType $ localRegType dst
width = widthToLlvmFloat w
castV <- lift $ mkLocalVar ty
ve <- exprToVarW e
statement $ Assignment castV $ Cast LM_Uitofp ve width
statement $ Store castV dstV
genCall (PrimTarget (MO_UF_Conv _)) [_] args =
panic $ "genCall: Too many arguments to MO_UF_Conv. " ++
"Can only handle 1, given" ++ show (length args) ++ "."
-- Handle prefetching data
genCall t@(PrimTarget (MO_Prefetch_Data localityInt)) [] args
| 0 <= localityInt && localityInt <= 3 = runStmtsDecls $ do
let argTy = [i8Ptr, i32, i32, i32]
funTy = \name -> LMFunction $ LlvmFunctionDecl name ExternallyVisible
CC_Ccc LMVoid FixedArgs (tysToParams argTy) Nothing
let (_, arg_hints) = foreignTargetHints t
let args_hints' = zip args arg_hints
argVars <- arg_varsW args_hints' ([], nilOL, [])
fptr <- liftExprData $ getFunPtr funTy t
argVars' <- castVarsW $ zip argVars argTy
doTrashStmts
let argSuffix = [mkIntLit i32 0, mkIntLit i32 localityInt, mkIntLit i32 1]
statement $ Expr $ Call StdCall fptr (argVars' ++ argSuffix) []
| otherwise = panic $ "prefetch locality level integer must be between 0 and 3, given: " ++ (show localityInt)
-- Handle PopCnt, Clz, Ctz, and BSwap that need to only convert arg
-- and return types
genCall t@(PrimTarget (MO_PopCnt w)) dsts args =
genCallSimpleCast w t dsts args
genCall t@(PrimTarget (MO_Clz w)) dsts args =
genCallSimpleCast w t dsts args
genCall t@(PrimTarget (MO_Ctz w)) dsts args =
genCallSimpleCast w t dsts args
genCall t@(PrimTarget (MO_BSwap w)) dsts args =
genCallSimpleCast w t dsts args
genCall (PrimTarget (MO_AtomicRMW width amop)) [dst] [addr, n] = runStmtsDecls $ do
addrVar <- exprToVarW addr
nVar <- exprToVarW n
let targetTy = widthToLlvmInt width
ptrExpr = Cast LM_Inttoptr addrVar (pLift targetTy)
ptrVar <- doExprW (pLift targetTy) ptrExpr
dstVar <- getCmmRegW (CmmLocal dst)
let op = case amop of
AMO_Add -> LAO_Add
AMO_Sub -> LAO_Sub
AMO_And -> LAO_And
AMO_Nand -> LAO_Nand
AMO_Or -> LAO_Or
AMO_Xor -> LAO_Xor
retVar <- doExprW targetTy $ AtomicRMW op ptrVar nVar SyncSeqCst
statement $ Store retVar dstVar
genCall (PrimTarget (MO_AtomicRead _)) [dst] [addr] = runStmtsDecls $ do
dstV <- getCmmRegW (CmmLocal dst)
v1 <- genLoadW True addr (localRegType dst)
statement $ Store v1 dstV
genCall (PrimTarget (MO_Cmpxchg _width))
[dst] [addr, old, new] = runStmtsDecls $ do
addrVar <- exprToVarW addr
oldVar <- exprToVarW old
newVar <- exprToVarW new
let targetTy = getVarType oldVar
ptrExpr = Cast LM_Inttoptr addrVar (pLift targetTy)
ptrVar <- doExprW (pLift targetTy) ptrExpr
dstVar <- getCmmRegW (CmmLocal dst)
retVar <- doExprW (LMStructU [targetTy,i1])
$ CmpXChg ptrVar oldVar newVar SyncSeqCst SyncSeqCst
retVar' <- doExprW targetTy $ ExtractV retVar 0
statement $ Store retVar' dstVar
genCall (PrimTarget (MO_AtomicWrite _width)) [] [addr, val] = runStmtsDecls $ do
addrVar <- exprToVarW addr
valVar <- exprToVarW val
let ptrTy = pLift $ getVarType valVar
ptrExpr = Cast LM_Inttoptr addrVar ptrTy
ptrVar <- doExprW ptrTy ptrExpr
statement $ Expr $ AtomicRMW LAO_Xchg ptrVar valVar SyncSeqCst
-- Handle memcpy function specifically since llvm's intrinsic version takes
-- some extra parameters.
genCall t@(PrimTarget op) [] args
| Just align <- machOpMemcpyishAlign op = runStmtsDecls $ do
dflags <- getDynFlags
let isVolTy = [i1]
isVolVal = [mkIntLit i1 0]
argTy | MO_Memset _ <- op = [i8Ptr, i8, llvmWord dflags, i32] ++ isVolTy
| otherwise = [i8Ptr, i8Ptr, llvmWord dflags, i32] ++ isVolTy
funTy = \name -> LMFunction $ LlvmFunctionDecl name ExternallyVisible
CC_Ccc LMVoid FixedArgs (tysToParams argTy) Nothing
let (_, arg_hints) = foreignTargetHints t
let args_hints = zip args arg_hints
argVars <- arg_varsW args_hints ([], nilOL, [])
fptr <- getFunPtrW funTy t
argVars' <- castVarsW $ zip argVars argTy
doTrashStmts
let alignVal = mkIntLit i32 align
arguments = argVars' ++ (alignVal:isVolVal)
statement $ Expr $ Call StdCall fptr arguments []
-- We handle MO_U_Mul2 by simply using a 'mul' instruction, but with operands
-- twice the width (we first zero-extend them), e.g., on 64-bit arch we will
-- generate 'mul' on 128-bit operands. Then we only need some plumbing to
-- extract the two 64-bit values out of 128-bit result.
genCall (PrimTarget (MO_U_Mul2 w)) [dstH, dstL] [lhs, rhs] = runStmtsDecls $ do
let width = widthToLlvmInt w
bitWidth = widthInBits w
width2x = LMInt (bitWidth * 2)
-- First zero-extend the operands ('mul' instruction requires the operands
-- and the result to be of the same type). Note that we don't use 'castVars'
-- because it tries to do LM_Sext.
lhsVar <- exprToVarW lhs
rhsVar <- exprToVarW rhs
lhsExt <- doExprW width2x $ Cast LM_Zext lhsVar width2x
rhsExt <- doExprW width2x $ Cast LM_Zext rhsVar width2x
-- Do the actual multiplication (note that the result is also 2x width).
retV <- doExprW width2x $ LlvmOp LM_MO_Mul lhsExt rhsExt
-- Extract the lower bits of the result into retL.
retL <- doExprW width $ Cast LM_Trunc retV width
-- Now we right-shift the higher bits by width.
let widthLlvmLit = LMLitVar $ LMIntLit (fromIntegral bitWidth) width
retShifted <- doExprW width2x $ LlvmOp LM_MO_LShr retV widthLlvmLit
-- And extract them into retH.
retH <- doExprW width $ Cast LM_Trunc retShifted width
dstRegL <- getCmmRegW (CmmLocal dstL)
dstRegH <- getCmmRegW (CmmLocal dstH)
statement $ Store retL dstRegL
statement $ Store retH dstRegH
-- MO_U_QuotRem2 is another case we handle by widening the registers to double
-- the width and use normal LLVM instructions (similarly to the MO_U_Mul2). The
-- main difference here is that we need to combine two words into one register
-- and then use both 'udiv' and 'urem' instructions to compute the result.
genCall (PrimTarget (MO_U_QuotRem2 w))
[dstQ, dstR] [lhsH, lhsL, rhs] = runStmtsDecls $ do
let width = widthToLlvmInt w
bitWidth = widthInBits w
width2x = LMInt (bitWidth * 2)
-- First zero-extend all parameters to double width.
let zeroExtend expr = do
var <- exprToVarW expr
doExprW width2x $ Cast LM_Zext var width2x
lhsExtH <- zeroExtend lhsH
lhsExtL <- zeroExtend lhsL
rhsExt <- zeroExtend rhs
-- Now we combine the first two parameters (that represent the high and low
-- bits of the value). So first left-shift the high bits to their position
-- and then bit-or them with the low bits.
let widthLlvmLit = LMLitVar $ LMIntLit (fromIntegral bitWidth) width
lhsExtHShifted <- doExprW width2x $ LlvmOp LM_MO_Shl lhsExtH widthLlvmLit
lhsExt <- doExprW width2x $ LlvmOp LM_MO_Or lhsExtHShifted lhsExtL
-- Finally, we can call 'udiv' and 'urem' to compute the results.
retExtDiv <- doExprW width2x $ LlvmOp LM_MO_UDiv lhsExt rhsExt
retExtRem <- doExprW width2x $ LlvmOp LM_MO_URem lhsExt rhsExt
-- And since everything is in 2x width, we need to truncate the results and
-- then return them.
let narrow var = doExprW width $ Cast LM_Trunc var width
retDiv <- narrow retExtDiv
retRem <- narrow retExtRem
dstRegQ <- lift $ getCmmReg (CmmLocal dstQ)
dstRegR <- lift $ getCmmReg (CmmLocal dstR)
statement $ Store retDiv dstRegQ
statement $ Store retRem dstRegR
-- Handle the MO_{Add,Sub}IntC separately. LLVM versions return a record from
-- which we need to extract the actual values.
genCall t@(PrimTarget (MO_AddIntC w)) [dstV, dstO] [lhs, rhs] =
genCallWithOverflow t w [dstV, dstO] [lhs, rhs]
genCall t@(PrimTarget (MO_SubIntC w)) [dstV, dstO] [lhs, rhs] =
genCallWithOverflow t w [dstV, dstO] [lhs, rhs]
-- Similar to MO_{Add,Sub}IntC, but MO_Add2 expects the first element of the
-- return tuple to be the overflow bit and the second element to contain the
-- actual result of the addition. So we still use genCallWithOverflow but swap
-- the return registers.
genCall t@(PrimTarget (MO_Add2 w)) [dstO, dstV] [lhs, rhs] =
genCallWithOverflow t w [dstV, dstO] [lhs, rhs]
genCall t@(PrimTarget (MO_SubWordC w)) [dstV, dstO] [lhs, rhs] =
genCallWithOverflow t w [dstV, dstO] [lhs, rhs]
-- Handle all other foreign calls and prim ops.
genCall target res args = runStmtsDecls $ do
dflags <- getDynFlags
-- parameter types
let arg_type (_, AddrHint) = i8Ptr
-- cast pointers to i8*. Llvm equivalent of void*
arg_type (expr, _) = cmmToLlvmType $ cmmExprType dflags expr
-- ret type
let ret_type [] = LMVoid
ret_type [(_, AddrHint)] = i8Ptr
ret_type [(reg, _)] = cmmToLlvmType $ localRegType reg
ret_type t = panic $ "genCall: Too many return values! Can only handle"
++ " 0 or 1, given " ++ show (length t) ++ "."
-- extract Cmm call convention, and translate to LLVM call convention
platform <- lift $ getLlvmPlatform
let lmconv = case target of
ForeignTarget _ (ForeignConvention conv _ _ _) ->
case conv of
StdCallConv -> case platformArch platform of
ArchX86 -> CC_X86_Stdcc
ArchX86_64 -> CC_X86_Stdcc
_ -> CC_Ccc
CCallConv -> CC_Ccc
CApiConv -> CC_Ccc
PrimCallConv -> panic "LlvmCodeGen.CodeGen.genCall: PrimCallConv"
JavaScriptCallConv -> panic "LlvmCodeGen.CodeGen.genCall: JavaScriptCallConv"
PrimTarget _ -> CC_Ccc
{-
CC_Ccc of the possibilities here are a worry with the use of a custom
calling convention for passing STG args. In practice the more
dangerous combinations (e.g StdCall + llvmGhcCC) don't occur.
The native code generator only handles StdCall and CCallConv.
-}
-- call attributes
let fnAttrs | never_returns = NoReturn : llvmStdFunAttrs
| otherwise = llvmStdFunAttrs
never_returns = case target of
ForeignTarget _ (ForeignConvention _ _ _ CmmNeverReturns) -> True
_ -> False
-- fun type
let (res_hints, arg_hints) = foreignTargetHints target
let args_hints = zip args arg_hints
let ress_hints = zip res res_hints
let ccTy = StdCall -- tail calls should be done through CmmJump
let retTy = ret_type ress_hints
let argTy = tysToParams $ map arg_type args_hints
let funTy = \name -> LMFunction $ LlvmFunctionDecl name ExternallyVisible
lmconv retTy FixedArgs argTy (llvmFunAlign dflags)
argVars <- arg_varsW args_hints ([], nilOL, [])
fptr <- getFunPtrW funTy target
let doReturn | ccTy == TailCall = statement $ Return Nothing
| never_returns = statement $ Unreachable
| otherwise = return ()
doTrashStmts
-- make the actual call
case retTy of
LMVoid -> do
statement $ Expr $ Call ccTy fptr argVars fnAttrs
_ -> do
v1 <- doExprW retTy $ Call ccTy fptr argVars fnAttrs
-- get the return register
let ret_reg [reg] = reg
ret_reg t = panic $ "genCall: Bad number of registers! Can only handle"
++ " 1, given " ++ show (length t) ++ "."
let creg = ret_reg res
vreg <- getCmmRegW (CmmLocal creg)
if retTy == pLower (getVarType vreg)
then do
statement $ Store v1 vreg
doReturn
else do
let ty = pLower $ getVarType vreg
let op = case ty of
vt | isPointer vt -> LM_Bitcast
| isInt vt -> LM_Ptrtoint
| otherwise ->
panic $ "genCall: CmmReg bad match for"
++ " returned type!"
v2 <- doExprW ty $ Cast op v1 ty
statement $ Store v2 vreg
doReturn
-- | Generate a call to an LLVM intrinsic that performs an arithmetic operation
-- with overflow bit (i.e., returns a struct containing the actual result of the
-- operation and an overflow bit). This function will also extract the overflow
-- bit and zero-extend it (all the corresponding Cmm PrimOps represent the
-- overflow "bit" as a usual Int# or Word#).
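--
-- Illustrative note (added here, not from the original sources): the overflow
-- intrinsics used below return a two-field struct, e.g.
--
--   declare {i64, i1} @llvm.sadd.with.overflow.i64(i64, i64)
--
-- so the arithmetic result is field 0 and the overflow flag is field 1;
-- 'genCallExtract' pulls both out with extractvalue.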
genCallWithOverflow
:: ForeignTarget -> Width -> [CmmFormal] -> [CmmActual] -> LlvmM StmtData
genCallWithOverflow t@(PrimTarget op) w [dstV, dstO] [lhs, rhs] = do
-- So far this was only tested for the following four CallishMachOps.
let valid = op `elem` [ MO_Add2 w
, MO_AddIntC w
, MO_SubIntC w
, MO_SubWordC w
]
MASSERT(valid)
let width = widthToLlvmInt w
-- This will do most of the work of generating the call to the intrinsic and
-- extracting the values from the struct.
(value, overflowBit, (stmts, top)) <-
genCallExtract t w (lhs, rhs) (width, i1)
-- value is i<width>, but overflowBit is i1, so we need to cast (Cmm expects
-- both to be i<width>)
(overflow, zext) <- doExpr width $ Cast LM_Zext overflowBit width
dstRegV <- getCmmReg (CmmLocal dstV)
dstRegO <- getCmmReg (CmmLocal dstO)
let storeV = Store value dstRegV
storeO = Store overflow dstRegO
return (stmts `snocOL` zext `snocOL` storeV `snocOL` storeO, top)
genCallWithOverflow _ _ _ _ =
    panic "genCallWithOverflow: wrong ForeignTarget or number of arguments"
-- | A helper function for genCallWithOverflow that handles generating the call
-- to the LLVM intrinsic and extracting the result from the struct to LlvmVars.
genCallExtract
:: ForeignTarget -- ^ PrimOp
-> Width -- ^ Width of the operands.
-> (CmmActual, CmmActual) -- ^ Actual arguments.
-> (LlvmType, LlvmType) -- ^ LLVM types of the returned struct.
-> LlvmM (LlvmVar, LlvmVar, StmtData)
genCallExtract target@(PrimTarget op) w (argA, argB) (llvmTypeA, llvmTypeB) = do
let width = widthToLlvmInt w
argTy = [width, width]
retTy = LMStructU [llvmTypeA, llvmTypeB]
-- Process the arguments.
let args_hints = zip [argA, argB] (snd $ foreignTargetHints target)
(argsV1, args1, top1) <- arg_vars args_hints ([], nilOL, [])
(argsV2, args2) <- castVars $ zip argsV1 argTy
-- Get the function and make the call.
fname <- cmmPrimOpFunctions op
(fptr, _, top2) <- getInstrinct fname retTy argTy
-- We use StdCall for primops. See also the last case of genCall.
(retV, call) <- doExpr retTy $ Call StdCall fptr argsV2 []
-- This will result in a two element struct, we need to use "extractvalue"
-- to get them out of it.
(res1, ext1) <- doExpr llvmTypeA (ExtractV retV 0)
(res2, ext2) <- doExpr llvmTypeB (ExtractV retV 1)
let stmts = args1 `appOL` args2 `snocOL` call `snocOL` ext1 `snocOL` ext2
tops = top1 ++ top2
return (res1, res2, (stmts, tops))
genCallExtract _ _ _ _ =
panic "genCallExtract: unsupported ForeignTarget"
-- Handle a simple function call that only needs simple type casting, of the form:
-- truncate arg >>= \a -> call(a) >>= zext
--
-- since GHC only really has i32 and i64 types and things like Word8 are backed
-- by an i32 and just present a logical i8 range. So we must handle conversions
-- from i32 to i8 explicitly as LLVM is strict about types.
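--
-- Rough example (added for clarity; the exact widths and casts depend on the
-- platform word size and on 'castVar'): a popcount at width W8 on a
-- word-sized argument becomes roughly
--
--   %a = trunc i64 %arg to i8
--   %r = call i8 @llvm.ctpop.i8(i8 %a)
--
-- and %r is then cast back to the destination register's width.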
genCallSimpleCast :: Width -> ForeignTarget -> [CmmFormal] -> [CmmActual]
-> LlvmM StmtData
genCallSimpleCast w t@(PrimTarget op) [dst] args = do
let width = widthToLlvmInt w
dstTy = cmmToLlvmType $ localRegType dst
fname <- cmmPrimOpFunctions op
(fptr, _, top3) <- getInstrinct fname width [width]
dstV <- getCmmReg (CmmLocal dst)
let (_, arg_hints) = foreignTargetHints t
let args_hints = zip args arg_hints
(argsV, stmts2, top2) <- arg_vars args_hints ([], nilOL, [])
(argsV', stmts4) <- castVars $ zip argsV [width]
(retV, s1) <- doExpr width $ Call StdCall fptr argsV' []
([retV'], stmts5) <- castVars [(retV,dstTy)]
let s2 = Store retV' dstV
let stmts = stmts2 `appOL` stmts4 `snocOL`
s1 `appOL` stmts5 `snocOL` s2
return (stmts, top2 ++ top3)
genCallSimpleCast _ _ dsts _ =
panic ("genCallSimpleCast: " ++ show (length dsts) ++ " dsts")
-- | Create a function pointer from a target.
getFunPtrW :: (LMString -> LlvmType) -> ForeignTarget
-> WriterT LlvmAccum LlvmM LlvmVar
getFunPtrW funTy targ = liftExprData $ getFunPtr funTy targ
-- | Create a function pointer from a target.
getFunPtr :: (LMString -> LlvmType) -> ForeignTarget
-> LlvmM ExprData
getFunPtr funTy targ = case targ of
ForeignTarget (CmmLit (CmmLabel lbl)) _ -> do
name <- strCLabel_llvm lbl
getHsFunc' name (funTy name)
ForeignTarget expr _ -> do
(v1, stmts, top) <- exprToVar expr
dflags <- getDynFlags
let fty = funTy $ fsLit "dynamic"
cast = case getVarType v1 of
ty | isPointer ty -> LM_Bitcast
ty | isInt ty -> LM_Inttoptr
ty -> panic $ "genCall: Expr is of bad type for function"
++ " call! (" ++ showSDoc dflags (ppr ty) ++ ")"
(v2,s1) <- doExpr (pLift fty) $ Cast cast v1 (pLift fty)
return (v2, stmts `snocOL` s1, top)
PrimTarget mop -> do
name <- cmmPrimOpFunctions mop
let fty = funTy name
getInstrinct2 name fty
-- | Conversion of call arguments.
arg_varsW :: [(CmmActual, ForeignHint)]
-> ([LlvmVar], LlvmStatements, [LlvmCmmDecl])
-> WriterT LlvmAccum LlvmM [LlvmVar]
arg_varsW xs ys = do
(vars, stmts, decls) <- lift $ arg_vars xs ys
tell $ LlvmAccum stmts decls
return vars
-- | Conversion of call arguments.
arg_vars :: [(CmmActual, ForeignHint)]
-> ([LlvmVar], LlvmStatements, [LlvmCmmDecl])
-> LlvmM ([LlvmVar], LlvmStatements, [LlvmCmmDecl])
arg_vars [] (vars, stmts, tops)
= return (vars, stmts, tops)
arg_vars ((e, AddrHint):rest) (vars, stmts, tops)
= do (v1, stmts', top') <- exprToVar e
dflags <- getDynFlags
let op = case getVarType v1 of
ty | isPointer ty -> LM_Bitcast
ty | isInt ty -> LM_Inttoptr
a -> panic $ "genCall: Can't cast llvmType to i8*! ("
++ showSDoc dflags (ppr a) ++ ")"
(v2, s1) <- doExpr i8Ptr $ Cast op v1 i8Ptr
arg_vars rest (vars ++ [v2], stmts `appOL` stmts' `snocOL` s1,
tops ++ top')
arg_vars ((e, _):rest) (vars, stmts, tops)
= do (v1, stmts', top') <- exprToVar e
arg_vars rest (vars ++ [v1], stmts `appOL` stmts', tops ++ top')
-- | Cast a collection of LLVM variables to specific types.
castVarsW :: [(LlvmVar, LlvmType)]
-> WriterT LlvmAccum LlvmM [LlvmVar]
castVarsW vars = do
(vars, stmts) <- lift $ castVars vars
tell $ LlvmAccum stmts mempty
return vars
-- | Cast a collection of LLVM variables to specific types.
castVars :: [(LlvmVar, LlvmType)]
-> LlvmM ([LlvmVar], LlvmStatements)
castVars vars = do
done <- mapM (uncurry castVar) vars
let (vars', stmts) = unzip done
return (vars', toOL stmts)
-- | Cast an LLVM variable to a specific type, panicking if it can't be done.
castVar :: LlvmVar -> LlvmType -> LlvmM (LlvmVar, LlvmStatement)
castVar v t | getVarType v == t
= return (v, Nop)
| otherwise
= do dflags <- getDynFlags
let op = case (getVarType v, t) of
(LMInt n, LMInt m)
-> if n < m then LM_Sext else LM_Trunc
(vt, _) | isFloat vt && isFloat t
-> if llvmWidthInBits dflags vt < llvmWidthInBits dflags t
then LM_Fpext else LM_Fptrunc
(vt, _) | isInt vt && isFloat t -> LM_Sitofp
(vt, _) | isFloat vt && isInt t -> LM_Fptosi
(vt, _) | isInt vt && isPointer t -> LM_Inttoptr
(vt, _) | isPointer vt && isInt t -> LM_Ptrtoint
(vt, _) | isPointer vt && isPointer t -> LM_Bitcast
(vt, _) | isVector vt && isVector t -> LM_Bitcast
(vt, _) -> panic $ "castVars: Can't cast this type ("
++ showSDoc dflags (ppr vt) ++ ") to (" ++ showSDoc dflags (ppr t) ++ ")"
doExpr t $ Cast op v t
-- | Decide what C function to use to implement a CallishMachOp
cmmPrimOpFunctions :: CallishMachOp -> LlvmM LMString
cmmPrimOpFunctions mop = do
dflags <- getDynFlags
let intrinTy1 = "p0i8.p0i8." ++ showSDoc dflags (ppr $ llvmWord dflags)
intrinTy2 = "p0i8." ++ showSDoc dflags (ppr $ llvmWord dflags)
unsupported = panic ("cmmPrimOpFunctions: " ++ show mop
++ " not supported here")
return $ case mop of
MO_F32_Exp -> fsLit "expf"
MO_F32_Log -> fsLit "logf"
MO_F32_Sqrt -> fsLit "llvm.sqrt.f32"
MO_F32_Fabs -> fsLit "llvm.fabs.f32"
MO_F32_Pwr -> fsLit "llvm.pow.f32"
MO_F32_Sin -> fsLit "llvm.sin.f32"
MO_F32_Cos -> fsLit "llvm.cos.f32"
MO_F32_Tan -> fsLit "tanf"
MO_F32_Asin -> fsLit "asinf"
MO_F32_Acos -> fsLit "acosf"
MO_F32_Atan -> fsLit "atanf"
MO_F32_Sinh -> fsLit "sinhf"
MO_F32_Cosh -> fsLit "coshf"
MO_F32_Tanh -> fsLit "tanhf"
MO_F64_Exp -> fsLit "exp"
MO_F64_Log -> fsLit "log"
MO_F64_Sqrt -> fsLit "llvm.sqrt.f64"
MO_F64_Fabs -> fsLit "llvm.fabs.f64"
MO_F64_Pwr -> fsLit "llvm.pow.f64"
MO_F64_Sin -> fsLit "llvm.sin.f64"
MO_F64_Cos -> fsLit "llvm.cos.f64"
MO_F64_Tan -> fsLit "tan"
MO_F64_Asin -> fsLit "asin"
MO_F64_Acos -> fsLit "acos"
MO_F64_Atan -> fsLit "atan"
MO_F64_Sinh -> fsLit "sinh"
MO_F64_Cosh -> fsLit "cosh"
MO_F64_Tanh -> fsLit "tanh"
MO_Memcpy _ -> fsLit $ "llvm.memcpy." ++ intrinTy1
MO_Memmove _ -> fsLit $ "llvm.memmove." ++ intrinTy1
MO_Memset _ -> fsLit $ "llvm.memset." ++ intrinTy2
MO_Memcmp _ -> fsLit $ "memcmp"
(MO_PopCnt w) -> fsLit $ "llvm.ctpop." ++ showSDoc dflags (ppr $ widthToLlvmInt w)
(MO_BSwap w) -> fsLit $ "llvm.bswap." ++ showSDoc dflags (ppr $ widthToLlvmInt w)
(MO_Clz w) -> fsLit $ "llvm.ctlz." ++ showSDoc dflags (ppr $ widthToLlvmInt w)
(MO_Ctz w) -> fsLit $ "llvm.cttz." ++ showSDoc dflags (ppr $ widthToLlvmInt w)
(MO_Prefetch_Data _ )-> fsLit "llvm.prefetch"
MO_AddIntC w -> fsLit $ "llvm.sadd.with.overflow."
++ showSDoc dflags (ppr $ widthToLlvmInt w)
MO_SubIntC w -> fsLit $ "llvm.ssub.with.overflow."
++ showSDoc dflags (ppr $ widthToLlvmInt w)
MO_Add2 w -> fsLit $ "llvm.uadd.with.overflow."
++ showSDoc dflags (ppr $ widthToLlvmInt w)
MO_SubWordC w -> fsLit $ "llvm.usub.with.overflow."
++ showSDoc dflags (ppr $ widthToLlvmInt w)
MO_S_QuotRem {} -> unsupported
MO_U_QuotRem {} -> unsupported
MO_U_QuotRem2 {} -> unsupported
    -- We support MO_U_Mul2 through an ordinary LLVM mul instruction; see the
-- appropriate case of genCall.
MO_U_Mul2 {} -> unsupported
MO_WriteBarrier -> unsupported
MO_Touch -> unsupported
MO_UF_Conv _ -> unsupported
MO_AtomicRead _ -> unsupported
MO_AtomicRMW _ _ -> unsupported
MO_AtomicWrite _ -> unsupported
MO_Cmpxchg _ -> unsupported
-- | Tail function calls
genJump :: CmmExpr -> [GlobalReg] -> LlvmM StmtData
-- Call to known function
genJump (CmmLit (CmmLabel lbl)) live = do
(vf, stmts, top) <- getHsFunc live lbl
(stgRegs, stgStmts) <- funEpilogue live
let s1 = Expr $ Call TailCall vf stgRegs llvmStdFunAttrs
let s2 = Return Nothing
return (stmts `appOL` stgStmts `snocOL` s1 `snocOL` s2, top)
-- Call to unknown function / address
genJump expr live = do
fty <- llvmFunTy live
(vf, stmts, top) <- exprToVar expr
dflags <- getDynFlags
let cast = case getVarType vf of
ty | isPointer ty -> LM_Bitcast
ty | isInt ty -> LM_Inttoptr
ty -> panic $ "genJump: Expr is of bad type for function call! ("
++ showSDoc dflags (ppr ty) ++ ")"
(v1, s1) <- doExpr (pLift fty) $ Cast cast vf (pLift fty)
(stgRegs, stgStmts) <- funEpilogue live
let s2 = Expr $ Call TailCall v1 stgRegs llvmStdFunAttrs
let s3 = Return Nothing
return (stmts `snocOL` s1 `appOL` stgStmts `snocOL` s2 `snocOL` s3,
top)
-- | CmmAssign operation
--
-- We use stack allocated variables for CmmReg. The optimiser will replace
-- these with registers when possible.
genAssign :: CmmReg -> CmmExpr -> LlvmM StmtData
genAssign reg val = do
vreg <- getCmmReg reg
(vval, stmts2, top2) <- exprToVar val
let stmts = stmts2
let ty = (pLower . getVarType) vreg
dflags <- getDynFlags
case ty of
-- Some registers are pointer types, so need to cast value to pointer
LMPointer _ | getVarType vval == llvmWord dflags -> do
(v, s1) <- doExpr ty $ Cast LM_Inttoptr vval ty
let s2 = Store v vreg
return (stmts `snocOL` s1 `snocOL` s2, top2)
LMVector _ _ -> do
(v, s1) <- doExpr ty $ Cast LM_Bitcast vval ty
let s2 = Store v vreg
return (stmts `snocOL` s1 `snocOL` s2, top2)
_ -> do
let s1 = Store vval vreg
return (stmts `snocOL` s1, top2)
-- | CmmStore operation
genStore :: CmmExpr -> CmmExpr -> LlvmM StmtData
-- First we try to detect a few common cases and produce better code for
-- these than the default case. We are mostly trying to detect Cmm code
-- like I32[Sp + n] and use 'getelementptr' operations instead of the
-- generic case that uses casts and pointer arithmetic.
genStore addr@(CmmReg (CmmGlobal r)) val
= genStore_fast addr r 0 val
genStore addr@(CmmRegOff (CmmGlobal r) n) val
= genStore_fast addr r n val
genStore addr@(CmmMachOp (MO_Add _) [
(CmmReg (CmmGlobal r)),
(CmmLit (CmmInt n _))])
val
= genStore_fast addr r (fromInteger n) val
genStore addr@(CmmMachOp (MO_Sub _) [
(CmmReg (CmmGlobal r)),
(CmmLit (CmmInt n _))])
val
= genStore_fast addr r (negate $ fromInteger n) val
-- generic case
genStore addr val
= getTBAAMeta topN >>= genStore_slow addr val
-- | CmmStore operation
-- This is a special case for storing to a global register pointer
-- offset such as I32[Sp+8].
genStore_fast :: CmmExpr -> GlobalReg -> Int -> CmmExpr
-> LlvmM StmtData
genStore_fast addr r n val
= do dflags <- getDynFlags
(gv, grt, s1) <- getCmmRegVal (CmmGlobal r)
meta <- getTBAARegMeta r
let (ix,rem) = n `divMod` ((llvmWidthInBits dflags . pLower) grt `div` 8)
case isPointer grt && rem == 0 of
True -> do
(vval, stmts, top) <- exprToVar val
(ptr, s2) <- doExpr grt $ GetElemPtr True gv [toI32 ix]
-- We might need a different pointer type, so check
case pLower grt == getVarType vval of
            -- we're fine
True -> do
let s3 = MetaStmt meta $ Store vval ptr
return (stmts `appOL` s1 `snocOL` s2
`snocOL` s3, top)
-- cast to pointer type needed
False -> do
let ty = (pLift . getVarType) vval
(ptr', s3) <- doExpr ty $ Cast LM_Bitcast ptr ty
let s4 = MetaStmt meta $ Store vval ptr'
return (stmts `appOL` s1 `snocOL` s2
`snocOL` s3 `snocOL` s4, top)
          -- If it's a bit type then we use the slow method since
-- we can't avoid casting anyway.
False -> genStore_slow addr val meta
-- | CmmStore operation
-- Generic case. Uses casts and pointer arithmetic if needed.
genStore_slow :: CmmExpr -> CmmExpr -> [MetaAnnot] -> LlvmM StmtData
genStore_slow addr val meta = do
(vaddr, stmts1, top1) <- exprToVar addr
(vval, stmts2, top2) <- exprToVar val
let stmts = stmts1 `appOL` stmts2
dflags <- getDynFlags
case getVarType vaddr of
-- sometimes we need to cast an int to a pointer before storing
LMPointer ty@(LMPointer _) | getVarType vval == llvmWord dflags -> do
(v, s1) <- doExpr ty $ Cast LM_Inttoptr vval ty
let s2 = MetaStmt meta $ Store v vaddr
return (stmts `snocOL` s1 `snocOL` s2, top1 ++ top2)
LMPointer _ -> do
let s1 = MetaStmt meta $ Store vval vaddr
return (stmts `snocOL` s1, top1 ++ top2)
i@(LMInt _) | i == llvmWord dflags -> do
let vty = pLift $ getVarType vval
(vptr, s1) <- doExpr vty $ Cast LM_Inttoptr vaddr vty
let s2 = MetaStmt meta $ Store vval vptr
return (stmts `snocOL` s1 `snocOL` s2, top1 ++ top2)
other ->
pprPanic "genStore: ptr not right type!"
(PprCmm.pprExpr addr <+> text (
"Size of Ptr: " ++ show (llvmPtrBits dflags) ++
", Size of var: " ++ show (llvmWidthInBits dflags other) ++
", Var: " ++ showSDoc dflags (ppr vaddr)))
-- | Unconditional branch
genBranch :: BlockId -> LlvmM StmtData
genBranch id =
let label = blockIdToLlvm id
in return (unitOL $ Branch label, [])
-- | Conditional branch
genCondBranch :: CmmExpr -> BlockId -> BlockId -> Maybe Bool -> LlvmM StmtData
genCondBranch cond idT idF likely = do
let labelT = blockIdToLlvm idT
let labelF = blockIdToLlvm idF
-- See Note [Literals and branch conditions].
(vc, stmts1, top1) <- exprToVarOpt i1Option cond
if getVarType vc == i1
then do
(vc', (stmts2, top2)) <- case likely of
Just b -> genExpectLit (if b then 1 else 0) i1 vc
_ -> pure (vc, (nilOL, []))
let s1 = BranchIf vc' labelT labelF
return (stmts1 `appOL` stmts2 `snocOL` s1, top1 ++ top2)
else do
dflags <- getDynFlags
panic $ "genCondBranch: Cond expr not bool! (" ++ showSDoc dflags (ppr vc) ++ ")"
-- | Generate call to llvm.expect.x intrinsic. Assigning result to a new var.
genExpectLit :: Integer -> LlvmType -> LlvmVar -> LlvmM (LlvmVar, StmtData)
genExpectLit expLit expTy var = do
dflags <- getDynFlags
let
lit = LMLitVar $ LMIntLit expLit expTy
llvmExpectName
| isInt expTy = fsLit $ "llvm.expect." ++ showSDoc dflags (ppr expTy)
      | otherwise = panic $ "genExpectLit: Type not an int!"
(llvmExpect, stmts, top) <-
getInstrinct llvmExpectName expTy [expTy, expTy]
(var', call) <- doExpr expTy $ Call StdCall llvmExpect [var, lit] []
return (var', (stmts `snocOL` call, top))
{- Note [Literals and branch conditions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is important that whenever we generate branch conditions for
literals like '1', they are properly narrowed to an LLVM expression of
type 'i1' (for bools). Otherwise, nobody is happy. So when we convert
a CmmExpr to an LLVM expression for a branch conditional, exprToVarOpt
must be certain to return a properly narrowed type. genLit is
responsible for this, in the case of literal integers.
Often, we won't see direct statements like:
if(1) {
...
} else {
...
}
at this point in the pipeline, because the Glorious Code Generator
will do trivial branch elimination in the sinking pass (among others),
which will eliminate the expression entirely.
However, it's certainly possible and reasonable for this to occur in
hand-written C-- code. Consider something like:
#if !defined(SOME_CONDITIONAL)
#define CHECK_THING(x) 1
#else
#define CHECK_THING(x) some_operation((x))
#endif
f() {
if (CHECK_THING(xyz)) {
...
} else {
...
}
}
In such an instance, CHECK_THING might result in an *expression* in
one case, and a *literal* in the other, depending on what in
particular was #define'd. So we must be sure to properly narrow the
literal in this case to i1 as it won't be eliminated beforehand.
For a real example of this, see ./rts/StgStdThunks.cmm
-}
-- | Switch branch
genSwitch :: CmmExpr -> SwitchTargets -> LlvmM StmtData
genSwitch cond ids = do
(vc, stmts, top) <- exprToVar cond
let ty = getVarType vc
let labels = [ (mkIntLit ty ix, blockIdToLlvm b)
| (ix, b) <- switchTargetsCases ids ]
-- out of range is undefined, so let's just branch to first label
let defLbl | Just l <- switchTargetsDefault ids = blockIdToLlvm l
| otherwise = snd (head labels)
let s1 = Switch vc defLbl labels
return $ (stmts `snocOL` s1, top)
-- -----------------------------------------------------------------------------
-- * CmmExpr code generation
--
-- | An expression conversion return data:
-- * LlvmVar: The var holding the result of the expression
-- * LlvmStatements: Any statements needed to evaluate the expression
-- * LlvmCmmDecl: Any global data needed for this expression
type ExprData = (LlvmVar, LlvmStatements, [LlvmCmmDecl])
-- | Values which can be passed to 'exprToVar' to configure its
-- behaviour in certain circumstances.
--
-- Currently just used for determining if a comparison should return
-- a boolean (i1) or a word. See Note [Literals and branch conditions].
newtype EOption = EOption { i1Expected :: Bool }
-- XXX: EOption is an ugly and inefficient solution to this problem.
-- | i1 type expected (condition scrutinee).
i1Option :: EOption
i1Option = EOption True
-- | Word type expected (usual).
wordOption :: EOption
wordOption = EOption False
-- | Convert a CmmExpr to a list of LlvmStatements with the result of the
-- expression being stored in the returned LlvmVar.
exprToVar :: CmmExpr -> LlvmM ExprData
exprToVar = exprToVarOpt wordOption
exprToVarOpt :: EOption -> CmmExpr -> LlvmM ExprData
exprToVarOpt opt e = case e of
CmmLit lit
-> genLit opt lit
CmmLoad e' ty
-> genLoad False e' ty
  -- A CmmReg in an expression is the value, so we must load it. If you want
  -- the actual reg pointer, call getCmmReg directly.
CmmReg r -> do
(v1, ty, s1) <- getCmmRegVal r
case isPointer ty of
True -> do
-- Cmm wants the value, so pointer types must be cast to ints
dflags <- getDynFlags
(v2, s2) <- doExpr (llvmWord dflags) $ Cast LM_Ptrtoint v1 (llvmWord dflags)
return (v2, s1 `snocOL` s2, [])
False -> return (v1, s1, [])
CmmMachOp op exprs
-> genMachOp opt op exprs
CmmRegOff r i
-> do dflags <- getDynFlags
exprToVar $ expandCmmReg dflags (r, i)
CmmStackSlot _ _
-> panic "exprToVar: CmmStackSlot not supported!"
-- | Handle CmmMachOp expressions
genMachOp :: EOption -> MachOp -> [CmmExpr] -> LlvmM ExprData
-- Unary Machop
genMachOp _ op [x] = case op of
MO_Not w ->
let all1 = mkIntLit (widthToLlvmInt w) (-1)
in negate (widthToLlvmInt w) all1 LM_MO_Xor
MO_S_Neg w ->
let all0 = mkIntLit (widthToLlvmInt w) 0
in negate (widthToLlvmInt w) all0 LM_MO_Sub
MO_F_Neg w ->
let all0 = LMLitVar $ LMFloatLit (-0) (widthToLlvmFloat w)
in negate (widthToLlvmFloat w) all0 LM_MO_FSub
MO_SF_Conv _ w -> fiConv (widthToLlvmFloat w) LM_Sitofp
MO_FS_Conv _ w -> fiConv (widthToLlvmInt w) LM_Fptosi
MO_SS_Conv from to
-> sameConv from (widthToLlvmInt to) LM_Trunc LM_Sext
MO_UU_Conv from to
-> sameConv from (widthToLlvmInt to) LM_Trunc LM_Zext
MO_FF_Conv from to
-> sameConv from (widthToLlvmFloat to) LM_Fptrunc LM_Fpext
MO_VS_Neg len w ->
let ty = widthToLlvmInt w
vecty = LMVector len ty
all0 = LMIntLit (-0) ty
all0s = LMLitVar $ LMVectorLit (replicate len all0)
in negateVec vecty all0s LM_MO_Sub
MO_VF_Neg len w ->
let ty = widthToLlvmFloat w
vecty = LMVector len ty
all0 = LMFloatLit (-0) ty
all0s = LMLitVar $ LMVectorLit (replicate len all0)
in negateVec vecty all0s LM_MO_FSub
MO_AlignmentCheck _ _ -> panic "-falignment-sanitisation is not supported by -fllvm"
-- Handle unsupported cases explicitly so we get a warning
    -- of a missing case when new MachOps are added
MO_Add _ -> panicOp
MO_Mul _ -> panicOp
MO_Sub _ -> panicOp
MO_S_MulMayOflo _ -> panicOp
MO_S_Quot _ -> panicOp
MO_S_Rem _ -> panicOp
MO_U_MulMayOflo _ -> panicOp
MO_U_Quot _ -> panicOp
MO_U_Rem _ -> panicOp
MO_Eq _ -> panicOp
MO_Ne _ -> panicOp
MO_S_Ge _ -> panicOp
MO_S_Gt _ -> panicOp
MO_S_Le _ -> panicOp
MO_S_Lt _ -> panicOp
MO_U_Ge _ -> panicOp
MO_U_Gt _ -> panicOp
MO_U_Le _ -> panicOp
MO_U_Lt _ -> panicOp
MO_F_Add _ -> panicOp
MO_F_Sub _ -> panicOp
MO_F_Mul _ -> panicOp
MO_F_Quot _ -> panicOp
MO_F_Eq _ -> panicOp
MO_F_Ne _ -> panicOp
MO_F_Ge _ -> panicOp
MO_F_Gt _ -> panicOp
MO_F_Le _ -> panicOp
MO_F_Lt _ -> panicOp
MO_And _ -> panicOp
MO_Or _ -> panicOp
MO_Xor _ -> panicOp
MO_Shl _ -> panicOp
MO_U_Shr _ -> panicOp
MO_S_Shr _ -> panicOp
MO_V_Insert _ _ -> panicOp
MO_V_Extract _ _ -> panicOp
MO_V_Add _ _ -> panicOp
MO_V_Sub _ _ -> panicOp
MO_V_Mul _ _ -> panicOp
MO_VS_Quot _ _ -> panicOp
MO_VS_Rem _ _ -> panicOp
MO_VU_Quot _ _ -> panicOp
MO_VU_Rem _ _ -> panicOp
MO_VF_Insert _ _ -> panicOp
MO_VF_Extract _ _ -> panicOp
MO_VF_Add _ _ -> panicOp
MO_VF_Sub _ _ -> panicOp
MO_VF_Mul _ _ -> panicOp
MO_VF_Quot _ _ -> panicOp
where
negate ty v2 negOp = do
(vx, stmts, top) <- exprToVar x
(v1, s1) <- doExpr ty $ LlvmOp negOp v2 vx
return (v1, stmts `snocOL` s1, top)
negateVec ty v2 negOp = do
(vx, stmts1, top) <- exprToVar x
([vx'], stmts2) <- castVars [(vx, ty)]
(v1, s1) <- doExpr ty $ LlvmOp negOp v2 vx'
return (v1, stmts1 `appOL` stmts2 `snocOL` s1, top)
fiConv ty convOp = do
(vx, stmts, top) <- exprToVar x
(v1, s1) <- doExpr ty $ Cast convOp vx ty
return (v1, stmts `snocOL` s1, top)
sameConv from ty reduce expand = do
x'@(vx, stmts, top) <- exprToVar x
let sameConv' op = do
(v1, s1) <- doExpr ty $ Cast op vx ty
return (v1, stmts `snocOL` s1, top)
dflags <- getDynFlags
let toWidth = llvmWidthInBits dflags ty
-- LLVM doesn't like trying to convert to same width, so
-- need to check for that as we do get Cmm code doing it.
case widthInBits from of
w | w < toWidth -> sameConv' expand
w | w > toWidth -> sameConv' reduce
_w -> return x'
panicOp = panic $ "LLVM.CodeGen.genMachOp: non unary op encountered"
                   ++ " with one argument! (" ++ show op ++ ")"
-- Handle GlobalRegs pointers
genMachOp opt o@(MO_Add _) e@[(CmmReg (CmmGlobal r)), (CmmLit (CmmInt n _))]
= genMachOp_fast opt o r (fromInteger n) e
genMachOp opt o@(MO_Sub _) e@[(CmmReg (CmmGlobal r)), (CmmLit (CmmInt n _))]
= genMachOp_fast opt o r (negate . fromInteger $ n) e
-- Generic case
genMachOp opt op e = genMachOp_slow opt op e
-- | Handle CmmMachOp expressions
-- This is a specialised method that handles Global register manipulations like
-- 'Sp - 16', using the getelementptr instruction.
genMachOp_fast :: EOption -> MachOp -> GlobalReg -> Int -> [CmmExpr]
-> LlvmM ExprData
genMachOp_fast opt op r n e
= do (gv, grt, s1) <- getCmmRegVal (CmmGlobal r)
dflags <- getDynFlags
let (ix,rem) = n `divMod` ((llvmWidthInBits dflags . pLower) grt `div` 8)
case isPointer grt && rem == 0 of
True -> do
(ptr, s2) <- doExpr grt $ GetElemPtr True gv [toI32 ix]
(var, s3) <- doExpr (llvmWord dflags) $ Cast LM_Ptrtoint ptr (llvmWord dflags)
return (var, s1 `snocOL` s2 `snocOL` s3, [])
False -> genMachOp_slow opt op e
-- | Handle CmmMachOp expressions
-- This handles all the cases not handled by the specialised genMachOp_fast.
genMachOp_slow :: EOption -> MachOp -> [CmmExpr] -> LlvmM ExprData
-- Element extraction
genMachOp_slow _ (MO_V_Extract l w) [val, idx] = runExprData $ do
vval <- exprToVarW val
vidx <- exprToVarW idx
[vval'] <- castVarsW [(vval, LMVector l ty)]
doExprW ty $ Extract vval' vidx
where
ty = widthToLlvmInt w
genMachOp_slow _ (MO_VF_Extract l w) [val, idx] = runExprData $ do
vval <- exprToVarW val
vidx <- exprToVarW idx
[vval'] <- castVarsW [(vval, LMVector l ty)]
doExprW ty $ Extract vval' vidx
where
ty = widthToLlvmFloat w
-- Element insertion
genMachOp_slow _ (MO_V_Insert l w) [val, elt, idx] = runExprData $ do
vval <- exprToVarW val
velt <- exprToVarW elt
vidx <- exprToVarW idx
[vval'] <- castVarsW [(vval, ty)]
doExprW ty $ Insert vval' velt vidx
where
ty = LMVector l (widthToLlvmInt w)
genMachOp_slow _ (MO_VF_Insert l w) [val, elt, idx] = runExprData $ do
vval <- exprToVarW val
velt <- exprToVarW elt
vidx <- exprToVarW idx
[vval'] <- castVarsW [(vval, ty)]
doExprW ty $ Insert vval' velt vidx
where
ty = LMVector l (widthToLlvmFloat w)
-- Binary MachOp
genMachOp_slow opt op [x, y] = case op of
MO_Eq _ -> genBinComp opt LM_CMP_Eq
MO_Ne _ -> genBinComp opt LM_CMP_Ne
MO_S_Gt _ -> genBinComp opt LM_CMP_Sgt
MO_S_Ge _ -> genBinComp opt LM_CMP_Sge
MO_S_Lt _ -> genBinComp opt LM_CMP_Slt
MO_S_Le _ -> genBinComp opt LM_CMP_Sle
MO_U_Gt _ -> genBinComp opt LM_CMP_Ugt
MO_U_Ge _ -> genBinComp opt LM_CMP_Uge
MO_U_Lt _ -> genBinComp opt LM_CMP_Ult
MO_U_Le _ -> genBinComp opt LM_CMP_Ule
MO_Add _ -> genBinMach LM_MO_Add
MO_Sub _ -> genBinMach LM_MO_Sub
MO_Mul _ -> genBinMach LM_MO_Mul
MO_U_MulMayOflo _ -> panic "genMachOp: MO_U_MulMayOflo unsupported!"
MO_S_MulMayOflo w -> isSMulOK w x y
MO_S_Quot _ -> genBinMach LM_MO_SDiv
MO_S_Rem _ -> genBinMach LM_MO_SRem
MO_U_Quot _ -> genBinMach LM_MO_UDiv
MO_U_Rem _ -> genBinMach LM_MO_URem
MO_F_Eq _ -> genBinComp opt LM_CMP_Feq
MO_F_Ne _ -> genBinComp opt LM_CMP_Fne
MO_F_Gt _ -> genBinComp opt LM_CMP_Fgt
MO_F_Ge _ -> genBinComp opt LM_CMP_Fge
MO_F_Lt _ -> genBinComp opt LM_CMP_Flt
MO_F_Le _ -> genBinComp opt LM_CMP_Fle
MO_F_Add _ -> genBinMach LM_MO_FAdd
MO_F_Sub _ -> genBinMach LM_MO_FSub
MO_F_Mul _ -> genBinMach LM_MO_FMul
MO_F_Quot _ -> genBinMach LM_MO_FDiv
MO_And _ -> genBinMach LM_MO_And
MO_Or _ -> genBinMach LM_MO_Or
MO_Xor _ -> genBinMach LM_MO_Xor
MO_Shl _ -> genBinMach LM_MO_Shl
MO_U_Shr _ -> genBinMach LM_MO_LShr
MO_S_Shr _ -> genBinMach LM_MO_AShr
MO_V_Add l w -> genCastBinMach (LMVector l (widthToLlvmInt w)) LM_MO_Add
MO_V_Sub l w -> genCastBinMach (LMVector l (widthToLlvmInt w)) LM_MO_Sub
MO_V_Mul l w -> genCastBinMach (LMVector l (widthToLlvmInt w)) LM_MO_Mul
MO_VS_Quot l w -> genCastBinMach (LMVector l (widthToLlvmInt w)) LM_MO_SDiv
MO_VS_Rem l w -> genCastBinMach (LMVector l (widthToLlvmInt w)) LM_MO_SRem
MO_VU_Quot l w -> genCastBinMach (LMVector l (widthToLlvmInt w)) LM_MO_UDiv
MO_VU_Rem l w -> genCastBinMach (LMVector l (widthToLlvmInt w)) LM_MO_URem
MO_VF_Add l w -> genCastBinMach (LMVector l (widthToLlvmFloat w)) LM_MO_FAdd
MO_VF_Sub l w -> genCastBinMach (LMVector l (widthToLlvmFloat w)) LM_MO_FSub
MO_VF_Mul l w -> genCastBinMach (LMVector l (widthToLlvmFloat w)) LM_MO_FMul
MO_VF_Quot l w -> genCastBinMach (LMVector l (widthToLlvmFloat w)) LM_MO_FDiv
MO_Not _ -> panicOp
MO_S_Neg _ -> panicOp
MO_F_Neg _ -> panicOp
MO_SF_Conv _ _ -> panicOp
MO_FS_Conv _ _ -> panicOp
MO_SS_Conv _ _ -> panicOp
MO_UU_Conv _ _ -> panicOp
MO_FF_Conv _ _ -> panicOp
MO_V_Insert {} -> panicOp
MO_V_Extract {} -> panicOp
MO_VS_Neg {} -> panicOp
MO_VF_Insert {} -> panicOp
MO_VF_Extract {} -> panicOp
MO_VF_Neg {} -> panicOp
MO_AlignmentCheck {} -> panicOp
where
binLlvmOp ty binOp = runExprData $ do
vx <- exprToVarW x
vy <- exprToVarW y
if getVarType vx == getVarType vy
then do
doExprW (ty vx) $ binOp vx vy
else do
-- Error. Continue anyway so we can debug the generated ll file.
dflags <- getDynFlags
let style = mkCodeStyle CStyle
toString doc = renderWithStyle dflags doc style
cmmToStr = (lines . toString . PprCmm.pprExpr)
statement $ Comment $ map fsLit $ cmmToStr x
statement $ Comment $ map fsLit $ cmmToStr y
doExprW (ty vx) $ binOp vx vy
binCastLlvmOp ty binOp = runExprData $ do
vx <- exprToVarW x
vy <- exprToVarW y
[vx', vy'] <- castVarsW [(vx, ty), (vy, ty)]
doExprW ty $ binOp vx' vy'
-- | Need to use EOption here as Cmm expects word size results from
    -- comparisons while LLVM returns i1. Need to extend to llvmWord type
-- if expected. See Note [Literals and branch conditions].
genBinComp opt cmp = do
ed@(v1, stmts, top) <- binLlvmOp (\_ -> i1) (Compare cmp)
dflags <- getDynFlags
if getVarType v1 == i1
then case i1Expected opt of
True -> return ed
False -> do
let w_ = llvmWord dflags
(v2, s1) <- doExpr w_ $ Cast LM_Zext v1 w_
return (v2, stmts `snocOL` s1, top)
else
          panic $ "genBinComp: Compare returned type other than i1! "
++ (showSDoc dflags $ ppr $ getVarType v1)
genBinMach op = binLlvmOp getVarType (LlvmOp op)
genCastBinMach ty op = binCastLlvmOp ty (LlvmOp op)
-- | Detect if overflow will occur in signed multiply of the two
-- CmmExpr's. This is the LLVM assembly equivalent of the NCG
    -- implementation. It's much longer due to type information/safety.
-- This should actually compile to only about 3 asm instructions.
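    --
    -- Sketch of the check (added; it mirrors the code below): sign-extend
    -- both operands to twice the width and multiply; then compare the high
    -- half of the product with the sign bit of the low half (arithmetic
    -- shift right by width-1). Their difference is zero exactly when the
    -- product fits in the original width.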
isSMulOK :: Width -> CmmExpr -> CmmExpr -> LlvmM ExprData
isSMulOK _ x y = runExprData $ do
vx <- exprToVarW x
vy <- exprToVarW y
dflags <- getDynFlags
let word = getVarType vx
let word2 = LMInt $ 2 * (llvmWidthInBits dflags $ getVarType vx)
let shift = llvmWidthInBits dflags word
let shift1 = toIWord dflags (shift - 1)
let shift2 = toIWord dflags shift
if isInt word
then do
x1 <- doExprW word2 $ Cast LM_Sext vx word2
y1 <- doExprW word2 $ Cast LM_Sext vy word2
r1 <- doExprW word2 $ LlvmOp LM_MO_Mul x1 y1
rlow1 <- doExprW word $ Cast LM_Trunc r1 word
rlow2 <- doExprW word $ LlvmOp LM_MO_AShr rlow1 shift1
rhigh1 <- doExprW word2 $ LlvmOp LM_MO_AShr r1 shift2
rhigh2 <- doExprW word $ Cast LM_Trunc rhigh1 word
doExprW word $ LlvmOp LM_MO_Sub rlow2 rhigh2
else
panic $ "isSMulOK: Not bit type! (" ++ showSDoc dflags (ppr word) ++ ")"
panicOp = panic $ "LLVM.CodeGen.genMachOp_slow: unary op encountered"
                   ++ " with two arguments! (" ++ show op ++ ")"
-- More than two expressions, invalid!
genMachOp_slow _ _ _ = panic "genMachOp: More than 2 expressions in MachOp!"
-- | Handle CmmLoad expression.
genLoad :: Atomic -> CmmExpr -> CmmType -> LlvmM ExprData
-- First we try to detect a few common cases and produce better code for
-- these than the default case. We are mostly trying to detect Cmm code
-- like I32[Sp + n] and use 'getelementptr' operations instead of the
-- generic case that uses casts and pointer arithmetic.
genLoad atomic e@(CmmReg (CmmGlobal r)) ty
= genLoad_fast atomic e r 0 ty
genLoad atomic e@(CmmRegOff (CmmGlobal r) n) ty
= genLoad_fast atomic e r n ty
genLoad atomic e@(CmmMachOp (MO_Add _) [
(CmmReg (CmmGlobal r)),
(CmmLit (CmmInt n _))])
ty
= genLoad_fast atomic e r (fromInteger n) ty
genLoad atomic e@(CmmMachOp (MO_Sub _) [
(CmmReg (CmmGlobal r)),
(CmmLit (CmmInt n _))])
ty
= genLoad_fast atomic e r (negate $ fromInteger n) ty
-- generic case
genLoad atomic e ty
= getTBAAMeta topN >>= genLoad_slow atomic e ty
-- | Handle CmmLoad expression.
-- This is a special case for loading from a global register pointer
-- offset such as I32[Sp+8].
genLoad_fast :: Atomic -> CmmExpr -> GlobalReg -> Int -> CmmType
-> LlvmM ExprData
genLoad_fast atomic e r n ty = do
dflags <- getDynFlags
(gv, grt, s1) <- getCmmRegVal (CmmGlobal r)
meta <- getTBAARegMeta r
let ty' = cmmToLlvmType ty
(ix,rem) = n `divMod` ((llvmWidthInBits dflags . pLower) grt `div` 8)
case isPointer grt && rem == 0 of
True -> do
(ptr, s2) <- doExpr grt $ GetElemPtr True gv [toI32 ix]
-- We might need a different pointer type, so check
case grt == ty' of
            -- we're fine
True -> do
(var, s3) <- doExpr ty' (MExpr meta $ loadInstr ptr)
return (var, s1 `snocOL` s2 `snocOL` s3,
[])
-- cast to pointer type needed
False -> do
let pty = pLift ty'
(ptr', s3) <- doExpr pty $ Cast LM_Bitcast ptr pty
(var, s4) <- doExpr ty' (MExpr meta $ loadInstr ptr')
return (var, s1 `snocOL` s2 `snocOL` s3
`snocOL` s4, [])
    -- If it's a bit type then we use the slow method since
-- we can't avoid casting anyway.
False -> genLoad_slow atomic e ty meta
where
loadInstr ptr | atomic = ALoad SyncSeqCst False ptr
| otherwise = Load ptr
-- | Handle Cmm load expression.
-- Generic case. Uses casts and pointer arithmetic if needed.
genLoad_slow :: Atomic -> CmmExpr -> CmmType -> [MetaAnnot] -> LlvmM ExprData
genLoad_slow atomic e ty meta = runExprData $ do
iptr <- exprToVarW e
dflags <- getDynFlags
case getVarType iptr of
LMPointer _ -> do
doExprW (cmmToLlvmType ty) (MExpr meta $ loadInstr iptr)
i@(LMInt _) | i == llvmWord dflags -> do
let pty = LMPointer $ cmmToLlvmType ty
ptr <- doExprW pty $ Cast LM_Inttoptr iptr pty
doExprW (cmmToLlvmType ty) (MExpr meta $ loadInstr ptr)
other -> do pprPanic "exprToVar: CmmLoad expression is not right type!"
(PprCmm.pprExpr e <+> text (
"Size of Ptr: " ++ show (llvmPtrBits dflags) ++
", Size of var: " ++ show (llvmWidthInBits dflags other) ++
", Var: " ++ showSDoc dflags (ppr iptr)))
where
loadInstr ptr | atomic = ALoad SyncSeqCst False ptr
| otherwise = Load ptr
-- | Handle CmmReg expression. This will return a pointer to the stack
-- location of the register. Throws an error if it isn't allocated on
-- the stack.
getCmmReg :: CmmReg -> LlvmM LlvmVar
getCmmReg (CmmLocal (LocalReg un _))
= do exists <- varLookup un
dflags <- getDynFlags
case exists of
Just ety -> return (LMLocalVar un $ pLift ety)
Nothing -> fail $ "getCmmReg: Cmm register " ++ showSDoc dflags (ppr un) ++ " was not allocated!"
-- This should never happen, as every local variable should
-- have been assigned a value at some point, triggering
-- "funPrologue" to allocate it on the stack.
getCmmReg (CmmGlobal g)
= do onStack <- checkStackReg g
dflags <- getDynFlags
if onStack
then return (lmGlobalRegVar dflags g)
else fail $ "getCmmReg: Cmm register " ++ showSDoc dflags (ppr g) ++ " not stack-allocated!"
-- | Return the value of a given register, as well as its type. Might
-- need to be loaded from the stack.
getCmmRegVal :: CmmReg -> LlvmM (LlvmVar, LlvmType, LlvmStatements)
getCmmRegVal reg =
case reg of
CmmGlobal g -> do
onStack <- checkStackReg g
dflags <- getDynFlags
if onStack then loadFromStack else do
let r = lmGlobalRegArg dflags g
return (r, getVarType r, nilOL)
_ -> loadFromStack
where loadFromStack = do
ptr <- getCmmReg reg
let ty = pLower $ getVarType ptr
(v, s) <- doExpr ty (Load ptr)
return (v, ty, unitOL s)
-- | Allocate a local CmmReg on the stack
allocReg :: CmmReg -> (LlvmVar, LlvmStatements)
allocReg (CmmLocal (LocalReg un ty))
= let ty' = cmmToLlvmType ty
var = LMLocalVar un (LMPointer ty')
alc = Alloca ty' 1
in (var, unitOL $ Assignment var alc)
allocReg _ = panic $ "allocReg: Global reg encountered! Global registers should"
++ " have been handled elsewhere!"
-- | Generate code for a literal
genLit :: EOption -> CmmLit -> LlvmM ExprData
genLit opt (CmmInt i w)
-- See Note [Literals and branch conditions].
= let width | i1Expected opt = i1
| otherwise = LMInt (widthInBits w)
-- comm = Comment [ fsLit $ "EOption: " ++ show opt
-- , fsLit $ "Width : " ++ show w
-- , fsLit $ "Width' : " ++ show (widthInBits w)
-- ]
in return (mkIntLit width i, nilOL, [])
genLit _ (CmmFloat r w)
= return (LMLitVar $ LMFloatLit (fromRational r) (widthToLlvmFloat w),
nilOL, [])
genLit opt (CmmVec ls)
= do llvmLits <- mapM toLlvmLit ls
return (LMLitVar $ LMVectorLit llvmLits, nilOL, [])
where
toLlvmLit :: CmmLit -> LlvmM LlvmLit
toLlvmLit lit = do
(llvmLitVar, _, _) <- genLit opt lit
case llvmLitVar of
LMLitVar llvmLit -> return llvmLit
_ -> panic "genLit"
genLit _ cmm@(CmmLabel l)
= do var <- getGlobalPtr =<< strCLabel_llvm l
dflags <- getDynFlags
let lmty = cmmToLlvmType $ cmmLitType dflags cmm
(v1, s1) <- doExpr lmty $ Cast LM_Ptrtoint var (llvmWord dflags)
return (v1, unitOL s1, [])
genLit opt (CmmLabelOff label off) = do
dflags <- getDynFlags
(vlbl, stmts, stat) <- genLit opt (CmmLabel label)
let voff = toIWord dflags off
(v1, s1) <- doExpr (getVarType vlbl) $ LlvmOp LM_MO_Add vlbl voff
return (v1, stmts `snocOL` s1, stat)
genLit opt (CmmLabelDiffOff l1 l2 off) = do
dflags <- getDynFlags
(vl1, stmts1, stat1) <- genLit opt (CmmLabel l1)
(vl2, stmts2, stat2) <- genLit opt (CmmLabel l2)
let voff = toIWord dflags off
let ty1 = getVarType vl1
let ty2 = getVarType vl2
if (isInt ty1) && (isInt ty2)
&& (llvmWidthInBits dflags ty1 == llvmWidthInBits dflags ty2)
then do
(v1, s1) <- doExpr (getVarType vl1) $ LlvmOp LM_MO_Sub vl1 vl2
(v2, s2) <- doExpr (getVarType v1 ) $ LlvmOp LM_MO_Add v1 voff
return (v2, stmts1 `appOL` stmts2 `snocOL` s1 `snocOL` s2,
stat1 ++ stat2)
else
panic "genLit: CmmLabelDiffOff encountered with different label ty!"
genLit opt (CmmBlock b)
= genLit opt (CmmLabel $ infoTblLbl b)
genLit _ CmmHighStackMark
  = panic "genLit - CmmHighStackMark unsupported!"
-- -----------------------------------------------------------------------------
-- * Misc
--
-- | Find CmmRegs that get assigned and allocate them on the stack
--
-- Any register that gets written needs to be allocated on the
-- stack. This avoids having to map a CmmReg to an equivalent SSA form
-- and avoids having to deal with Phi node insertion. This is also
-- the approach recommended by LLVM developers.
--
-- On the other hand, this is unnecessarily verbose if the register in
-- question is never written. Therefore we skip it where we can to
-- save a few lines in the output and hopefully speed compilation up a
-- bit.
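--
-- Illustrative sketch (added; the variable name is made up): a local register
-- that gets written ends up with a stack slot such as
--
--   %lcl = alloca i64, i32 1
--
-- in the prologue, and later assignments become stores into that slot; LLVM's
-- mem2reg pass is then expected to promote the slot back into SSA registers.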
funPrologue :: LiveGlobalRegs -> [CmmBlock] -> LlvmM StmtData
funPrologue live cmmBlocks = do
trash <- getTrashRegs
let getAssignedRegs :: CmmNode O O -> [CmmReg]
getAssignedRegs (CmmAssign reg _) = [reg]
-- Calls will trash all registers. Unfortunately, this needs them to
-- be stack-allocated in the first place.
getAssignedRegs (CmmUnsafeForeignCall _ rs _) = map CmmGlobal trash ++ map CmmLocal rs
getAssignedRegs _ = []
getRegsBlock (_, body, _) = concatMap getAssignedRegs $ blockToList body
assignedRegs = nub $ concatMap (getRegsBlock . blockSplit) cmmBlocks
isLive r = r `elem` alwaysLive || r `elem` live
dflags <- getDynFlags
stmtss <- flip mapM assignedRegs $ \reg ->
case reg of
CmmLocal (LocalReg un _) -> do
let (newv, stmts) = allocReg reg
varInsert un (pLower $ getVarType newv)
return stmts
CmmGlobal r -> do
let reg = lmGlobalRegVar dflags r
arg = lmGlobalRegArg dflags r
ty = (pLower . getVarType) reg
trash = LMLitVar $ LMUndefLit ty
rval = if isLive r then arg else trash
alloc = Assignment reg $ Alloca (pLower $ getVarType reg) 1
markStackReg r
return $ toOL [alloc, Store rval reg]
return (concatOL stmtss, [])
-- | Function epilogue. Load STG variables to use as arguments for the call.
-- STG liveness optimisation is done here.
funEpilogue :: LiveGlobalRegs -> LlvmM ([LlvmVar], LlvmStatements)
funEpilogue live = do
    -- The set of registers that must be passed with their current values.
let liveRegs = alwaysLive ++ live
isSSE (FloatReg _) = True
isSSE (DoubleReg _) = True
isSSE (XmmReg _) = True
isSSE (YmmReg _) = True
isSSE (ZmmReg _) = True
isSSE _ = False
-- Set to value or "undef" depending on whether the register is
-- actually live
dflags <- getDynFlags
let loadExpr r = do
(v, _, s) <- getCmmRegVal (CmmGlobal r)
return (Just $ v, s)
loadUndef r = do
let ty = (pLower . getVarType $ lmGlobalRegVar dflags r)
return (Just $ LMLitVar $ LMUndefLit ty, nilOL)
platform <- getDynFlag targetPlatform
loads <- flip mapM (activeStgRegs platform) $ \r -> case () of
_ | r `elem` liveRegs -> loadExpr r
| not (isSSE r) -> loadUndef r
| otherwise -> return (Nothing, nilOL)
let (vars, stmts) = unzip loads
return (catMaybes vars, concatOL stmts)
-- | A series of statements to trash all the STG registers.
--
-- In LLVM we pass the STG registers around everywhere in function calls.
-- So this means LLVM considers them live across the entire function, when
-- in reality they usually aren't. For caller-save registers across C calls
-- the saving and restoring of them is done by the Cmm code generator,
-- using Cmm local vars. So to stop LLVM saving them as well (and saving
-- all of them, since it thinks they're always live), we trash them just
-- before the call by assigning the 'undef' value to them. The ones we
-- need are restored from the Cmm local var and the ones we don't need
-- are fine to be trashed.
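--
-- Illustrative example (added; the register naming is assumed): trashing a
-- stack-allocated STG register amounts to a statement like
--
--   store i64 undef, i64* %R1_Var
--
-- emitted just before the unsafe foreign call.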
getTrashStmts :: LlvmM LlvmStatements
getTrashStmts = do
regs <- getTrashRegs
stmts <- flip mapM regs $ \ r -> do
reg <- getCmmReg (CmmGlobal r)
let ty = (pLower . getVarType) reg
return $ Store (LMLitVar $ LMUndefLit ty) reg
return $ toOL stmts
getTrashRegs :: LlvmM [GlobalReg]
getTrashRegs = do plat <- getLlvmPlatform
return $ filter (callerSaves plat) (activeStgRegs plat)
-- | Get a function pointer to the CLabel specified.
--
-- This is for Haskell functions; the function type is assumed, so it doesn't
-- work with foreign functions.
getHsFunc :: LiveGlobalRegs -> CLabel -> LlvmM ExprData
getHsFunc live lbl
= do fty <- llvmFunTy live
name <- strCLabel_llvm lbl
getHsFunc' name fty
getHsFunc' :: LMString -> LlvmType -> LlvmM ExprData
getHsFunc' name fty
= do fun <- getGlobalPtr name
if getVarType fun == fty
then return (fun, nilOL, [])
else do (v1, s1) <- doExpr (pLift fty)
$ Cast LM_Bitcast fun (pLift fty)
return (v1, unitOL s1, [])
-- | Create a new local var
mkLocalVar :: LlvmType -> LlvmM LlvmVar
mkLocalVar ty = do
un <- getUniqueM
return $ LMLocalVar un ty
-- | Execute an expression, assigning result to a var
doExpr :: LlvmType -> LlvmExpression -> LlvmM (LlvmVar, LlvmStatement)
doExpr ty expr = do
v <- mkLocalVar ty
return (v, Assignment v expr)
-- | Expand CmmRegOff
expandCmmReg :: DynFlags -> (CmmReg, Int) -> CmmExpr
expandCmmReg dflags (reg, off)
= let width = typeWidth (cmmRegType dflags reg)
voff = CmmLit $ CmmInt (fromIntegral off) width
in CmmMachOp (MO_Add width) [CmmReg reg, voff]
-- | Convert a block id into an appropriate LLVM label
blockIdToLlvm :: BlockId -> LlvmVar
blockIdToLlvm bid = LMLocalVar (getUnique bid) LMLabel
-- | Create Llvm int Literal
mkIntLit :: Integral a => LlvmType -> a -> LlvmVar
mkIntLit ty i = LMLitVar $ LMIntLit (toInteger i) ty
-- | Convert an int type to an LlvmVar of word or i32 size
toI32 :: Integral a => a -> LlvmVar
toI32 = mkIntLit i32
toIWord :: Integral a => DynFlags -> a -> LlvmVar
toIWord dflags = mkIntLit (llvmWord dflags)
-- | Error functions
panic :: String -> a
panic s = Outputable.panic $ "LlvmCodeGen.CodeGen." ++ s
pprPanic :: String -> SDoc -> a
pprPanic s d = Outputable.pprPanic ("LlvmCodeGen.CodeGen." ++ s) d
-- | Returns TBAA meta data by unique
getTBAAMeta :: Unique -> LlvmM [MetaAnnot]
getTBAAMeta u = do
mi <- getUniqMeta u
return [MetaAnnot tbaa (MetaNode i) | let Just i = mi]
-- | Returns TBAA meta data for given register
getTBAARegMeta :: GlobalReg -> LlvmM [MetaAnnot]
getTBAARegMeta = getTBAAMeta . getTBAA
-- | A more convenient way of accumulating LLVM statements and declarations.
data LlvmAccum = LlvmAccum LlvmStatements [LlvmCmmDecl]
instance Semigroup LlvmAccum where
LlvmAccum stmtsA declsA <> LlvmAccum stmtsB declsB =
LlvmAccum (stmtsA Semigroup.<> stmtsB) (declsA Semigroup.<> declsB)
instance Monoid LlvmAccum where
mempty = LlvmAccum nilOL []
mappend = (Semigroup.<>)
liftExprData :: LlvmM ExprData -> WriterT LlvmAccum LlvmM LlvmVar
liftExprData action = do
(var, stmts, decls) <- lift action
tell $ LlvmAccum stmts decls
return var
statement :: LlvmStatement -> WriterT LlvmAccum LlvmM ()
statement stmt = tell $ LlvmAccum (unitOL stmt) []
doExprW :: LlvmType -> LlvmExpression -> WriterT LlvmAccum LlvmM LlvmVar
doExprW a b = do
(var, stmt) <- lift $ doExpr a b
statement stmt
return var
exprToVarW :: CmmExpr -> WriterT LlvmAccum LlvmM LlvmVar
exprToVarW = liftExprData . exprToVar
runExprData :: WriterT LlvmAccum LlvmM LlvmVar -> LlvmM ExprData
runExprData action = do
(var, LlvmAccum stmts decls) <- runWriterT action
return (var, stmts, decls)
runStmtsDecls :: WriterT LlvmAccum LlvmM () -> LlvmM (LlvmStatements, [LlvmCmmDecl])
runStmtsDecls action = do
LlvmAccum stmts decls <- execWriterT action
return (stmts, decls)
getCmmRegW :: CmmReg -> WriterT LlvmAccum LlvmM LlvmVar
getCmmRegW = lift . getCmmReg
genLoadW :: Atomic -> CmmExpr -> CmmType -> WriterT LlvmAccum LlvmM LlvmVar
genLoadW atomic e ty = liftExprData $ genLoad atomic e ty
doTrashStmts :: WriterT LlvmAccum LlvmM ()
doTrashStmts = do
stmts <- lift getTrashStmts
tell $ LlvmAccum stmts mempty
| ezyang/ghc | compiler/llvmGen/LlvmCodeGen/CodeGen.hs | bsd-3-clause | 72,435 | 16 | 25 | 21,241 | 19,714 | 9,754 | 9,960 | 1,260 | 65 |
module Wham.Parser(
Statement(..),
ArithmeticExp(..),
BooleanExp(..),
parser,
parse) where
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Expr
import qualified Text.ParserCombinators.Parsec.Token as T
import Text.ParserCombinators.Parsec.Language (emptyDef)
import Wham.AST
-- lexer
lexer :: T.TokenParser ()
lexer = T.makeTokenParser whileDef
whileDef :: T.LanguageDef st
whileDef = (emptyDef
{T.reservedOpNames = ["*", "+", "-", "!", "&", "=", "<=", ":=",
";"],
T.reservedNames = ["true", "false", "skip", "if", "then",
"else", "while", "do", "try", "catch"],
T.identStart = letter,
T.commentLine = "#"
})
identifier :: CharParser () String
identifier = T.identifier lexer
reservedOp :: String -> CharParser () ()
reservedOp = T.reservedOp lexer
reserved :: String -> CharParser () ()
reserved = T.reserved lexer
integer :: CharParser () Integer
integer = T.integer lexer
parens :: CharParser () a -> CharParser () a
parens = T.parens lexer
whitespace :: CharParser () ()
whitespace = T.whiteSpace lexer
-- parser
parser :: Parser Statement
parser = do whitespace
result <- statements
eof
return result
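-- Example usage (illustrative; the printed result assumes the AST types in
-- Wham.AST derive Show):
--
-- >>> parse parser "example" "x := 0; while x <= 3 do x := x + 1"
-- Right (Compound (Assign "x" (Number 0)) (While (LessOrEqual ...) (Assign "x" ...)))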
statements :: Parser Statement
statements = buildExpressionParser statementOperators statement
statement :: Parser Statement
statement = skip
<|> conditional
<|> while
<|> assign
<|> tryCatch
<|> parens statements
statementOperators :: OperatorTable Char () Statement
statementOperators = [[binary ";" compound AssocLeft]]
compound :: Statement -> Statement -> Statement
compound s1 s2 = Compound s1 s2
skip :: Parser Statement
skip = do reserved "skip"
return (Skip)
conditional :: Parser Statement
conditional = do reserved "if"
condition <- booleanExpression
reserved "then"
trueStatement <- statements
reserved "else"
falseStatement <- statements
return (If condition trueStatement falseStatement)
while :: Parser Statement
while = do reserved "while"
condition <- booleanExpression
reserved "do"
s <- statements
return (While condition s)
assign :: Parser Statement
assign = do name <- identifier
reservedOp ":="
value <- arithmeticExpression
return (Assign name value)
tryCatch :: Parser Statement
tryCatch = do reserved "try"
s1 <- statements
reserved "catch"
s2 <- statements
return (TryCatch s1 s2)
booleanExpression :: Parser BooleanExp
booleanExpression = buildExpressionParser booleanOperators boolean
boolean :: Parser BooleanExp
boolean = true
<|> false
<|> try (arithmeticComparison)
<|> parens booleanExpression
booleanOperators :: OperatorTable Char () BooleanExp
booleanOperators = [[prefix "!" Wham.Parser.not],
[binary "&" Wham.Parser.and AssocLeft]]
arithmeticComparison :: Parser BooleanExp
arithmeticComparison = do { l <- arithmeticExpression;
do { reservedOp "=";
equals l}
<|>
do { reservedOp "<=";
lessOrEquals l}
}
equals :: ArithmeticExp -> Parser BooleanExp
equals l = do r <- arithmeticExpression
return (Equal l r)
lessOrEquals :: ArithmeticExp -> Parser BooleanExp
lessOrEquals l = do r <- arithmeticExpression
return (LessOrEqual l r)
not :: BooleanExp -> BooleanExp
not b = Not b
and :: BooleanExp -> BooleanExp -> BooleanExp
and l r = And l r
true :: Parser BooleanExp
true = do reserved "true"
return (Boolean True)
false :: Parser BooleanExp
false = do reserved "false"
return (Boolean False)
arithmeticExpression :: Parser ArithmeticExp
arithmeticExpression = buildExpressionParser arithmeticOperators arithmetic
arithmetic :: Parser ArithmeticExp
arithmetic = number
<|> variable
<|> parens arithmeticExpression
number :: Parser ArithmeticExp
number = do n <- integer
return (Number n)
variable :: Parser ArithmeticExp
variable = do var <- identifier
return (Variable var)
arithmeticOperators :: OperatorTable Char () ArithmeticExp
arithmeticOperators = [[binary "*" mul AssocLeft, binary "/" divide AssocLeft],
[binary "+" add AssocLeft, binary "-" sub AssocLeft]]
binary :: String -> (a -> a -> a) -> Assoc -> Operator Char () a
binary symbol operation assoc = Infix (do {reservedOp symbol;
return operation})
assoc
prefix :: String -> (a -> a) -> Operator Char () a
prefix symbol operation = Prefix (do { reservedOp symbol;
return operation })
mul :: ArithmeticExp -> ArithmeticExp -> ArithmeticExp
mul l r = Mul l r
divide :: ArithmeticExp -> ArithmeticExp -> ArithmeticExp
divide l r = Div l r
add :: ArithmeticExp -> ArithmeticExp -> ArithmeticExp
add l r = Add l r
sub :: ArithmeticExp -> ArithmeticExp -> ArithmeticExp
sub l r = Sub l r
| helino/wham | src/Wham/Parser.hs | bsd-3-clause | 5,460 | 0 | 10 | 1,667 | 1,528 | 773 | 755 | 142 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverlappingInstances #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module TreeUtilSpec (spec) where
import Test.Hspec
import Test.QuickCheck
import Test.QuickCheck.Instances ()
import Prelude hiding (length, filter)
import qualified Data.List as L
import Data.Tree
import Data.Tree.Util
{-# ANN spec ("HLint: ignore Redundant do"::String) #-}
{-# ANN spec ("HLint: ignore Use mappend"::String) #-}
spec :: Spec
spec = do
------------------------------------------------------------------------------
-- Data.Tree.Util -------------------------------------------------------------
------------------------------------------------------------------------------
describe "Data.Tree.Util" $ do
let bigTree, tree5, tree6 :: Tree Int
bigTree = Node 0 [ Node 2 [ Node 5 [Node 6 []]
, Node 4 [Node 3 []]]
, Node 1 [Node 5 []]
]
tree5 = Node 5 []
tree6 = Node 6 []
it "mirror" $ do
mirror bigTree `shouldBe` Node 0 [ Node 1 [ Node 5 []]
, Node 2 [ Node 4 [ Node 3 []]
, Node 5 [ Node 6 []]
]
]
it "lookupTreeBy" $ do
lookupTreeBy (==5) tree5 `shouldBe` Just (Node 5 [])
lookupTreeBy (==5) tree6 `shouldBe` Nothing
lookupTreeBy (==5) bigTree `shouldBe` Just (Node 5 [Node 6 []])
it "find result size is <= original tree size" . property
$ \(tr :: Tree Int) -> size tr >= (maybe 0 size . lookupTreeBy (== 0) $ tr)
it "find result is a subset of original tree" . property
$ \(tr :: Tree Int) -> let f = lookupTreeBy (== 0) tr
in case f of
Nothing -> True
Just x -> flatten x `L.isInfixOf` flatten tr
it "lookupTree" $ do
lookupTree 5 tree5 `shouldBe` Just (Node 5 [])
lookupTree 5 tree6 `shouldBe` Nothing
lookupTree 5 bigTree `shouldBe` Just (Node 5 [Node 6 []])
it "lookupTree result size is <= original tree size" . property
$ \(tr :: Tree Int) -> size tr >= (maybe 0 size . lookupTree 0 $ tr)
it "lookupTree result is a subset of original tree" . property
$ \(tr :: Tree Int) -> let f = lookupTree 0 tr
in case f of
Nothing -> True
Just x -> flatten x `L.isInfixOf` flatten tr
it "lookupTree vs lookupTreeBy" . property
$ \(tr :: Tree Int) -> lookupTree 0 tr == lookupTreeBy (==0) tr
{-it "filter" $ do-}
{-filter (== 0) bigTree `shouldBe` Node 0 []-}
{-filter (/= 2) bigTree `shouldBe` Node 0 [Node 1 [Node 5 []]]-}
{-filter (== 1) bigTree `shouldBe` Node 0 [Node 1 []]-}
{-filter (== 9) bigTree `shouldBe` Node 0 []-}
{-filter (== 5) bigTree `shouldBe` Node 0 [ Node 1 [ Node 5 []]]-}
{-filter (== 3) bigTree `shouldBe` Node 0 [ Node 2 [ Node 4 [ Node 3 []]]]-}
{-filter (== 1) wrongTree `shouldBe` wrongTree-}
{-it "filterSub" $ do-}
{-filterSub (== 0) bigTree `shouldBe` Just (Node 0 [])-}
{-filterSub (/= 2) bigTree `shouldBe` Just (Node 0 [Node 1 [Node 5 []]])-}
{-filterSub (== 1) bigTree `shouldBe` Nothing -- Node 0 [Node 1 []]-}
{-it "filterSub vs filterPruneTree" . property-}
{-$ \(tr :: Tree Int) -> filterSub (==0) tr == filterPruneTree (== 0) tr-}
it "size" . property
$ \(tr :: Tree Int) -> size tr == (L.length . flatten $ tr)
it "maxDepth" $ do
maxDepth bigTree `shouldBe` 4
maxDepth tree5 `shouldBe` 1
it "prune" . property $
\(tr :: Tree Int,mx :: Positive Int) -> maxDepth (prune (getPositive mx) tr)
<= getPositive mx + 1
| jcristovao/tree-util | test/TreeUtilSpec.hs | bsd-3-clause | 4,019 | 0 | 21 | 1,319 | 959 | 491 | 468 | 60 | 3 |
{-# LANGUAGE
DeriveFoldable
, DeriveFunctor
, DeriveGeneric
, DeriveTraversable
, FlexibleContexts
, FlexibleInstances
, LambdaCase
, MultiParamTypeClasses
, Rank2Types
, StandaloneDeriving
, UndecidableInstances
, ViewPatterns #-}
module Type.Syntactic
( module Type.BindingFlag
, MonoType (..)
, var
, arr
, PolyType (..)
, mono
, bot
, forall
) where
import Control.Applicative
import Control.Comonad.Env (ComonadEnv)
import Control.Lens
import Data.Foldable
import Data.Function (fix)
import GHC.Generics (Generic)
import System.Console.Terminfo.PrettyPrint
import Text.PrettyPrint.Free
import Hoist
import Type.BindingFlag
data MonoType w a
= Var a
| Arr (w (MonoType w a)) (w (MonoType w a))
deriving (Functor, Foldable, Traversable, Generic)
deriving instance ( Show a
, Show (w (MonoType w a))
) => Show (MonoType w a)
instance ( Pretty a
, Pretty (w (MonoType w a))
) => Pretty (MonoType w a) where
pretty = \ case
Var x -> pretty x
Arr a b -> pretty a <+> text "->" <+> pretty b
instance ( PrettyTerm a
, PrettyTerm (w (MonoType w a))
) => PrettyTerm (MonoType w a) where
prettyTerm = \ case
Var x -> prettyTerm x
Arr a b -> prettyTerm a <+> text "->" <+> prettyTerm b
instance FunctorHoist MonoType where
hoist f = fix $ \ rec -> \ case
Var x -> Var x
Arr a b -> Arr (f $ rec <$> a) (f $ rec <$> b)
instance VariantA (MonoType w a) (MonoType w a) a a
instance VariantB
(MonoType w a) (MonoType w a)
(w (MonoType w a), w (MonoType w a)) (w (MonoType w a), w (MonoType w a))
var :: Prism' (MonoType w a) a
var = _A
arr :: Prism' (MonoType w a) (w (MonoType w a), w (MonoType w a))
arr = _B
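-- As an illustration of the prism interface (assuming only the usual
-- "Control.Lens" prism laws): @preview var (Var x) == Just x@,
-- @review var x == Var x@, and @preview var (Arr a b) == Nothing@;
-- 'arr' behaves analogously for the pair of wrapped argument and result types.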
data PolyType w a
= Mono (MonoType w a)
| Bot
| Forall a BindingFlag (w (PolyType w a)) (w (PolyType w a))
deriving (Functor, Foldable, Traversable, Generic)
deriving instance ( Show a
, Show (w (MonoType w a))
, Show (w (PolyType w a))
) => Show (PolyType w a)
instance ( Pretty a
, Pretty (w (MonoType w a))
, Pretty (w (PolyType w a))
) => Pretty (PolyType w a) where
pretty = \ case
Mono t -> pretty t
Bot -> text "_|_"
Forall x bf a b ->
lparen <>
pretty x <+>
pretty bf <+>
pretty a <>
rparen <+>
pretty b
instance ( ComonadEnv ScopedEffect w
, PrettyTerm a
, PrettyTerm (w (MonoType w a))
, PrettyTerm (w (PolyType w a))
) => PrettyTerm (PolyType w a) where
prettyTerm = \ case
Mono t -> prettyTerm t
Bot -> text "_|_"
Forall x bf a b ->
lparen <>
prettyTerm x <+>
pretty bf <+>
prettyTerm a <>
rparen <+>
prettyTerm b
instance FunctorHoist PolyType where
hoist f = fix $ \ rec -> \ case
Mono t -> Mono $ hoist f t
Bot -> Bot
Forall a bf o o' -> Forall a bf (f $ rec <$> o) (f $ rec <$> o')
instance VariantA (PolyType w a) (PolyType w a) (MonoType w a) (MonoType w a)
instance VariantB (PolyType w a) (PolyType w a) () ()
instance VariantC
(PolyType w a)
(PolyType w a)
(a, BindingFlag, w (PolyType w a), w (PolyType w a))
(a, BindingFlag, w (PolyType w a), w (PolyType w a))
mono :: Prism' (PolyType w a) (MonoType w a)
mono = _A
bot :: Prism' (PolyType w a) ()
bot = _B
forall :: Prism' (PolyType w a) (a, BindingFlag, w (PolyType w a), w (PolyType w a))
forall = _C
| sonyandy/mlf | src/Type/Syntactic.hs | bsd-3-clause | 3,587 | 0 | 14 | 1,099 | 1,488 | 759 | 729 | 119 | 1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TypeFamilies #-}
-- Create a source distribution tarball
module Stack.SDist
( getSDistTarball
, checkSDistTarball
, checkSDistTarball'
, SDistOpts (..)
) where
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import qualified Codec.Compression.GZip as GZip
import Control.Applicative
import Control.Concurrent.Execute (ActionContext(..))
import Control.Monad (unless, void, liftM, filterM, foldM, when)
import Control.Monad.Catch
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader.Class (local)
import Control.Monad.Trans.Control (liftBaseWith)
import Control.Monad.Trans.Unlift (MonadBaseUnlift)
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Lazy as L
import Data.Char (toLower)
import Data.Data (Data, Typeable, cast, gmapT)
import Data.Either (partitionEithers)
import Data.IORef (newIORef, readIORef, writeIORef)
import Data.List
import Data.List.Extra (nubOrd)
import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as NE
import qualified Data.Map.Strict as Map
import Data.Maybe (fromMaybe, catMaybes)
import Data.Monoid ((<>))
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TLE
import Data.Time.Clock.POSIX
import Distribution.Package (Dependency (..))
import qualified Distribution.PackageDescription as Cabal
import qualified Distribution.PackageDescription.Check as Check
import Distribution.PackageDescription.PrettyPrint (showGenericPackageDescription)
import Distribution.Text (display)
import Distribution.Version (simplifyVersionRange, orLaterVersion, earlierVersion)
import Distribution.Version.Extra
import Lens.Micro (set)
import Path
import Path.IO hiding (getModificationTime, getPermissions)
import Prelude -- Fix redundant import warnings
import Stack.Build (mkBaseConfigOpts, build)
import Stack.Build.Execute
import Stack.Build.Installed
import Stack.Build.Source (loadSourceMap, getDefaultPackageConfig)
import Stack.Build.Target
import Stack.Config (resolvePackageEntry, removePathFromPackageEntry)
import Stack.Constants
import Stack.Package
import Stack.Types.Build
import Stack.Types.Config
import Stack.Types.Package
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
import Stack.Types.StackT
import Stack.Types.StringError
import Stack.Types.Version
import System.Directory (getModificationTime, getPermissions)
import qualified System.FilePath as FP
-- | Options controlling how a source distribution tarball is created and
-- checked.
data SDistOpts = SDistOpts
{ sdoptsDirsToWorkWith :: [String]
-- ^ Directories to package
, sdoptsPvpBounds :: Maybe PvpBounds
-- ^ PVP Bounds overrides
, sdoptsIgnoreCheck :: Bool
-- ^ Whether to ignore check of the package for common errors
, sdoptsSign :: Bool
-- ^ Whether to sign the package
, sdoptsSignServerUrl :: String
-- ^ The URL of the signature server
, sdoptsBuildTarball :: Bool
-- ^ Whether to build the tarball
}
-- | Special exception to throw when you want to fail because of bad results
-- of package check.
newtype CheckException
= CheckException (NonEmpty Check.PackageCheck)
deriving (Typeable)
instance Exception CheckException
instance Show CheckException where
show (CheckException xs) =
"Package check reported the following errors:\n" ++
(intercalate "\n" . fmap show . NE.toList $ xs)
-- | Given the path to a local package, creates its source
-- distribution tarball.
--
-- While this yields a 'FilePath', the name of the tarball, this
-- tarball is not written to the disk and instead yielded as a lazy
-- bytestring.
getSDistTarball
:: (StackM env m, HasEnvConfig env)
=> Maybe PvpBounds -- ^ Override Config value
-> Path Abs Dir -- ^ Path to local package
-> m (FilePath, L.ByteString, Maybe (PackageIdentifier, L.ByteString))
    -- ^ Filename, tarball contents, and optional cabal file revision to upload
getSDistTarball mpvpBounds pkgDir = do
config <- view configL
let PvpBounds pvpBounds asRevision = fromMaybe (configPvpBounds config) mpvpBounds
tweakCabal = pvpBounds /= PvpBoundsNone
pkgFp = toFilePath pkgDir
lp <- readLocalPackage pkgDir
$logInfo $ "Getting file list for " <> T.pack pkgFp
(fileList, cabalfp) <- getSDistFileList lp
$logInfo $ "Building sdist tarball for " <> T.pack pkgFp
files <- normalizeTarballPaths (lines fileList)
-- We're going to loop below and eventually find the cabal
-- file. When we do, we'll upload this reference, if the
-- mpvpBounds value indicates that we should be uploading a cabal
-- file revision.
cabalFileRevisionRef <- liftIO (newIORef Nothing)
-- NOTE: Could make this use lazy I/O to only read files as needed
-- for upload (both GZip.compress and Tar.write are lazy).
-- However, it seems less error prone and more predictable to read
-- everything in at once, so that's what we're doing for now:
let tarPath isDir fp = either throwString return
(Tar.toTarPath isDir (forceUtf8Enc (pkgId FP.</> fp)))
-- convert a String of proper characters to a String of bytes
-- in UTF8 encoding masquerading as characters. This is
-- necessary for tricking the tar package into proper
-- character encoding.
forceUtf8Enc = S8.unpack . T.encodeUtf8 . T.pack
packWith f isDir fp = liftIO $ f (pkgFp FP.</> fp) =<< tarPath isDir fp
packDir = packWith Tar.packDirectoryEntry True
packFile fp
-- This is a cabal file, we're going to tweak it, but only
-- tweak it as a revision.
| tweakCabal && isCabalFp fp && asRevision = do
lbsIdent <- getCabalLbs pvpBounds (Just 1) $ toFilePath cabalfp
liftIO (writeIORef cabalFileRevisionRef (Just lbsIdent))
packWith packFileEntry False fp
-- Same, except we'll include the cabal file in the
-- original tarball upload.
| tweakCabal && isCabalFp fp = do
(_ident, lbs) <- getCabalLbs pvpBounds Nothing $ toFilePath cabalfp
currTime <- liftIO getPOSIXTime -- Seconds from UNIX epoch
tp <- liftIO $ tarPath False fp
return $ (Tar.fileEntry tp lbs) { Tar.entryTime = floor currTime }
| otherwise = packWith packFileEntry False fp
isCabalFp fp = toFilePath pkgDir FP.</> fp == toFilePath cabalfp
tarName = pkgId FP.<.> "tar.gz"
pkgId = packageIdentifierString (packageIdentifier (lpPackage lp))
dirEntries <- mapM packDir (dirsFromFiles files)
fileEntries <- mapM packFile files
mcabalFileRevision <- liftIO (readIORef cabalFileRevisionRef)
return (tarName, GZip.compress (Tar.write (dirEntries ++ fileEntries)), mcabalFileRevision)
-- | Get the PVP bounds-enabled version of the given cabal file
getCabalLbs :: (StackM env m, HasEnvConfig env)
=> PvpBoundsType
-> Maybe Int -- ^ optional revision
-> FilePath
-> m (PackageIdentifier, L.ByteString)
getCabalLbs pvpBounds mrev fp = do
bs <- liftIO $ S.readFile fp
(_warnings, gpd) <- readPackageUnresolvedBS Nothing bs
(_, sourceMap) <- loadSourceMap AllowNoTargets defaultBuildOptsCLI
menv <- getMinimalEnvOverride
(installedMap, _, _, _) <- getInstalled menv GetInstalledOpts
{ getInstalledProfiling = False
, getInstalledHaddock = False
, getInstalledSymbols = False
}
sourceMap
let gpd' = gtraverseT (addBounds sourceMap installedMap) gpd
gpd'' =
case mrev of
Nothing -> gpd'
Just rev -> gpd'
{ Cabal.packageDescription
= (Cabal.packageDescription gpd')
{ Cabal.customFieldsPD
= (("x-revision", show rev):)
$ filter (\(x, _) -> map toLower x /= "x-revision")
$ Cabal.customFieldsPD
$ Cabal.packageDescription gpd'
}
}
ident <- parsePackageIdentifierFromString $ display $ Cabal.package $ Cabal.packageDescription gpd''
return
( ident
, TLE.encodeUtf8 $ TL.pack $ showGenericPackageDescription gpd''
)
where
addBounds :: SourceMap -> InstalledMap -> Dependency -> Dependency
addBounds sourceMap installedMap dep@(Dependency cname range) =
case lookupVersion (fromCabalPackageName cname) of
Nothing -> dep
Just version -> Dependency cname $ simplifyVersionRange
$ (if toAddUpper && not (hasUpper range) then addUpper version else id)
$ (if toAddLower && not (hasLower range) then addLower version else id)
range
where
lookupVersion name =
case Map.lookup name sourceMap of
Just (PSLocal lp) -> Just $ packageVersion $ lpPackage lp
Just (PSUpstream version _ _ _ _) -> Just version
Nothing ->
case Map.lookup name installedMap of
Just (_, installed) -> Just (installedVersion installed)
Nothing -> Nothing
addUpper version = intersectVersionRanges
(earlierVersion $ toCabalVersion $ nextMajorVersion version)
addLower version = intersectVersionRanges
(orLaterVersion (toCabalVersion version))
(toAddLower, toAddUpper) =
case pvpBounds of
PvpBoundsNone -> (False, False)
PvpBoundsUpper -> (False, True)
PvpBoundsLower -> (True, False)
PvpBoundsBoth -> (True, True)
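-- A worked example (version numbers are hypothetical): with 'PvpBoundsBoth'
-- and a resolved version of 1.2.3 for @text@, a bare dependency on @text@ is
-- rewritten to @text >=1.2.3 && <1.3@, assuming 'nextMajorVersion' bumps the
-- second (PVP major) component of the version.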
-- | Traverse a data type.
gtraverseT :: (Data a,Typeable b) => (Typeable b => b -> b) -> a -> a
gtraverseT f =
gmapT (\x -> case cast x of
Nothing -> gtraverseT f x
Just b -> fromMaybe x (cast (f b)))
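-- For instance, @gtraverseT (addBounds sourceMap installedMap)@, as used in
-- 'getCabalLbs' above, rewrites every 'Dependency' embedded anywhere in the
-- 'GenericPackageDescription' while leaving all other fields untouched.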
-- | Read in a 'LocalPackage' config. This makes some default decisions
-- about 'LocalPackage' fields that might not be appropriate for other
-- use-cases.
readLocalPackage :: (StackM env m, HasEnvConfig env) => Path Abs Dir -> m LocalPackage
readLocalPackage pkgDir = do
cabalfp <- findOrGenerateCabalFile pkgDir
config <- getDefaultPackageConfig
(warnings,package) <- readPackage config cabalfp
mapM_ (printCabalFileWarning cabalfp) warnings
return LocalPackage
{ lpPackage = package
, lpWanted = False -- HACK: makes it so that sdist output goes to a log instead of a file.
, lpDir = pkgDir
, lpCabalFile = cabalfp
      -- NOTE: these aren't the 'correct' values, but aren't used in
      -- the usage of this function in this module.
, lpTestDeps = Map.empty
, lpBenchDeps = Map.empty
, lpTestBench = Nothing
, lpForceDirty = False
, lpDirtyFiles = Nothing
, lpNewBuildCache = Map.empty
, lpFiles = Set.empty
, lpComponents = Set.empty
, lpUnbuildable = Set.empty
}
-- | Returns a newline-separate list of paths, and the absolute path to the .cabal file.
getSDistFileList :: (StackM env m, HasEnvConfig env) => LocalPackage -> m (String, Path Abs File)
getSDistFileList lp =
withSystemTempDir (stackProgName <> "-sdist") $ \tmpdir -> do
menv <- getMinimalEnvOverride
let bopts = defaultBuildOpts
let boptsCli = defaultBuildOptsCLI
baseConfigOpts <- mkBaseConfigOpts boptsCli
(locals, _) <- loadSourceMap NeedTargets boptsCli
runInBase <- liftBaseWith $ \run -> return (void . run)
withExecuteEnv menv bopts boptsCli baseConfigOpts locals
[] [] [] -- provide empty list of globals. This is a hack around custom Setup.hs files
$ \ee ->
withSingleContext runInBase ac ee task Nothing (Just "sdist") $ \_package cabalfp _pkgDir cabal _announce _console _mlogFile -> do
let outFile = toFilePath tmpdir FP.</> "source-files-list"
cabal KeepTHLoading ["sdist", "--list-sources", outFile]
contents <- liftIO (readFile outFile)
return (contents, cabalfp)
where
package = lpPackage lp
ac = ActionContext Set.empty []
task = Task
{ taskProvides = PackageIdentifier (packageName package) (packageVersion package)
, taskType = TTLocal lp
, taskConfigOpts = TaskConfigOpts
{ tcoMissing = Set.empty
, tcoOpts = \_ -> ConfigureOpts [] []
}
, taskPresent = Map.empty
, taskAllInOne = True
, taskCachePkgSrc = CacheSrcLocal (toFilePath (lpDir lp))
}
normalizeTarballPaths :: (StackM env m) => [FilePath] -> m [FilePath]
normalizeTarballPaths fps = do
-- TODO: consider whether erroring out is better - otherwise the
-- user might upload an incomplete tar?
unless (null outsideDir) $
$logWarn $ T.concat
[ "Warning: These files are outside of the package directory, and will be omitted from the tarball: "
, T.pack (show outsideDir)]
return (nubOrd files)
where
(outsideDir, files) = partitionEithers (map pathToEither fps)
pathToEither fp = maybe (Left fp) Right (normalizePath fp)
normalizePath :: FilePath -> Maybe FilePath
normalizePath = fmap FP.joinPath . go . FP.splitDirectories . FP.normalise
where
go [] = Just []
go ("..":_) = Nothing
go (_:"..":xs) = go xs
go (x:xs) = (x :) <$> go xs
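-- Illustrative behaviour, assuming POSIX-style path separators:
--
-- >>> normalizePath "foo/../bar/baz.hs"
-- Just "bar/baz.hs"
-- >>> normalizePath "../escape.hs"
-- Nothing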
dirsFromFiles :: [FilePath] -> [FilePath]
dirsFromFiles dirs = Set.toAscList (Set.delete "." results)
where
results = foldl' (\s -> go s . FP.takeDirectory) Set.empty dirs
go s x
| Set.member x s = s
| otherwise = go (Set.insert x s) (FP.takeDirectory x)
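-- For example, assuming POSIX-style path separators:
--
-- >>> dirsFromFiles ["src/Stack/SDist.hs", "src/Stack.hs"]
-- ["src","src/Stack"]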
-- | Check package in given tarball. This will log all warnings
-- and will throw an exception in case of critical errors.
--
-- Note that we temporarily decompress the archive to analyze it.
checkSDistTarball :: (StackM env m, HasEnvConfig env, MonadBaseUnlift IO m)
=> SDistOpts -- ^ The configuration of what to check
-> Path Abs File -- ^ Absolute path to tarball
-> m ()
checkSDistTarball opts tarball = withTempTarGzContents tarball $ \pkgDir' -> do
pkgDir <- (pkgDir' </>) `liftM`
(parseRelDir . FP.takeBaseName . FP.takeBaseName . toFilePath $ tarball)
-- ^ drop ".tar" ^ drop ".gz"
when (sdoptsBuildTarball opts) (buildExtractedTarball pkgDir)
unless (sdoptsIgnoreCheck opts) (checkPackageInExtractedTarball pkgDir)
checkPackageInExtractedTarball :: (StackM env m, HasEnvConfig env, MonadBaseUnlift IO m)
=> Path Abs Dir -- ^ Absolute path to tarball
-> m ()
checkPackageInExtractedTarball pkgDir = do
cabalfp <- findOrGenerateCabalFile pkgDir
name <- parsePackageNameFromFilePath cabalfp
config <- getDefaultPackageConfig
(gdesc, pkgDesc) <- readPackageDescriptionDir config pkgDir
$logInfo $
"Checking package '" <> packageNameText name <> "' for common mistakes"
let pkgChecks = Check.checkPackage gdesc (Just pkgDesc)
fileChecks <- liftIO $ Check.checkPackageFiles pkgDesc (toFilePath pkgDir)
let checks = pkgChecks ++ fileChecks
(errors, warnings) =
let criticalIssue (Check.PackageBuildImpossible _) = True
criticalIssue (Check.PackageDistInexcusable _) = True
criticalIssue _ = False
in partition criticalIssue checks
unless (null warnings) $
$logWarn $ "Package check reported the following warnings:\n" <>
T.pack (intercalate "\n" . fmap show $ warnings)
case NE.nonEmpty errors of
Nothing -> return ()
Just ne -> throwM $ CheckException ne
buildExtractedTarball :: (StackM env m, HasEnvConfig env, MonadBaseUnlift IO m) => Path Abs Dir -> m ()
buildExtractedTarball pkgDir = do
projectRoot <- view projectRootL
envConfig <- view envConfigL
menv <- getMinimalEnvOverride
localPackageToBuild <- readLocalPackage pkgDir
let packageEntries = bcPackageEntries (envConfigBuildConfig envConfig)
getPaths entry = do
resolvedEntry <- resolvePackageEntry menv projectRoot entry
return $ fmap fst resolvedEntry
allPackagePaths <- fmap mconcat (mapM getPaths packageEntries)
-- We remove the path based on the name of the package
let isPathToRemove path = do
localPackage <- readLocalPackage path
return $ packageName (lpPackage localPackage) == packageName (lpPackage localPackageToBuild)
pathsToRemove <- filterM isPathToRemove allPackagePaths
let adjustPackageEntries entries path = do
adjustedPackageEntries <- mapM (removePathFromPackageEntry menv projectRoot path) entries
return (catMaybes adjustedPackageEntries)
entriesWithoutBuiltPackage <- foldM adjustPackageEntries packageEntries pathsToRemove
let newEntry = PackageEntry Nothing (PLFilePath (toFilePath pkgDir)) []
newPackagesRef <- liftIO (newIORef Nothing)
let adjustEnvForBuild env =
let updatedEnvConfig = envConfig
{envConfigPackagesRef = newPackagesRef
,envConfigBuildConfig = updatePackageInBuildConfig (envConfigBuildConfig envConfig)
}
in set envConfigL updatedEnvConfig env
updatePackageInBuildConfig buildConfig = buildConfig
{ bcPackageEntries = newEntry : entriesWithoutBuiltPackage
, bcConfig = (bcConfig buildConfig)
{ configBuild = defaultBuildOpts
{ boptsTests = True
}
}
}
local adjustEnvForBuild $
build (const (return ())) Nothing defaultBuildOptsCLI
-- | Version of 'checkSDistTarball' that first saves lazy bytestring to
-- temporary directory and then calls 'checkSDistTarball' on it.
checkSDistTarball' :: (StackM env m, HasEnvConfig env, MonadBaseUnlift IO m)
=> SDistOpts
-> String -- ^ Tarball name
-> L.ByteString -- ^ Tarball contents as a byte string
-> m ()
checkSDistTarball' opts name bytes = withSystemTempDir "stack" $ \tpath -> do
npath <- (tpath </>) `liftM` parseRelFile name
liftIO $ L.writeFile (toFilePath npath) bytes
checkSDistTarball opts npath
withTempTarGzContents :: (MonadIO m, MonadMask m)
=> Path Abs File -- ^ Location of tarball
-> (Path Abs Dir -> m a) -- ^ Perform actions given dir with tarball contents
-> m a
withTempTarGzContents apath f = withSystemTempDir "stack" $ \tpath -> do
archive <- liftIO $ L.readFile (toFilePath apath)
liftIO . Tar.unpack (toFilePath tpath) . Tar.read . GZip.decompress $ archive
f tpath
--------------------------------------------------------------------------------
-- Copy+modified from the tar package to avoid issues with lazy IO ( see
-- https://github.com/commercialhaskell/stack/issues/1344 )
packFileEntry :: FilePath -- ^ Full path to find the file on the local disk
-> Tar.TarPath -- ^ Path to use for the tar Entry in the archive
-> IO Tar.Entry
packFileEntry filepath tarpath = do
mtime <- getModTime filepath
perms <- getPermissions filepath
content <- S.readFile filepath
let size = fromIntegral (S.length content)
return (Tar.simpleEntry tarpath (Tar.NormalFile (L.fromStrict content) size)) {
Tar.entryPermissions = if executable perms then Tar.executableFilePermissions
else Tar.ordinaryFilePermissions,
Tar.entryTime = mtime
}
getModTime :: FilePath -> IO Tar.EpochTime
getModTime path = do
t <- getModificationTime path
return . floor . utcTimeToPOSIXSeconds $ t
| mrkkrp/stack | src/Stack/SDist.hs | bsd-3-clause | 20,550 | 0 | 24 | 5,323 | 4,677 | 2,449 | 2,228 | 358 | 11 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="az-AZ">
<title>HTTPS Info | ZAP Add-on</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/httpsInfo/src/main/javahelp/org/zaproxy/zap/extension/httpsinfo/resources/help_az_AZ/helpset_az_AZ.hs | apache-2.0 | 969 | 80 | 67 | 160 | 419 | 212 | 207 | -1 | -1 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
-- Create a source distribution tarball
module Stack.SDist
( getSDistTarball
, checkSDistTarball
, checkSDistTarball'
) where
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import qualified Codec.Compression.GZip as GZip
import Control.Applicative
import Control.Concurrent.Execute (ActionContext(..))
import Control.Monad (unless, void, liftM)
import Control.Monad.Catch
import Control.Monad.IO.Class
import Control.Monad.Logger
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.Trans.Control (liftBaseWith)
import Control.Monad.Trans.Resource
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
import Data.Data (Data, Typeable, cast, gmapT)
import Data.Either (partitionEithers)
import Data.List
import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as NE
import qualified Data.Map.Strict as Map
import Data.Maybe (fromMaybe)
import Data.Monoid ((<>))
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TLE
import Data.Time.Clock.POSIX
import Distribution.Package (Dependency (..))
import qualified Distribution.PackageDescription.Check as Check
import Distribution.PackageDescription.PrettyPrint (showGenericPackageDescription)
import Distribution.Version (simplifyVersionRange, orLaterVersion, earlierVersion)
import Distribution.Version.Extra
import Network.HTTP.Client.Conduit (HasHttpManager)
import Path
import Path.IO hiding (getModificationTime, getPermissions)
import Prelude -- Fix redundant import warnings
import Stack.Build (mkBaseConfigOpts)
import Stack.Build.Execute
import Stack.Build.Installed
import Stack.Build.Source (loadSourceMap, getPackageConfig)
import Stack.Build.Target
import Stack.Constants
import Stack.Package
import Stack.Types
import Stack.Types.Internal
import System.Directory (getModificationTime, getPermissions)
import qualified System.FilePath as FP
-- | Special exception to throw when you want to fail because of bad results
-- of package check.
data CheckException
= CheckException (NonEmpty Check.PackageCheck)
deriving (Typeable)
instance Exception CheckException
instance Show CheckException where
show (CheckException xs) =
"Package check reported the following errors:\n" ++
(intercalate "\n" . fmap show . NE.toList $ xs)
type M env m = (MonadIO m,MonadReader env m,HasHttpManager env,MonadLogger m,MonadBaseControl IO m,MonadMask m,HasLogLevel env,HasEnvConfig env,HasTerminal env)
-- | Given the path to a local package, creates its source
-- distribution tarball.
--
-- While this yields a 'FilePath', the name of the tarball, this
-- tarball is not written to the disk and instead yielded as a lazy
-- bytestring.
getSDistTarball
:: M env m
=> Maybe PvpBounds -- ^ Override Config value
-> Path Abs Dir -- ^ Path to local package
-> m (FilePath, L.ByteString) -- ^ Filename and tarball contents
getSDistTarball mpvpBounds pkgDir = do
config <- asks getConfig
let pvpBounds = fromMaybe (configPvpBounds config) mpvpBounds
tweakCabal = pvpBounds /= PvpBoundsNone
pkgFp = toFilePath pkgDir
lp <- readLocalPackage pkgDir
$logInfo $ "Getting file list for " <> T.pack pkgFp
(fileList, cabalfp) <- getSDistFileList lp
$logInfo $ "Building sdist tarball for " <> T.pack pkgFp
files <- normalizeTarballPaths (lines fileList)
-- NOTE: Could make this use lazy I/O to only read files as needed
-- for upload (both GZip.compress and Tar.write are lazy).
-- However, it seems less error prone and more predictable to read
-- everything in at once, so that's what we're doing for now:
let tarPath isDir fp = either error id
(Tar.toTarPath isDir (pkgId FP.</> fp))
packWith f isDir fp = liftIO $ f (pkgFp FP.</> fp) (tarPath isDir fp)
packDir = packWith Tar.packDirectoryEntry True
packFile fp
| tweakCabal && isCabalFp fp = do
lbs <- getCabalLbs pvpBounds $ toFilePath cabalfp
return $ Tar.fileEntry (tarPath False fp) lbs
| otherwise = packWith packFileEntry False fp
isCabalFp fp = toFilePath pkgDir FP.</> fp == toFilePath cabalfp
tarName = pkgId FP.<.> "tar.gz"
pkgId = packageIdentifierString (packageIdentifier (lpPackage lp))
dirEntries <- mapM packDir (dirsFromFiles files)
fileEntries <- mapM packFile files
return (tarName, GZip.compress (Tar.write (dirEntries ++ fileEntries)))
-- | Get the PVP bounds-enabled version of the given cabal file
getCabalLbs :: M env m => PvpBounds -> FilePath -> m L.ByteString
getCabalLbs pvpBounds fp = do
bs <- liftIO $ S.readFile fp
(_warnings, gpd) <- readPackageUnresolvedBS Nothing bs
(_, _, _, _, sourceMap) <- loadSourceMap AllowNoTargets defaultBuildOpts
menv <- getMinimalEnvOverride
(installedMap, _, _, _) <- getInstalled menv GetInstalledOpts
{ getInstalledProfiling = False
, getInstalledHaddock = False
}
sourceMap
let gpd' = gtraverseT (addBounds sourceMap installedMap) gpd
return $ TLE.encodeUtf8 $ TL.pack $ showGenericPackageDescription gpd'
where
addBounds :: SourceMap -> InstalledMap -> Dependency -> Dependency
addBounds sourceMap installedMap dep@(Dependency cname range) =
case lookupVersion (fromCabalPackageName cname) of
Nothing -> dep
Just version -> Dependency cname $ simplifyVersionRange
$ (if toAddUpper && not (hasUpper range) then addUpper version else id)
$ (if toAddLower && not (hasLower range) then addLower version else id)
range
where
lookupVersion name =
case Map.lookup name sourceMap of
Just (PSLocal lp) -> Just $ packageVersion $ lpPackage lp
Just (PSUpstream version _ _) -> Just version
Nothing ->
case Map.lookup name installedMap of
Just (_, installed) -> Just (installedVersion installed)
Nothing -> Nothing
addUpper version = intersectVersionRanges
(earlierVersion $ toCabalVersion $ nextMajorVersion version)
addLower version = intersectVersionRanges
(orLaterVersion (toCabalVersion version))
(toAddLower, toAddUpper) =
case pvpBounds of
PvpBoundsNone -> (False, False)
PvpBoundsUpper -> (False, True)
PvpBoundsLower -> (True, False)
PvpBoundsBoth -> (True, True)
-- | Traverse a data type.
gtraverseT :: (Data a,Typeable b) => (Typeable b => b -> b) -> a -> a
gtraverseT f =
gmapT (\x -> case cast x of
Nothing -> gtraverseT f x
Just b -> fromMaybe x (cast (f b)))
-- | Read in a 'LocalPackage' config. This makes some default decisions
-- about 'LocalPackage' fields that might not be appropriate for other
-- use-cases.
readLocalPackage :: M env m => Path Abs Dir -> m LocalPackage
readLocalPackage pkgDir = do
cabalfp <- getCabalFileName pkgDir
name <- parsePackageNameFromFilePath cabalfp
config <- getPackageConfig defaultBuildOpts name
(warnings,package) <- readPackage config cabalfp
mapM_ (printCabalFileWarning cabalfp) warnings
return LocalPackage
{ lpPackage = package
, lpWanted = False -- HACK: makes it so that sdist output goes to a log instead of a file.
, lpDir = pkgDir
, lpCabalFile = cabalfp
      -- NOTE: these aren't the 'correct' values, but aren't used in
      -- the usage of this function in this module.
, lpTestDeps = Map.empty
, lpBenchDeps = Map.empty
, lpTestBench = Nothing
, lpDirtyFiles = Just Set.empty
, lpNewBuildCache = Map.empty
, lpFiles = Set.empty
, lpComponents = Set.empty
, lpUnbuildable = Set.empty
}
-- | Returns a newline-separate list of paths, and the absolute path to the .cabal file.
getSDistFileList :: M env m => LocalPackage -> m (String, Path Abs File)
getSDistFileList lp =
withSystemTempDir (stackProgName <> "-sdist") $ \tmpdir -> do
menv <- getMinimalEnvOverride
let bopts = defaultBuildOpts
baseConfigOpts <- mkBaseConfigOpts bopts
(_, _mbp, locals, _extraToBuild, _sourceMap) <- loadSourceMap NeedTargets bopts
runInBase <- liftBaseWith $ \run -> return (void . run)
withExecuteEnv menv bopts baseConfigOpts locals
[] [] [] -- provide empty list of globals. This is a hack around custom Setup.hs files
$ \ee ->
withSingleContext runInBase ac ee task Nothing (Just "sdist") $ \_package cabalfp _pkgDir cabal _announce _console _mlogFile -> do
let outFile = toFilePath tmpdir FP.</> "source-files-list"
cabal False ["sdist", "--list-sources", outFile]
contents <- liftIO (readFile outFile)
return (contents, cabalfp)
where
package = lpPackage lp
ac = ActionContext Set.empty
task = Task
{ taskProvides = PackageIdentifier (packageName package) (packageVersion package)
, taskType = TTLocal lp
, taskConfigOpts = TaskConfigOpts
{ tcoMissing = Set.empty
, tcoOpts = \_ -> ConfigureOpts [] []
}
, taskPresent = Map.empty
, taskAllInOne = True
}
normalizeTarballPaths :: M env m => [FilePath] -> m [FilePath]
normalizeTarballPaths fps = do
-- TODO: consider whether erroring out is better - otherwise the
-- user might upload an incomplete tar?
unless (null outsideDir) $
$logWarn $ T.concat
[ "Warning: These files are outside of the package directory, and will be omitted from the tarball: "
, T.pack (show outsideDir)]
return files
where
(outsideDir, files) = partitionEithers (map pathToEither fps)
pathToEither fp = maybe (Left fp) Right (normalizePath fp)
normalizePath :: FilePath -> Maybe FilePath
normalizePath = fmap FP.joinPath . go . FP.splitDirectories . FP.normalise
where
go [] = Just []
go ("..":_) = Nothing
go (_:"..":xs) = go xs
go (x:xs) = (x :) <$> go xs
dirsFromFiles :: [FilePath] -> [FilePath]
dirsFromFiles dirs = Set.toAscList (Set.delete "." results)
where
results = foldl' (\s -> go s . FP.takeDirectory) Set.empty dirs
go s x
| Set.member x s = s
| otherwise = go (Set.insert x s) (FP.takeDirectory x)
-- | Check package in given tarball. This will log all warnings
-- and will throw an exception in case of critical errors.
--
-- Note that we temporarily decompress the archive to analyze it.
checkSDistTarball :: (MonadIO m, MonadMask m, MonadThrow m, MonadCatch m, MonadLogger m, MonadReader env m, HasEnvConfig env)
=> Path Abs File -- ^ Absolute path to tarball
-> m ()
checkSDistTarball tarball = withTempTarGzContents tarball $ \pkgDir' -> do
pkgDir <- (pkgDir' </>) `liftM`
(parseRelDir . FP.takeBaseName . FP.takeBaseName . toFilePath $ tarball)
-- ^ drop ".tar" ^ drop ".gz"
cabalfp <- getCabalFileName pkgDir
name <- parsePackageNameFromFilePath cabalfp
config <- getPackageConfig defaultBuildOpts name
(gdesc, pkgDesc) <- readPackageDescriptionDir config pkgDir
$logInfo $
"Checking package '" <> packageNameText name <> "' for common mistakes"
let pkgChecks = Check.checkPackage gdesc (Just pkgDesc)
fileChecks <- liftIO $ Check.checkPackageFiles pkgDesc (toFilePath pkgDir)
let checks = pkgChecks ++ fileChecks
(errors, warnings) =
let criticalIssue (Check.PackageBuildImpossible _) = True
criticalIssue (Check.PackageDistInexcusable _) = True
criticalIssue _ = False
in partition criticalIssue checks
unless (null warnings) $
$logWarn $ "Package check reported the following warnings:\n" <>
T.pack (intercalate "\n" . fmap show $ warnings)
case NE.nonEmpty errors of
Nothing -> return ()
Just ne -> throwM $ CheckException ne
-- | Version of 'checkSDistTarball' that first saves lazy bytestring to
-- temporary directory and then calls 'checkSDistTarball' on it.
checkSDistTarball' :: (MonadIO m, MonadMask m, MonadThrow m, MonadCatch m, MonadLogger m, MonadReader env m, HasEnvConfig env)
=> String -- ^ Tarball name
-> L.ByteString -- ^ Tarball contents as a byte string
-> m ()
checkSDistTarball' name bytes = withSystemTempDir "stack" $ \tpath -> do
npath <- (tpath </>) `liftM` parseRelFile name
liftIO $ L.writeFile (toFilePath npath) bytes
checkSDistTarball npath
withTempTarGzContents :: (MonadIO m, MonadMask m, MonadThrow m)
=> Path Abs File -- ^ Location of tarball
-> (Path Abs Dir -> m a) -- ^ Perform actions given dir with tarball contents
-> m a
withTempTarGzContents apath f = withSystemTempDir "stack" $ \tpath -> do
archive <- liftIO $ L.readFile (toFilePath apath)
liftIO . Tar.unpack (toFilePath tpath) . Tar.read . GZip.decompress $ archive
f tpath
--------------------------------------------------------------------------------
-- Copy+modified from the tar package to avoid issues with lazy IO ( see
-- https://github.com/commercialhaskell/stack/issues/1344 )
packFileEntry :: FilePath -- ^ Full path to find the file on the local disk
-> Tar.TarPath -- ^ Path to use for the tar Entry in the archive
-> IO Tar.Entry
packFileEntry filepath tarpath = do
mtime <- getModTime filepath
perms <- getPermissions filepath
content <- S.readFile filepath
let size = fromIntegral (S.length content)
return (Tar.simpleEntry tarpath (Tar.NormalFile (L.fromStrict content) size)) {
Tar.entryPermissions = if executable perms then Tar.executableFilePermissions
else Tar.ordinaryFilePermissions,
Tar.entryTime = mtime
}
getModTime :: FilePath -> IO Tar.EpochTime
getModTime path = do
t <- getModificationTime path
return . floor . utcTimeToPOSIXSeconds $ t
| harendra-kumar/stack | src/Stack/SDist.hs | bsd-3-clause | 14,896 | 0 | 19 | 3,768 | 3,602 | 1,884 | 1,718 | 265 | 10 |
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
module Main where
import "base" Prelude
import Distribution.PackageDescription
import Distribution.Simple
main = do
defaultMainWithHooks simpleUserHooks
{ preConf = \_args _flags -> do putStrLn reminder
return emptyHookedBuildInfo
, postInst = (\_ _ _ _ -> putStrLn fayBaseReminder)
}
reminder =
" \n\
\- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n\
\ \n\
\ REMEMBER: This compiler is in flux, supercalifragelistic style. You should \n\
\ read the CHANGELOG for this release as the changes probably \n\
\ affect you. \n\
\ \n\
\ \n\
\ \n\
\- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n"
fayBaseReminder =
" \n\
\- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n\
\ \n\
\ You also need to install fay-base! \n\
\ \n\
\- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -\n"
| beni55/fay | Setup.hs | bsd-3-clause | 1,812 | 0 | 13 | 1,102 | 91 | 51 | 40 | 15 | 1 |
{-#LANGUAGE NoImplicitPrelude #-}
{-#LANGUAGE OverloadedStrings #-}
{-#LANGUAGE OverloadedLists #-}
{-#LANGUAGE LambdaCase #-}
module Web.Sprinkles.ProjectConfig
where
import Web.Sprinkles.Prelude
import Web.Sprinkles.Rule
import Data.Aeson as JSON
import Data.Aeson.TH
import qualified Data.Yaml as YAML
import Web.Sprinkles.Backends
import Data.Default
import System.FilePath.Glob (glob)
import System.Environment (getEnv, lookupEnv)
import Control.MaybeEitherMonad (maybeFail)
import Data.AList (AList)
import qualified Data.AList as AList
import Web.Sprinkles.Exceptions
data ProjectConfig =
ProjectConfig
{ pcContextData :: AList Text BackendSpec
, pcRules :: [Rule]
}
deriving (Show)
makeProjectPathsAbsolute :: FilePath -> ProjectConfig -> ProjectConfig
makeProjectPathsAbsolute dir (ProjectConfig context rules) =
ProjectConfig (fmap goBackendSpec context) (fmap goRule rules)
where
goBackendSpec = makeBackendSpecPathsAbsolute dir
goRule = makeRulePathsAbsolute dir
instance Default ProjectConfig where
def = ProjectConfig
{ pcContextData = AList.empty
, pcRules = []
}
instance Semigroup ProjectConfig where
(<>) = pcAppend
instance Monoid ProjectConfig where
mempty = def
mappend = pcAppend
instance FromJSON ProjectConfig where
parseJSON = withObject "ProjectConfig" $ \obj -> do
contextData <- fromMaybe AList.empty <$> obj .:? "data"
rulesValue <- fromMaybe (toJSON ([] :: [Value])) <$> (obj .:? "rules" <|> obj .:? "Rules")
rules <- parseJSON rulesValue
return ProjectConfig
{ pcContextData = contextData
, pcRules = rules
}
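-- A minimal project.yml sketch accepted by this instance; the concrete
-- backend and rule syntax is defined by 'BackendSpec' and 'Rule' and only
-- hinted at here:
--
-- > data:
-- >   pages: ...     # parsed into an 'AList Text BackendSpec'
-- > rules:           # the key may also be spelled "Rules"
-- >   - ...          # each entry parsed by the 'FromJSON' instance for 'Rule'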
pcAppend :: ProjectConfig -> ProjectConfig -> ProjectConfig
pcAppend a b =
ProjectConfig
{ pcContextData = pcContextData a <> pcContextData b
, pcRules = pcRules a <> pcRules b
}
firstNonNull :: [a] -> [a] -> [a]
firstNonNull [] xs = xs
firstNonNull xs _ = xs
loadProjectConfigFile :: FilePath -> IO ProjectConfig
loadProjectConfigFile fn =
YAML.decodeFileEither fn >>=
either
(throwM . withSourceContext (pack fn))
return
loadProjectConfig :: FilePath -> IO ProjectConfig
loadProjectConfig dir =
fmap (makeProjectPathsAbsolute dir) . loadProjectConfigFile $ dir </> "project.yml"
| tdammers/templar | src/Web/Sprinkles/ProjectConfig.hs | bsd-3-clause | 2,372 | 0 | 16 | 517 | 596 | 327 | 269 | 62 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE ImpredicativeTypes #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Buffer.Misc
-- License : GPL-2
-- Maintainer : [email protected]
-- Stability : experimental
-- Portability : portable
--
-- The 'Buffer' module defines monadic editing operations over one-dimensional
-- buffers, maintaining a current /point/.
module Yi.Buffer.Misc
( FBuffer (FBuffer, bmode)
, BufferM (..)
, WinMarks, MarkSet (..)
, bkey
, getMarks
, runBuffer
, runBufferFull
, runBufferDummyWindow
, screenTopLn
, screenMidLn
, screenBotLn
, curLn
, curCol
, colOf
, lineOf
, lineCountB
, sizeB
, pointB
, pointOfLineColB
, solPointB
, eolPointB
, markLines
, moveTo
, moveToColB
, moveToLineColB
, lineMoveRel
, lineUp
, lineDown
, newB
, MarkValue (..)
, Overlay
(overlayAnnotation, overlayBegin, overlayEnd, overlayOwner, overlayStyle)
, mkOverlay
, gotoLn
, gotoLnFrom
, leftB
, rightB
, moveN
, leftN
, rightN
, insertN
, insertNAt
, insertB
, deleteN
, nelemsB
, writeB
, writeN
, newlineB
, deleteNAt
, readB
, elemsB
, undosA
, undoB
, redoB
, getMarkB
, setMarkHereB
, setNamedMarkHereB
, mayGetMarkB
, getMarkValueB
, markPointA
, modifyMarkB
, newMarkB
, deleteMarkB
, getVisibleSelection
, setVisibleSelection
, isUnchangedBuffer
, setAnyMode
, setMode
, setMode0
, modifyMode
, regexRegionB
, regexB
, readAtB
, getModeLine
, getPercent
, setInserting
, savingPrefCol
, forgetPreferCol
, movingToPrefCol
, movingToPrefVisCol
, preferColA
, markSavedB
, retroactivelyAtSavePointB
, addOverlayB
, delOverlayB
, delOverlaysOfOwnerB
, getOverlaysOfOwnerB
, isPointInsideOverlay
, savingExcursionB
, savingPointB
, savingPositionB
, pendingUpdatesA
, highlightSelectionA
, rectangleSelectionA
, readOnlyA
, insertingA
, pointFollowsWindowA
, revertPendingUpdatesB
, askWindow
, clearSyntax
, focusSyntax
, Mode (..)
, modeNameA
, modeAppliesA
, modeHLA
, modePrettifyA
, modeKeymapA
, modeIndentA
, modeAdjustBlockA
, modeFollowA
, modeIndentSettingsA
, modeToggleCommentSelectionA
, modeGetStrokesA
, modeOnLoadA
, modeGotoDeclarationA
, modeModeLineA
, AnyMode (..)
, IndentBehaviour (..)
, IndentSettings (..)
, expandTabsA
, tabSizeA
, shiftWidthA
, modeAlwaysApplies
, modeNeverApplies
, emptyMode
, withModeB
, withMode0
, onMode
, withSyntaxB
, withSyntaxB'
, keymapProcessA
, strokesRangesB
, streamB
, indexedStreamB
, askMarks
, pointAt
, SearchExp
, lastActiveWindowA
, putBufferDyn
, getBufferDyn
, shortIdentString
, identString
, miniIdentString
, identA
, directoryContentA
, BufferId (..)
, file
, lastSyncTimeA
, replaceCharB
, replaceCharWithBelowB
, replaceCharWithAboveB
, insertCharWithBelowB
, insertCharWithAboveB
, pointAfterCursorB
, destinationOfMoveB
, withEveryLineB
, startUpdateTransactionB
, commitUpdateTransactionB
, applyUpdate
, betweenB
, decreaseFontSize
, increaseFontSize
, indentSettingsB
, fontsizeVariationA
, encodingConverterNameA
) where
import Prelude hiding (foldr, mapM, notElem)
import Control.Applicative (Applicative ((*>), (<*>), pure), (<$>))
import Control.Lens (Lens', assign, lens, use, uses, view, (%=), (%~), (.=), (^.))
import Control.Monad.RWS.Strict (Endo (Endo, appEndo),
MonadReader (ask), MonadState,
MonadWriter (tell),
Monoid (mconcat, mempty), asks,
gets, join, modify,
replicateM_, runRWS, void,
when, (<>))
import Data.Binary (Binary (..), Get)
import Data.Char (ord)
import Data.Default (Default (def))
import Data.DynamicState.Serializable (getDyn, putDyn)
import Data.Foldable (Foldable (foldr), forM_, notElem)
import qualified Data.Map as M (Map, empty, insert, lookup)
import Data.Maybe (fromMaybe, isNothing)
import qualified Data.Set as Set (Set)
import qualified Data.Text as T (Text, concat, justifyRight, pack, snoc, unpack)
import qualified Data.Text.Encoding as E (decodeUtf8, encodeUtf8)
import Data.Time (UTCTime (UTCTime))
import Data.Traversable (Traversable (mapM), forM)
import Numeric (showHex)
import System.FilePath (joinPath, splitPath)
import Yi.Buffer.Basic (BufferRef, Point (..), Size (Size), WindowRef)
import Yi.Buffer.Implementation
import Yi.Buffer.Undo
import Yi.Interact as I (P (End))
import Yi.Monad (getsAndModify)
import Yi.Region (Region, mkRegion)
import Yi.Rope (YiString)
import qualified Yi.Rope as R
import Yi.Syntax (ExtHL (ExtHL), Stroke, noHighlighter)
import Yi.Types
import Yi.Utils (SemiNum ((+~)), makeClassyWithSuffix, makeLensesWithSuffix)
import Yi.Window (Window (width, wkey, actualLines), dummyWindow)
-- In addition to Buffer's text, this manages (among others):
-- * Log of updates made
-- * Undo
makeClassyWithSuffix "A" ''Attributes
instance HasAttributes FBuffer where
attributesA = lens attributes (\(FBuffer f1 f2 _) a -> FBuffer f1 f2 a)
-- | Gets a short identifier of a buffer. If we're given a 'MemBuffer'
-- then just wraps the buffer name like so: @*name*@. If we're given a
-- 'FileBuffer', it drops the specified number of leading path segments.
--
-- >>> shortIdentString 3 (MemBuffer "hello")
-- "*hello*"
-- >>> shortIdentString 2 (FileBuffer "/home/user/src/Foo.hs")
-- "user/src/Foo.hs"
shortIdentString :: Int -- ^ Number of leading path segments to drop from FileBuffer names
-> FBuffer -- ^ Buffer to work with
-> T.Text
shortIdentString dl b = case b ^. identA of
MemBuffer bName -> "*" <> bName <> "*"
FileBuffer fName -> T.pack . joinPath . drop dl $ splitPath fName
-- | Gets the buffer's identifier string, emphasising the 'MemBuffer':
--
-- >>> identString (MemBuffer "hello")
-- "*hello*"
-- >>> identString (FileBuffer "hello")
-- "hello"
identString :: FBuffer -> T.Text
identString b = case b ^. identA of
MemBuffer bName -> "*" <> bName <> "*"
FileBuffer fName -> T.pack fName
-- TODO: proper instance + de-orphan
instance Show FBuffer where
show b = Prelude.concat [ "Buffer #", show (bkey b)
, " (", T.unpack (identString b), ")" ]
miniIdentString :: FBuffer -> T.Text
miniIdentString b = case b ^. identA of
MemBuffer bufName -> bufName
FileBuffer _ -> "MINIFILE:"
-- unfortunately the dynamic stuff can't be read.
instance Binary FBuffer where
put (FBuffer binmode r attributes_) =
let strippedRaw :: BufferImpl ()
strippedRaw = setSyntaxBI (modeHL emptyMode) r
in do
put binmode
put strippedRaw
put attributes_
get =
FBuffer <$> get <*> getStripped <*> get
where getStripped :: Get (BufferImpl ())
getStripped = get
-- | update the syntax information (clear the dirty "flag")
clearSyntax :: FBuffer -> FBuffer
clearSyntax = modifyRawbuf updateSyntax
queryRawbuf :: (forall syntax. BufferImpl syntax -> x) -> FBuffer -> x
queryRawbuf f (FBuffer _ fb _) = f fb
modifyRawbuf :: (forall syntax. BufferImpl syntax -> BufferImpl syntax) -> FBuffer -> FBuffer
modifyRawbuf f (FBuffer f1 f2 f3) = FBuffer f1 (f f2) f3
queryAndModifyRawbuf :: (forall syntax. BufferImpl syntax -> (BufferImpl syntax,x)) ->
FBuffer -> (FBuffer, x)
queryAndModifyRawbuf f (FBuffer f1 f5 f3) =
let (f5', x) = f f5
in (FBuffer f1 f5' f3, x)
file :: FBuffer -> Maybe FilePath
file b = case b ^. identA of
FileBuffer f -> Just f
MemBuffer _ -> Nothing
highlightSelectionA :: Lens' FBuffer Bool
highlightSelectionA = selectionStyleA .
lens highlightSelection (\e x -> e { highlightSelection = x })
rectangleSelectionA :: Lens' FBuffer Bool
rectangleSelectionA = selectionStyleA .
lens rectangleSelection (\e x -> e { rectangleSelection = x })
-- | Just stores the mode name.
instance Binary (Mode syntax) where
put = put . E.encodeUtf8 . modeName
get = do
n <- E.decodeUtf8 <$> get
return (emptyMode {modeName = n})
-- | Increases the font size in the buffer by specified number. What
-- this number actually means depends on the front-end.
increaseFontSize :: Int -> BufferM ()
increaseFontSize x = fontsizeVariationA %= \fs -> max 1 (fs + x)
-- | Decreases the font size in the buffer by specified number. What
-- this number actually means depends on the front-end.
decreaseFontSize :: Int -> BufferM ()
decreaseFontSize x = fontsizeVariationA %= \fs -> max 1 (fs - x)
-- | Given a buffer, and some information update the modeline
--
-- N.B. the contents of modelines should be specified by user, and
-- not hardcoded.
getModeLine :: [T.Text] -> BufferM T.Text
getModeLine prefix = withModeB (`modeModeLine` prefix)
defaultModeLine :: [T.Text] -> BufferM T.Text
defaultModeLine prefix = do
col <- curCol
pos <- pointB
ln <- curLn
p <- pointB
s <- sizeB
curChar <- readB
ro <-use readOnlyA
modeNm <- gets (withMode0 modeName)
unchanged <- gets isUnchangedBuffer
enc <- use encodingConverterNameA >>= return . \case
Nothing -> mempty
Just cn -> T.pack $ R.unCn cn
let pct
| pos == 0 || s == 0 = " Top"
| pos == s = " Bot"
| otherwise = getPercent p s
changed = if unchanged then "-" else "*"
readOnly' = if ro then "%" else changed
hexxed = T.pack $ showHex (ord curChar) ""
hexChar = "0x" <> T.justifyRight 2 '0' hexxed
toT = T.pack . show
nm <- gets $ shortIdentString (length prefix)
return $ T.concat [ enc, " ", readOnly', changed, " ", nm
, " ", hexChar, " "
, "L", T.justifyRight 5 ' ' (toT ln)
, " "
, "C", T.justifyRight 3 ' ' (toT col)
, " ", pct , " ", modeNm , " ", toT $ fromPoint p
]
-- | Given a point, and the file size, gives us a percent string
getPercent :: Point -> Point -> T.Text
getPercent a b = T.justifyRight 3 ' ' (T.pack $ show p) `T.snoc` '%'
where p = ceiling (aa / bb * 100.0 :: Double) :: Int
aa = fromIntegral a :: Double
bb = fromIntegral b :: Double
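-- Illustrative values:
--
-- >>> getPercent 50 100
-- " 50%"
-- >>> getPercent 1 3
-- " 34%"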
queryBuffer :: (forall syntax. BufferImpl syntax -> x) -> BufferM x
queryBuffer = gets . queryRawbuf
modifyBuffer :: (forall syntax. BufferImpl syntax -> BufferImpl syntax) -> BufferM ()
modifyBuffer = modify . modifyRawbuf
queryAndModify :: (forall syntax. BufferImpl syntax -> (BufferImpl syntax,x)) -> BufferM x
queryAndModify = getsAndModify . queryAndModifyRawbuf
-- | Adds an "overlay" to the buffer
addOverlayB :: Overlay -> BufferM ()
addOverlayB ov = do
pendingUpdatesA %= (++ [overlayUpdate ov])
modifyBuffer $ addOverlayBI ov
getOverlaysOfOwnerB :: R.YiString -> BufferM (Set.Set Overlay)
getOverlaysOfOwnerB owner = queryBuffer (getOverlaysOfOwnerBI owner)
-- | Remove an existing "overlay"
delOverlayB :: Overlay -> BufferM ()
delOverlayB ov = do
pendingUpdatesA %= (++ [overlayUpdate ov])
modifyBuffer $ delOverlayBI ov
delOverlaysOfOwnerB :: R.YiString -> BufferM ()
delOverlaysOfOwnerB owner =
modifyBuffer $ delOverlaysOfOwnerBI owner
isPointInsideOverlay :: Point -> Overlay -> Bool
isPointInsideOverlay point overlay =
let Overlay _ (MarkValue start _) (MarkValue finish _) _ _ = overlay
in start <= point && point <= finish
-- | Execute a @BufferM@ value on a given buffer and window. The new state of
-- the buffer is returned alongside the result of the computation.
runBuffer :: Window -> FBuffer -> BufferM a -> (a, FBuffer)
runBuffer w b f =
let (a, _, b') = runBufferFull w b f
in (a, b')
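-- A usage sketch: @runBuffer win buf sizeB@ runs 'sizeB' against @buf@ as
-- seen through @win@, returning the buffer size together with the resulting
-- buffer (which may differ from @buf@, e.g. window marks are created on
-- first use).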
getMarks :: Window -> BufferM (Maybe WinMarks)
getMarks = gets . getMarksRaw
getMarksRaw :: Window -> FBuffer -> Maybe WinMarks
getMarksRaw w b = M.lookup (wkey w) (b ^. winMarksA)
runBufferFull :: Window -> FBuffer -> BufferM a -> (a, [Update], FBuffer)
runBufferFull w b f =
let (a, b', updates) = runRWS (fromBufferM f') w b
f' = do
ms <- getMarks w
when (isNothing ms) $ do
-- this window has no marks for this buffer yet; have to create them.
newMarkValues <- if wkey (b ^. lastActiveWindowA) == def
then return
-- no previous window, create some marks from scratch.
MarkSet { insMark = MarkValue 0 Forward,
selMark = MarkValue 0 Backward, -- sel
fromMark = MarkValue 0 Backward } -- from
else do
Just mrks <- uses winMarksA (M.lookup $ wkey (b ^. lastActiveWindowA))
forM mrks getMarkValueB
newMrks <- forM newMarkValues newMarkB
winMarksA %= M.insert (wkey w) newMrks
assign lastActiveWindowA w
f
in (a, updates, pendingUpdatesA %~ (++ fmap TextUpdate updates) $ b')
getMarkValueRaw :: Mark -> FBuffer -> MarkValue
getMarkValueRaw m = fromMaybe (MarkValue 0 Forward) . queryRawbuf (getMarkValueBI m)
getMarkValueB :: Mark -> BufferM MarkValue
getMarkValueB = gets . getMarkValueRaw
newMarkB :: MarkValue -> BufferM Mark
newMarkB v = queryAndModify $ newMarkBI v
deleteMarkB :: Mark -> BufferM ()
deleteMarkB m = modifyBuffer $ deleteMarkValueBI m
-- | Execute a @BufferM@ value on a given buffer, using a dummy window. The new state of
-- the buffer is discarded.
runBufferDummyWindow :: FBuffer -> BufferM a -> a
runBufferDummyWindow b = fst . runBuffer (dummyWindow $ bkey b) b
-- | Mark the current point in the undo list as a saved state.
markSavedB :: UTCTime -> BufferM ()
markSavedB t = do undosA %= setSavedFilePointU
assign lastSyncTimeA t
bkey :: FBuffer -> BufferRef
bkey = view bkey__A
isUnchangedBuffer :: FBuffer -> Bool
isUnchangedBuffer = isAtSavedFilePointU . view undosA
startUpdateTransactionB :: BufferM ()
startUpdateTransactionB = do
transactionPresent <- use updateTransactionInFlightA
if transactionPresent
then error "Already started update transaction"
else do
undosA %= addChangeU InteractivePoint
assign updateTransactionInFlightA True
commitUpdateTransactionB :: BufferM ()
commitUpdateTransactionB = do
transactionPresent <- use updateTransactionInFlightA
if not transactionPresent
then error "Not in update transaction"
else do
assign updateTransactionInFlightA False
transacAccum <- use updateTransactionAccumA
assign updateTransactionAccumA []
undosA %= (appEndo . mconcat) (Endo . addChangeU . AtomicChange <$> transacAccum)
undosA %= addChangeU InteractivePoint
undoRedo :: (forall syntax. Mark -> URList -> BufferImpl syntax
-> (BufferImpl syntax, (URList, [Update])))
-> BufferM ()
undoRedo f = do
isTransacPresent <- use updateTransactionInFlightA
if isTransacPresent
then error "Can't undo while undo transaction is in progress"
else do
m <- getInsMark
ur <- use undosA
(ur', updates) <- queryAndModify (f m ur)
assign undosA ur'
tell updates
undoB :: BufferM ()
undoB = undoRedo undoU
redoB :: BufferM ()
redoB = undoRedo redoU
-- | Undo all updates that happened since last save,
-- perform a given action and redo all updates again.
-- Given action must not modify undo history.
retroactivelyAtSavePointB :: BufferM a -> BufferM a
retroactivelyAtSavePointB action = do
(undoDepth, result) <- go 0
replicateM_ undoDepth redoB
return result
where
go step = do
atSavedPoint <- gets isUnchangedBuffer
if atSavedPoint
then (step,) <$> action
else undoB >> go (step + 1)
-- | Analogous to const, but returns a function that takes two parameters,
-- rather than one.
const2 :: t -> t1 -> t2 -> t
const2 x _ _ = x
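-- For example (illustrative): @const2 5 "x" True == 5@.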
-- | A 'modeApplies' function that always returns True.
modeAlwaysApplies :: a -> b -> Bool
modeAlwaysApplies = const2 True
-- | A 'modeApplies' function that always returns False.
modeNeverApplies :: a -> b -> Bool
modeNeverApplies = const2 False
emptyMode :: Mode syntax
emptyMode = Mode
{
modeName = "empty",
modeApplies = modeNeverApplies,
modeHL = ExtHL noHighlighter,
modePrettify = const $ return (),
modeKeymap = id,
modeIndent = \_ _ -> return (),
modeAdjustBlock = \_ _ -> return (),
modeFollow = const emptyAction,
modeIndentSettings = IndentSettings
{ expandTabs = True
, tabSize = 8
, shiftWidth = 4
},
modeToggleCommentSelection = Nothing,
modeGetStrokes = \_ _ _ _ -> [],
modeOnLoad = return (),
modeGotoDeclaration = return (),
modeModeLine = defaultModeLine
}
-- | Create buffer named @nm@ with contents @s@
newB :: BufferRef -> BufferId -> YiString -> FBuffer
newB unique nm s =
FBuffer { bmode = emptyMode
, rawbuf = newBI s
, attributes =
Attributes { ident = nm
, bkey__ = unique
, undos = emptyU
, preferCol = Nothing
, preferVisCol = Nothing
, bufferDynamic = mempty
, pendingUpdates = []
, selectionStyle = SelectionStyle False False
, keymapProcess = I.End
, winMarks = M.empty
, lastActiveWindow = dummyWindow unique
, lastSyncTime = epoch
, readOnly = False
, directoryContent = False
, inserting = True
, pointFollowsWindow = const False
, updateTransactionInFlight = False
, updateTransactionAccum = []
, fontsizeVariation = 0
, encodingConverterName = Nothing
} }
epoch :: UTCTime
epoch = UTCTime (toEnum 0) (toEnum 0)
-- | Point of eof
sizeB :: BufferM Point
sizeB = queryBuffer sizeBI
-- | Extract the current point
pointB :: BufferM Point
pointB = use . markPointA =<< getInsMark
nelemsB :: Int -> Point -> BufferM YiString
nelemsB n i = R.take n <$> streamB Forward i
streamB :: Direction -> Point -> BufferM YiString
streamB dir i = queryBuffer $ getStream dir i
indexedStreamB :: Direction -> Point -> BufferM [(Point,Char)]
indexedStreamB dir i = queryBuffer $ getIndexedStream dir i
strokesRangesB :: Maybe SearchExp -> Region -> BufferM [[Stroke]]
strokesRangesB regex r = do
p <- pointB
getStrokes <- withSyntaxB modeGetStrokes
queryBuffer $ strokesRangesBI getStrokes regex r p
------------------------------------------------------------------------
-- Point based operations
-- | Move point in buffer to the given index
moveTo :: Point -> BufferM ()
moveTo x = do
forgetPreferCol
maxP <- sizeB
let p = case () of
_ | x < 0 -> Point 0
| x > maxP -> maxP
| otherwise -> x
(.= p) . markPointA =<< getInsMark
------------------------------------------------------------------------
setInserting :: Bool -> BufferM ()
setInserting = assign insertingA
checkRO :: BufferM Bool
checkRO = do
ro <- use readOnlyA
when ro (fail "Read Only Buffer")
return ro
applyUpdate :: Update -> BufferM ()
applyUpdate update = do
ro <- checkRO
valid <- queryBuffer (isValidUpdate update)
when (not ro && valid) $ do
forgetPreferCol
let reversed = reverseUpdateI update
modifyBuffer (applyUpdateI update)
isTransacPresent <- use updateTransactionInFlightA
if isTransacPresent
then updateTransactionAccumA %= (reversed:)
else undosA %= addChangeU (AtomicChange reversed)
tell [update]
-- otherwise, just ignore.
-- | Revert all the pending updates; don't touch the point.
revertPendingUpdatesB :: BufferM ()
revertPendingUpdatesB = do
updates <- use pendingUpdatesA
modifyBuffer (flip (foldr (\u bi -> applyUpdateI (reverseUpdateI u) bi)) [u | TextUpdate u <- updates])
-- | Write an element into the buffer at the current point.
writeB :: Char -> BufferM ()
writeB c = do
deleteN 1
insertB c
-- | Write the list into the buffer at current point.
writeN :: YiString -> BufferM ()
writeN cs = do
off <- pointB
deleteNAt Forward (R.length cs) off
insertNAt cs off
-- | Insert newline at current point.
newlineB :: BufferM ()
newlineB = insertB '\n'
------------------------------------------------------------------------
-- | Insert given 'YiString' at specified point, extending size of the
-- buffer.
insertNAt :: YiString -> Point -> BufferM ()
insertNAt rope pnt = applyUpdate (Insert pnt Forward rope)
-- | Insert the 'YiString' at current point, extending size of buffer
insertN :: YiString -> BufferM ()
insertN cs = pointB >>= insertNAt cs
-- | Insert the char at current point, extending size of buffer
--
-- Implementation note: This just 'insertB's a 'R.singleton'. This
-- seems sub-optimal because we should be able to do much better
-- without spewing chunks of size 1 everywhere. This approach is
-- necessary however so an 'Update' can be recorded. A possible
-- improvement for space would be to have ‘yi-rope’ package optimise
-- for appends with length 1.
insertB :: Char -> BufferM ()
insertB = insertN . R.singleton
------------------------------------------------------------------------
-- | @deleteNAt dir n p@ deletes @n@ characters forwards from position @p@
deleteNAt :: Direction -> Int -> Point -> BufferM ()
deleteNAt _ 0 _ = return ()
deleteNAt dir n pos = do
els <- R.take n <$> streamB Forward pos
applyUpdate $ Delete pos dir els
------------------------------------------------------------------------
-- Line based editing
-- | Return the current line number
curLn :: BufferM Int
curLn = do
p <- pointB
queryBuffer (lineAt p)
-- | Top line of the screen
screenTopLn :: BufferM Int
screenTopLn = do
p <- use . markPointA =<< fromMark <$> askMarks
queryBuffer (lineAt p)
-- | Middle line of the screen
screenMidLn :: BufferM Int
screenMidLn = (+) <$> screenTopLn <*> (div <$> screenLines <*> pure 2)
-- | Bottom line of the screen
screenBotLn :: BufferM Int
screenBotLn = (+) <$> screenTopLn <*> screenLines
-- | Amount of lines in the screen
screenLines :: BufferM Int
screenLines = pred <$> askWindow actualLines
-- | Return line numbers of marks
markLines :: BufferM (MarkSet Int)
markLines = mapM getLn =<< askMarks
where getLn m = use (markPointA m) >>= lineOf
-- | Go to line number @n@. @n@ is indexed from 1. Returns the
-- actual line we went to (which may not be the requested line,
-- if it was out of range)
gotoLn :: Int -> BufferM Int
gotoLn x = do
moveTo 0
succ <$> gotoLnFrom (x - 1)
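-- For example (illustrative), @gotoLn 1@ moves to the start of the buffer
-- and returns 1.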
---------------------------------------------------------------------
setMode0 :: forall syntax. Mode syntax -> FBuffer -> FBuffer
setMode0 m (FBuffer _ rb at) = FBuffer m (setSyntaxBI (modeHL m) rb) at
modifyMode0 :: (forall syntax. Mode syntax -> Mode syntax) -> FBuffer -> FBuffer
modifyMode0 f (FBuffer m rb f3) = FBuffer m' (setSyntaxBI (modeHL m') rb) f3
where m' = f m
-- | Set the mode
setAnyMode :: AnyMode -> BufferM ()
setAnyMode (AnyMode m) = setMode m
setMode :: Mode syntax -> BufferM ()
setMode m = do
modify (setMode0 m)
-- reset the keymap process so we use the one of the new mode.
assign keymapProcessA I.End
modeOnLoad m
-- | Modify the mode
modifyMode :: (forall syntax. Mode syntax -> Mode syntax) -> BufferM ()
modifyMode f = do
modify (modifyMode0 f)
-- reset the keymap process so we use the one of the new mode.
assign keymapProcessA I.End
onMode :: (forall syntax. Mode syntax -> Mode syntax) -> AnyMode -> AnyMode
onMode f (AnyMode m) = AnyMode (f m)
withMode0 :: (forall syntax. Mode syntax -> a) -> FBuffer -> a
withMode0 f FBuffer {bmode = m} = f m
withModeB :: (forall syntax. Mode syntax -> BufferM a) -> BufferM a
withModeB = join . gets . withMode0
withSyntax0 :: (forall syntax. Mode syntax -> syntax -> a) -> WindowRef -> FBuffer -> a
withSyntax0 f wk (FBuffer bm rb _attrs) = f bm (getAst wk rb)
withSyntaxB :: (forall syntax. Mode syntax -> syntax -> a) -> BufferM a
withSyntaxB f = withSyntax0 f <$> askWindow wkey <*> use id
focusSyntax :: M.Map WindowRef Region -> FBuffer -> FBuffer
focusSyntax r = modifyRawbuf (focusAst r)
withSyntaxB' :: (forall syntax. Mode syntax -> syntax -> BufferM a) -> BufferM a
withSyntaxB' = join . withSyntaxB
-- | Return indices of strings in buffer matched by regex in the
-- given region.
regexRegionB :: SearchExp -> Region -> BufferM [Region]
regexRegionB regex region = queryBuffer $ regexRegionBI regex region
-- | Return indices of next string in buffer matched by regex in the
-- given direction
regexB :: Direction -> SearchExp -> BufferM [Region]
regexB dir rx = do
p <- pointB
s <- sizeB
regexRegionB rx (mkRegion p (case dir of Forward -> s; Backward -> 0))
---------------------------------------------------------------------
modifyMarkRaw :: Mark -> (MarkValue -> MarkValue) -> FBuffer -> FBuffer
modifyMarkRaw m f = modifyRawbuf $ modifyMarkBI m f
modifyMarkB :: Mark -> (MarkValue -> MarkValue) -> BufferM ()
modifyMarkB = (modify .) . modifyMarkRaw
setMarkHereB :: BufferM Mark
setMarkHereB = getMarkB Nothing
setNamedMarkHereB :: String -> BufferM ()
setNamedMarkHereB name = do
p <- pointB
getMarkB (Just name) >>= (.= p) . markPointA
-- | Highlight the selection
setVisibleSelection :: Bool -> BufferM ()
setVisibleSelection = assign highlightSelectionA
-- | Whether the selection is highlighted
getVisibleSelection :: BufferM Bool
getVisibleSelection = use highlightSelectionA
getInsMark :: BufferM Mark
getInsMark = insMark <$> askMarks
askMarks :: BufferM WinMarks
askMarks = do
Just ms <- getMarks =<< ask
return ms
getMarkB :: Maybe String -> BufferM Mark
getMarkB m = do
p <- pointB
queryAndModify (getMarkDefaultPosBI m p)
mayGetMarkB :: String -> BufferM (Maybe Mark)
mayGetMarkB m = queryBuffer (getMarkBI m)
-- | Move point by the given number of characters.
-- A negative offset moves backwards, a positive one forward.
moveN :: Int -> BufferM ()
moveN n = do
s <- sizeB
moveTo =<< min s . max 0 . (+~ Size n) <$> pointB
-- | Move point -1
leftB :: BufferM ()
leftB = leftN 1
-- | Move cursor -n
leftN :: Int -> BufferM ()
leftN n = moveN (-n)
-- | Move cursor +1
rightB :: BufferM ()
rightB = rightN 1
-- | Move cursor +n
rightN :: Int -> BufferM ()
rightN = moveN
-- ---------------------------------------------------------------------
-- Line based movement and friends
-- | Move point down by @n@ lines. @n@ can be negative.
-- Returns the actual difference in lines which we moved which
-- may be negative if the requested line difference is negative.
lineMoveRel :: Int -> BufferM Int
lineMoveRel = movingToPrefCol . gotoLnFrom
movingToPrefCol :: BufferM a -> BufferM a
movingToPrefCol f = do
prefCol <- use preferColA
targetCol <- maybe curCol return prefCol
r <- f
moveToColB targetCol
preferColA .= Just targetCol
return r
-- | Moves to a visual column within the current line as shown
-- on the editor (ie, moving within the current width of a
-- single visual line)
movingToPrefVisCol :: BufferM a -> BufferM a
movingToPrefVisCol f = do
prefCol <- use preferVisColA
targetCol <- maybe curVisCol return prefCol
r <- f
moveToVisColB targetCol
preferVisColA .= Just targetCol
return r
moveToColB :: Int -> BufferM ()
moveToColB targetCol = do
solPnt <- solPointB =<< pointB
chrs <- R.toString <$> nelemsB targetCol solPnt
is <- indentSettingsB
let cols = scanl (colMove is) 0 chrs -- columns corresponding to the char
toSkip = takeWhile (\(char,col) -> char /= '\n' && col < targetCol) (zip chrs cols)
moveTo $ solPnt +~ fromIntegral (length toSkip)
moveToVisColB :: Int -> BufferM ()
moveToVisColB targetCol = do
col <- curCol
wid <- width <$> use lastActiveWindowA
let jumps = col `div` wid
moveToColB $ jumps * wid + targetCol
moveToLineColB :: Int -> Int -> BufferM ()
moveToLineColB line col = gotoLn line >> moveToColB col
pointOfLineColB :: Int -> Int -> BufferM Point
pointOfLineColB line col = savingPointB $ moveToLineColB line col >> pointB
forgetPreferCol :: BufferM ()
forgetPreferCol = preferColA .= Nothing >> preferVisColA .= Nothing
savingPrefCol :: BufferM a -> BufferM a
savingPrefCol f = do
pc <- use preferColA
pv <- use preferVisColA
result <- f
preferColA .= pc
preferVisColA .= pv
return result
-- | Move point up one line
lineUp :: BufferM ()
lineUp = void (lineMoveRel (-1))
-- | Move point down one line
lineDown :: BufferM ()
lineDown = void (lineMoveRel 1)
-- | Return the contents of the buffer.
elemsB :: BufferM YiString
elemsB = queryBuffer mem
-- | Returns the contents of the buffer between the two points.
--
-- If @startPoint >= endPoint@, an empty string is returned. If the
-- points are out of bounds, as much of the content as possible is
-- taken: you're not guaranteed to get @endPoint - startPoint@
-- characters.
betweenB :: Point -- ^ Point to start at
-> Point -- ^ Point to stop at
-> BufferM YiString
betweenB (Point s) (Point e) =
if s >= e
then return mempty
else snd . R.splitAt s . fst . R.splitAt e <$> elemsB
-- | Read the character at the current point
readB :: BufferM Char
readB = pointB >>= readAtB
-- | Read the character at the given index
-- This is an unsafe operation: character NUL is returned when out of bounds
readAtB :: Point -> BufferM Char
readAtB i = R.head <$> nelemsB 1 i >>= return . \case
Nothing -> '\0'
Just c -> c
replaceCharB :: Char -> BufferM ()
replaceCharB c = do
writeB c
leftB
replaceCharWithBelowB :: BufferM ()
replaceCharWithBelowB = replaceCharWithVerticalOffset 1
replaceCharWithAboveB :: BufferM ()
replaceCharWithAboveB = replaceCharWithVerticalOffset (-1)
insertCharWithBelowB :: BufferM ()
insertCharWithBelowB = maybe (return ()) insertB =<< maybeCharBelowB
insertCharWithAboveB :: BufferM ()
insertCharWithAboveB = maybe (return ()) insertB =<< maybeCharAboveB
replaceCharWithVerticalOffset :: Int -> BufferM ()
replaceCharWithVerticalOffset offset =
maybe (return ()) replaceCharB =<< maybeCharWithVerticalOffset offset
maybeCharBelowB :: BufferM (Maybe Char)
maybeCharBelowB = maybeCharWithVerticalOffset 1
maybeCharAboveB :: BufferM (Maybe Char)
maybeCharAboveB = maybeCharWithVerticalOffset (-1)
maybeCharWithVerticalOffset :: Int -> BufferM (Maybe Char)
maybeCharWithVerticalOffset offset = savingPointB $ do
l0 <- curLn
c0 <- curCol
void $ lineMoveRel offset
l1 <- curLn
c1 <- curCol
curChar <- readB
return $ if c0 == c1
&& l0 + offset == l1
&& curChar `notElem` ("\n\0" :: String)
then Just curChar
else Nothing
-- | Delete @n@ characters forward from the current point
deleteN :: Int -> BufferM ()
deleteN n = pointB >>= deleteNAt Forward n
------------------------------------------------------------------------
-- | Gives the 'IndentSettings' for the current buffer.
indentSettingsB :: BufferM IndentSettings
indentSettingsB = withModeB $ return . modeIndentSettings
-- | Current column.
-- Note that this is different from offset or number of chars from sol.
-- (This takes into account tabs, unicode chars, etc.)
curCol :: BufferM Int
curCol = colOf =<< pointB
-- | Current column, visually.
curVisCol :: BufferM Int
curVisCol = rem <$> curCol <*> (width <$> use lastActiveWindowA)
colOf :: Point -> BufferM Int
colOf p = do
is <- indentSettingsB
R.foldl' (colMove is) 0 <$> queryBuffer (charsFromSolBI p)
lineOf :: Point -> BufferM Int
lineOf p = queryBuffer $ lineAt p
lineCountB :: BufferM Int
lineCountB = lineOf =<< sizeB
-- | Decides which column we should be on after the given character.
colMove :: IndentSettings -> Int -> Char -> Int
colMove is col '\t' | tabSize is > 1 = col + tabSize is
colMove _ col _ = col + 1
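-- For example (illustrative), with @is@ having a tab size of 8:
-- > colMove is 0 '\t' == 8
-- > colMove is 3 'a'  == 4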
-- | Returns start of line point for a given point @p@
solPointB :: Point -> BufferM Point
solPointB p = queryBuffer $ solPoint' p
-- | Returns end of line for given point.
eolPointB :: Point -> BufferM Point
eolPointB p = queryBuffer $ eolPoint' p
-- | Go to the line at the given offset from the current line.
-- Returns the actual difference in lines moved, which may be negative
-- if the requested difference was negative.
gotoLnFrom :: Int -> BufferM Int
gotoLnFrom x = do
l <- curLn
p' <- queryBuffer $ solPoint (l + x)
moveTo p'
l' <- curLn
return (l' - l)
-- | Access a value in the extensible state, keyed by its type.
-- This allows you to retrieve it inside a 'BufferM' monad, e.g.:
--
-- > value <- getBufferDyn
getBufferDyn :: (YiVariable a, MonadState FBuffer m, Functor m) => m a
getBufferDyn = fromMaybe def <$> getDyn (use bufferDynamicA) (assign bufferDynamicA)
-- | Store a value in the extensible state, keyed by its type.
-- This allows you to save it inside a 'BufferM' monad, e.g.:
--
-- > putBufferDyn updatedvalue
putBufferDyn :: (YiVariable a, MonadState FBuffer m, Functor m) => a -> m ()
putBufferDyn = putDyn (use bufferDynamicA) (assign bufferDynamicA)
-- | Perform a @BufferM a@ and return to the current point (by using a mark).
savingExcursionB :: BufferM a -> BufferM a
savingExcursionB f = do
m <- getMarkB Nothing
res <- f
moveTo =<< use (markPointA m)
return res
markPointA :: Mark -> Lens' FBuffer Point
markPointA mark = lens getter setter where
getter b = markPoint $ getMarkValueRaw mark b
setter b pos = modifyMarkRaw mark (\v -> v {markPoint = pos}) b
-- | Perform a @BufferM a@, and return to the current point.
savingPointB :: BufferM a -> BufferM a
savingPointB f = savingPrefCol $ do
p <- pointB
res <- f
moveTo p
return res
-- | Perform a @BufferM a@, and return to the current line and column
-- number. The difference between this and 'savingPointB' is that here
-- we attempt to return to the specific line and column number, rather
-- than a specific number of characters from the beginning of the
-- buffer.
--
-- In case the column is further away than EOL, the point is left at
-- EOL: 'moveToLineColB' is used internally.
savingPositionB :: BufferM a -> BufferM a
savingPositionB f = savingPrefCol $ do
(c, l) <- (,) <$> curCol <*> curLn
res <- f
moveToLineColB l c
return res
pointAt :: BufferM a -> BufferM Point
pointAt f = savingPointB (f *> pointB)
pointAfterCursorB :: Point -> BufferM Point
pointAfterCursorB p = pointAt $ do
moveTo p
rightB
-- | What would be the point after doing the given action?
-- The argument must not modify the buffer.
destinationOfMoveB :: BufferM a -> BufferM Point
destinationOfMoveB f = savingPointB (f >> pointB)
-------------
-- Window
askWindow :: (Window -> a) -> BufferM a
askWindow = asks
withEveryLineB :: BufferM () -> BufferM ()
withEveryLineB action = savingPointB $ do
lineCount <- lineCountB
forM_ [1 .. lineCount] $ \l -> do
void $ gotoLn l
action
makeLensesWithSuffix "A" ''IndentSettings
makeLensesWithSuffix "A" ''Mode
| TOSPIO/yi | src/library/Yi/Buffer/Misc.hs | gpl-2.0 | 36,574 | 0 | 24 | 8,887 | 9,227 | 4,861 | 4,366 | 825 | 4 |
import System.IO (hFlush, stdout)
import System.Environment (getArgs)
import Control.Monad (mapM)
import Control.Monad.Error (runErrorT)
import Control.Monad.Trans (liftIO)
import qualified Data.Map as Map
import qualified Data.Traversable as DT
import Readline (readline, load_history)
import Types
import Reader (read_str)
import Printer (_pr_str)
import Env (Env, env_new, env_bind, env_get, env_set)
import Core as Core
-- read
mal_read :: String -> IOThrows MalVal
mal_read str = read_str str
-- eval
eval_ast :: MalVal -> Env -> IOThrows MalVal
eval_ast sym@(MalSymbol _) env = env_get env sym
eval_ast ast@(MalList lst m) env = do
new_lst <- mapM (\x -> (eval x env)) lst
return $ MalList new_lst m
eval_ast ast@(MalVector lst m) env = do
new_lst <- mapM (\x -> (eval x env)) lst
return $ MalVector new_lst m
eval_ast ast@(MalHashMap lst m) env = do
new_hm <- DT.mapM (\x -> (eval x env)) lst
return $ MalHashMap new_hm m
eval_ast ast env = return ast
let_bind :: Env -> [MalVal] -> IOThrows Env
let_bind env [] = return env
let_bind env (b:e:xs) = do
evaled <- eval e env
x <- liftIO $ env_set env b evaled
let_bind env xs
apply_ast :: MalVal -> Env -> IOThrows MalVal
apply_ast ast@(MalList [] _) env = do
return ast
apply_ast ast@(MalList (MalSymbol "def!" : args) _) env = do
case args of
(a1@(MalSymbol _): a2 : []) -> do
evaled <- eval a2 env
liftIO $ env_set env a1 evaled
_ -> throwStr "invalid def!"
apply_ast ast@(MalList (MalSymbol "let*" : args) _) env = do
case args of
(a1 : a2 : []) -> do
params <- (_to_list a1)
let_env <- liftIO $ env_new $ Just env
let_bind let_env params
eval a2 let_env
_ -> throwStr "invalid let*"
apply_ast ast@(MalList (MalSymbol "do" : args) _) env = do
case args of
([]) -> return Nil
_ -> do
el <- eval_ast (MalList args Nil) env
case el of
(MalList lst _) -> return $ last lst
apply_ast ast@(MalList (MalSymbol "if" : args) _) env = do
case args of
(a1 : a2 : a3 : []) -> do
cond <- eval a1 env
if cond == MalFalse || cond == Nil
then eval a3 env
else eval a2 env
(a1 : a2 : []) -> do
cond <- eval a1 env
if cond == MalFalse || cond == Nil
then return Nil
else eval a2 env
_ -> throwStr "invalid if"
apply_ast ast@(MalList (MalSymbol "fn*" : args) _) env = do
case args of
(a1 : a2 : []) -> do
params <- (_to_list a1)
return $ (_malfunc a2 env (MalList params Nil)
(\args -> do
fn_env1 <- liftIO $ env_new $ Just env
fn_env2 <- liftIO $ env_bind fn_env1 params args
eval a2 fn_env2))
_ -> throwStr "invalid fn*"
apply_ast ast@(MalList _ _) env = do
el <- eval_ast ast env
case el of
(MalList ((Func (Fn f) _) : rest) _) ->
f $ rest
(MalList ((MalFunc {ast=ast, env=fn_env, params=(MalList params Nil)}) : rest) _) -> do
fn_env1 <- liftIO $ env_new $ Just fn_env
fn_env2 <- liftIO $ env_bind fn_env1 params rest
eval ast fn_env2
el ->
throwStr $ "invalid apply: " ++ (show el)
eval :: MalVal -> Env -> IOThrows MalVal
eval ast env = do
case ast of
(MalList _ _) -> apply_ast ast env
_ -> eval_ast ast env
-- print
mal_print :: MalVal -> String
mal_print exp = show exp
-- repl
rep :: Env -> String -> IOThrows String
rep env line = do
ast <- mal_read line
exp <- eval ast env
return $ mal_print exp
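-- For example (illustrative), @rep env "(+ 1 2)"@ reads, evaluates and
-- prints the expression, yielding the string "3".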
repl_loop :: Env -> IO ()
repl_loop env = do
line <- readline "user> "
case line of
Nothing -> return ()
Just "" -> repl_loop env
Just str -> do
res <- runErrorT $ rep env str
out <- case res of
Left (StringError str) -> return $ "Error: " ++ str
Left (MalValError mv) -> return $ "Error: " ++ (show mv)
Right val -> return val
putStrLn out
hFlush stdout
repl_loop env
main = do
args <- getArgs
load_history
repl_env <- env_new Nothing
-- core.hs: defined using Haskell
(mapM (\(k,v) -> (env_set repl_env (MalSymbol k) v)) Core.ns)
env_set repl_env (MalSymbol "eval") (_func (\[ast] -> eval ast repl_env))
env_set repl_env (MalSymbol "*ARGV*") (MalList [] Nil)
-- core.mal: defined using the language itself
runErrorT $ rep repl_env "(def! not (fn* (a) (if a false true)))"
runErrorT $ rep repl_env "(def! load-file (fn* (f) (eval (read-string (str \"(do \" (slurp f) \")\")))))"
if length args > 0 then do
env_set repl_env (MalSymbol "*ARGV*") (MalList (map MalString (drop 1 args)) Nil)
runErrorT $ rep repl_env $ "(load-file \"" ++ (args !! 0) ++ "\")"
return ()
else
repl_loop repl_env
| mpwillson/mal | haskell/step6_file.hs | mpl-2.0 | 5,134 | 0 | 21 | 1,671 | 1,958 | 962 | 996 | 132 | 11 |
{-# LANGUAGE CPP, StandaloneDeriving, GeneralizedNewtypeDeriving #-}
module GHCi.RemoteTypes
( RemotePtr(..), toRemotePtr, fromRemotePtr, castRemotePtr
, HValue(..)
, RemoteRef, mkRemoteRef, localRef, freeRemoteRef
, HValueRef, toHValueRef
, ForeignRef, mkForeignRef, withForeignRef
, ForeignHValue
, unsafeForeignRefToRemoteRef, finalizeForeignRef
) where
import Data.Word
import Foreign hiding (newForeignPtr)
import Foreign.Concurrent
import Data.Binary
import Unsafe.Coerce
import GHC.Exts
import GHC.ForeignPtr
-- -----------------------------------------------------------------------------
-- RemotePtr
-- Static pointers only; don't use this for heap-resident pointers.
-- Instead use HValueRef.
#include "MachDeps.h"
#if SIZEOF_HSINT == 4
newtype RemotePtr a = RemotePtr Word32
#elif SIZEOF_HSINT == 8
newtype RemotePtr a = RemotePtr Word64
#endif
toRemotePtr :: Ptr a -> RemotePtr a
toRemotePtr p = RemotePtr (fromIntegral (ptrToWordPtr p))
fromRemotePtr :: RemotePtr a -> Ptr a
fromRemotePtr (RemotePtr p) = wordPtrToPtr (fromIntegral p)
castRemotePtr :: RemotePtr a -> RemotePtr b
castRemotePtr (RemotePtr a) = RemotePtr a
deriving instance Show (RemotePtr a)
deriving instance Binary (RemotePtr a)
-- -----------------------------------------------------------------------------
-- HValueRef
newtype HValue = HValue Any
instance Show HValue where
show _ = "<HValue>"
-- | A reference to a remote value. These are allocated and freed explicitly.
newtype RemoteRef a = RemoteRef (RemotePtr ())
deriving (Show, Binary)
-- We can discard type information if we want
toHValueRef :: RemoteRef a -> RemoteRef HValue
toHValueRef = unsafeCoerce
-- For convenience
type HValueRef = RemoteRef HValue
-- | Make a reference to a local value that we can send remotely.
-- This reference will keep the value that it refers to alive until
-- 'freeRemoteRef' is called.
mkRemoteRef :: a -> IO (RemoteRef a)
mkRemoteRef a = do
sp <- newStablePtr a
return $! RemoteRef (toRemotePtr (castStablePtrToPtr sp))
-- | Convert an HValueRef to an HValue. Should only be used if the HValue
-- originated in this process.
localRef :: RemoteRef a -> IO a
localRef (RemoteRef w) =
deRefStablePtr (castPtrToStablePtr (fromRemotePtr w))
-- | Release an HValueRef that originated in this process
freeRemoteRef :: RemoteRef a -> IO ()
freeRemoteRef (RemoteRef w) =
freeStablePtr (castPtrToStablePtr (fromRemotePtr w))
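-- A minimal same-process round trip using the functions above (illustrative):
-- > do r <- mkRemoteRef x; y <- localRef r; freeRemoteRef r; return y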
-- | An HValueRef with a finalizer
newtype ForeignRef a = ForeignRef (ForeignPtr ())
type ForeignHValue = ForeignRef HValue
-- | Create a 'ForeignRef' from a 'RemoteRef'. The finalizer
-- should arrange to call 'freeRemoteRef' on the 'RemoteRef'. (since
-- this function needs to be called in the process that created the
-- 'RemoteRef', it cannot be called directly from the finalizer).
mkForeignRef :: RemoteRef a -> IO () -> IO (ForeignRef a)
mkForeignRef (RemoteRef hvref) finalizer =
ForeignRef <$> newForeignPtr (fromRemotePtr hvref) finalizer
-- | Use a 'ForeignHValue'
withForeignRef :: ForeignRef a -> (RemoteRef a -> IO b) -> IO b
withForeignRef (ForeignRef fp) f =
withForeignPtr fp (f . RemoteRef . toRemotePtr)
unsafeForeignRefToRemoteRef :: ForeignRef a -> RemoteRef a
unsafeForeignRefToRemoteRef (ForeignRef fp) =
RemoteRef (toRemotePtr (unsafeForeignPtrToPtr fp))
finalizeForeignRef :: ForeignRef a -> IO ()
finalizeForeignRef (ForeignRef fp) = finalizeForeignPtr fp
| tolysz/prepare-ghcjs | spec-lts8/ghci/GHCi/RemoteTypes.hs | bsd-3-clause | 3,441 | 0 | 12 | 526 | 735 | 392 | 343 | 55 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, NoImplicitPrelude #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO.Encoding
-- Copyright : (c) The University of Glasgow, 2008-2009
-- License : see libraries/base/LICENSE
--
-- Maintainer : [email protected]
-- Stability : internal
-- Portability : non-portable
--
-- Text codecs for I/O
--
-----------------------------------------------------------------------------
module GHC.IO.Encoding (
BufferCodec(..), TextEncoding(..), TextEncoder, TextDecoder, CodingProgress(..),
latin1, latin1_encode, latin1_decode,
utf8, utf8_bom,
utf16, utf16le, utf16be,
utf32, utf32le, utf32be,
initLocaleEncoding,
getLocaleEncoding, getFileSystemEncoding, getForeignEncoding,
setLocaleEncoding, setFileSystemEncoding, setForeignEncoding,
char8,
mkTextEncoding,
argvEncoding
) where
import GHC.Base
import GHC.IO.Exception
import GHC.IO.Buffer
import GHC.IO.Encoding.Failure
import GHC.IO.Encoding.Types
#if !defined(mingw32_HOST_OS)
import qualified GHC.IO.Encoding.Iconv as Iconv
#else
import qualified GHC.IO.Encoding.CodePage as CodePage
import Text.Read (reads)
#endif
import qualified GHC.IO.Encoding.Latin1 as Latin1
import qualified GHC.IO.Encoding.UTF8 as UTF8
import qualified GHC.IO.Encoding.UTF16 as UTF16
import qualified GHC.IO.Encoding.UTF32 as UTF32
import GHC.List
import GHC.Word
import Data.IORef
import Data.Char (toUpper)
import System.IO.Unsafe (unsafePerformIO)
-- -----------------------------------------------------------------------------
-- | The Latin1 (ISO8859-1) encoding. This encoding maps bytes
-- directly to the first 256 Unicode code points, and is thus not a
-- complete Unicode encoding. An attempt to write a character greater than
-- '\255' to a 'Handle' using the 'latin1' encoding will result in an error.
latin1 :: TextEncoding
latin1 = Latin1.latin1_checked
-- | The UTF-8 Unicode encoding
utf8 :: TextEncoding
utf8 = UTF8.utf8
-- | The UTF-8 Unicode encoding, with a byte-order-mark (BOM; the byte
-- sequence 0xEF 0xBB 0xBF). This encoding behaves like 'utf8',
-- except that on input, the BOM sequence is ignored at the beginning
-- of the stream, and on output, the BOM sequence is prepended.
--
-- The byte-order-mark is strictly unnecessary in UTF-8, but is
-- sometimes used to identify the encoding of a file.
--
utf8_bom :: TextEncoding
utf8_bom = UTF8.utf8_bom
-- | The UTF-16 Unicode encoding (a byte-order-mark should be used to
-- indicate endianness).
utf16 :: TextEncoding
utf16 = UTF16.utf16
-- | The UTF-16 Unicode encoding (little-endian)
utf16le :: TextEncoding
utf16le = UTF16.utf16le
-- | The UTF-16 Unicode encoding (big-endian)
utf16be :: TextEncoding
utf16be = UTF16.utf16be
-- | The UTF-32 Unicode encoding (a byte-order-mark should be used to
-- indicate endianness).
utf32 :: TextEncoding
utf32 = UTF32.utf32
-- | The UTF-32 Unicode encoding (little-endian)
utf32le :: TextEncoding
utf32le = UTF32.utf32le
-- | The UTF-32 Unicode encoding (big-endian)
utf32be :: TextEncoding
utf32be = UTF32.utf32be
-- | The Unicode encoding of the current locale
--
-- @since 4.5.0.0
getLocaleEncoding :: IO TextEncoding
-- | The Unicode encoding of the current locale, but allowing arbitrary
-- undecodable bytes to be round-tripped through it.
--
-- This 'TextEncoding' is used to decode and encode command line arguments
-- and environment variables on non-Windows platforms.
--
-- On Windows, this encoding *should not* be used if possible because
-- the use of code pages is deprecated: Strings should be retrieved
-- via the "wide" W-family of UTF-16 APIs instead
--
-- @since 4.5.0.0
getFileSystemEncoding :: IO TextEncoding
-- | The Unicode encoding of the current locale, but where undecodable
-- bytes are replaced with their closest visual match. Used for
-- the 'CString' marshalling functions in "Foreign.C.String"
--
-- @since 4.5.0.0
getForeignEncoding :: IO TextEncoding
-- | @since 4.5.0.0
setLocaleEncoding, setFileSystemEncoding, setForeignEncoding :: TextEncoding -> IO ()
(getLocaleEncoding, setLocaleEncoding) = mkGlobal initLocaleEncoding
(getFileSystemEncoding, setFileSystemEncoding) = mkGlobal initFileSystemEncoding
(getForeignEncoding, setForeignEncoding) = mkGlobal initForeignEncoding
mkGlobal :: a -> (IO a, a -> IO ())
mkGlobal x = unsafePerformIO $ do
x_ref <- newIORef x
return (readIORef x_ref, writeIORef x_ref)
-- | @since 4.5.0.0
initLocaleEncoding, initFileSystemEncoding, initForeignEncoding :: TextEncoding
#if !defined(mingw32_HOST_OS)
-- It is rather important that we don't just call Iconv.mkIconvEncoding here
-- because some iconvs (in particular GNU iconv) will brokenly UTF-8 encode
-- lone surrogates without complaint.
--
-- By going through our Haskell implementations of those encodings, we are
-- guaranteed to catch such errors.
--
-- FIXME: this is not a complete solution because if the locale encoding is one
-- which we don't have a Haskell-side decoder for, iconv might still ignore the
-- lone surrogate in the input.
initLocaleEncoding = unsafePerformIO $ mkTextEncoding' ErrorOnCodingFailure Iconv.localeEncodingName
initFileSystemEncoding = unsafePerformIO $ mkTextEncoding' RoundtripFailure Iconv.localeEncodingName
initForeignEncoding = unsafePerformIO $ mkTextEncoding' IgnoreCodingFailure Iconv.localeEncodingName
#else
initLocaleEncoding = CodePage.localeEncoding
initFileSystemEncoding = CodePage.mkLocaleEncoding RoundtripFailure
initForeignEncoding = CodePage.mkLocaleEncoding IgnoreCodingFailure
#endif
-- See Note [Windows Unicode Arguments] in rts/RtsFlags.c
-- On Windows we assume hs_init argv is in utf8 encoding.
-- | Internal encoding of argv
argvEncoding :: IO TextEncoding
#if defined(mingw32_HOST_OS)
argvEncoding = return utf8
#else
argvEncoding = getFileSystemEncoding
#endif
-- | An encoding in which Unicode code points are translated to bytes
-- by taking the code point modulo 256. When decoding, bytes are
-- translated directly into the equivalent code point.
--
-- This encoding never fails in either direction. However, encoding
-- discards information, so encode followed by decode is not the
-- identity.
--
-- @since 4.4.0.0
char8 :: TextEncoding
char8 = Latin1.latin1
-- | Look up the named Unicode encoding. May fail with
--
-- * 'isDoesNotExistError' if the encoding is unknown
--
-- The set of known encodings is system-dependent, but includes at least:
--
-- * @UTF-8@
--
-- * @UTF-16@, @UTF-16BE@, @UTF-16LE@
--
-- * @UTF-32@, @UTF-32BE@, @UTF-32LE@
--
-- There is additional notation (borrowed from GNU iconv) for specifying
-- how illegal characters are handled:
--
-- * a suffix of @\/\/IGNORE@, e.g. @UTF-8\/\/IGNORE@, will cause
-- all illegal sequences on input to be ignored, and on output
-- will drop all code points that have no representation in the
-- target encoding.
--
-- * a suffix of @\/\/TRANSLIT@ will choose a replacement character
-- for illegal sequences or code points.
--
-- * a suffix of @\/\/ROUNDTRIP@ will use a PEP383-style escape mechanism
-- to represent any invalid bytes in the input as Unicode codepoints (specifically,
-- as lone surrogates, which are normally invalid in UTF-32).
-- Upon output, these special codepoints are detected and turned back into the
-- corresponding original byte.
--
-- In theory, this mechanism allows arbitrary data to be roundtripped via
-- a 'String' with no loss of data. In practice, there are two limitations
-- to be aware of:
--
-- 1. This only stands a chance of working for an encoding which is an ASCII
-- superset, as for security reasons we refuse to escape any bytes smaller
-- than 128. Many encodings of interest are ASCII supersets (in particular,
-- you can assume that the locale encoding is an ASCII superset) but many
-- (such as UTF-16) are not.
--
-- 2. If the underlying encoding is not itself roundtrippable, this mechanism
-- can fail. Roundtrippable encodings are those which have an injective mapping
-- into Unicode. Almost all encodings meet this criteria, but some do not. Notably,
-- Shift-JIS (CP932) and Big5 contain several different encodings of the same
-- Unicode codepoint.
--
-- On Windows, you can access supported code pages with the prefix
-- @CP@; for example, @\"CP1250\"@.
--
mkTextEncoding :: String -> IO TextEncoding
mkTextEncoding e = case mb_coding_failure_mode of
Nothing -> unknownEncodingErr e
Just cfm -> mkTextEncoding' cfm enc
where
(enc, suffix) = span (/= '/') e
mb_coding_failure_mode = case suffix of
"" -> Just ErrorOnCodingFailure
"//IGNORE" -> Just IgnoreCodingFailure
"//TRANSLIT" -> Just TransliterateCodingFailure
"//ROUNDTRIP" -> Just RoundtripFailure
_ -> Nothing
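-- Illustrative usage ('hSetEncoding' comes from "System.IO", not from this
-- module):
-- > enc <- mkTextEncoding "UTF-8//IGNORE"
-- > hSetEncoding h enc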
mkTextEncoding' :: CodingFailureMode -> String -> IO TextEncoding
mkTextEncoding' cfm enc =
case [toUpper c | c <- enc, c /= '-'] of
-- UTF-8 and friends we can handle ourselves
"UTF8" -> return $ UTF8.mkUTF8 cfm
"UTF16" -> return $ UTF16.mkUTF16 cfm
"UTF16LE" -> return $ UTF16.mkUTF16le cfm
"UTF16BE" -> return $ UTF16.mkUTF16be cfm
"UTF32" -> return $ UTF32.mkUTF32 cfm
"UTF32LE" -> return $ UTF32.mkUTF32le cfm
"UTF32BE" -> return $ UTF32.mkUTF32be cfm
-- On AIX, we want to avoid iconv, because it is either
-- a) totally broken, or b) non-reentrant, or c) actually works.
-- Detecting b) is difficult as you'd have to trigger the reentrancy
-- corruption.
-- Therefore, on AIX, we handle the popular ASCII and latin1 encodings
-- ourselves. For consistency, we do the same on other platforms.
-- We use `mkLatin1_checked` instead of `mkLatin1`, since the latter
-- completely ignores the CodingFailureMode (TEST=encoding005).
_ | isAscii -> return (Latin1.mkAscii cfm)
_ | isLatin1 -> return (Latin1.mkLatin1_checked cfm)
#if defined(mingw32_HOST_OS)
'C':'P':n | [(cp,"")] <- reads n -> return $ CodePage.mkCodePageEncoding cfm cp
_ -> unknownEncodingErr (enc ++ codingFailureModeSuffix cfm)
#else
-- Otherwise, handle other encoding needs via iconv.
-- Unfortunately there is no good way to determine whether iconv is actually
-- functional without telling it to do something.
_ -> do res <- Iconv.mkIconvEncoding cfm enc
case res of
Just e -> return e
Nothing -> unknownEncodingErr (enc ++ codingFailureModeSuffix cfm)
#endif
where
isAscii = enc `elem` asciiEncNames
isLatin1 = enc `elem` latin1EncNames
asciiEncNames = -- ASCII aliases specified by RFC 1345 and RFC 3808.
[ "ANSI_X3.4-1968", "iso-ir-6", "ANSI_X3.4-1986", "ISO_646.irv:1991"
, "US-ASCII", "us", "IBM367", "cp367", "csASCII", "ASCII", "ISO646-US"
]
latin1EncNames = -- latin1 aliases specified by RFC 1345 and RFC 3808.
[ "ISO_8859-1:1987", "iso-ir-100", "ISO_8859-1", "ISO-8859-1", "latin1",
"l1", "IBM819", "CP819", "csISOLatin1"
]
latin1_encode :: CharBuffer -> Buffer Word8 -> IO (CharBuffer, Buffer Word8)
latin1_encode input output = fmap (\(_why,input',output') -> (input',output')) $ Latin1.latin1_encode input output -- unchecked, used for char8
--latin1_encode = unsafePerformIO $ do mkTextEncoder Iconv.latin1 >>= return.encode
latin1_decode :: Buffer Word8 -> CharBuffer -> IO (Buffer Word8, CharBuffer)
latin1_decode input output = fmap (\(_why,input',output') -> (input',output')) $ Latin1.latin1_decode input output
--latin1_decode = unsafePerformIO $ do mkTextDecoder Iconv.latin1 >>= return.encode
unknownEncodingErr :: String -> IO a
unknownEncodingErr e = ioException (IOError Nothing NoSuchThing "mkTextEncoding"
("unknown encoding:" ++ e) Nothing Nothing)
| ezyang/ghc | libraries/base/GHC/IO/Encoding.hs | bsd-3-clause | 12,086 | 0 | 13 | 2,241 | 1,398 | 843 | 555 | 109 | 11 |
module T5623 where
import Foreign.Storable
import Control.Monad
import GHC.Ptr
foo :: Ptr Float -> IO Float
foo p = liftM2 (+) (peekElemOff q 0) (peekElemOff q 1)
where
q = p `plusPtr` 4
| shlevy/ghc | testsuite/tests/simplCore/should_compile/T5623.hs | bsd-3-clause | 195 | 0 | 7 | 41 | 80 | 44 | 36 | 7 | 1 |
module T3811f where
class !Foo a
| ghc-android/ghc | testsuite/tests/parser/should_fail/T3811f.hs | bsd-3-clause | 35 | 2 | 5 | 8 | 13 | 7 | 6 | -1 | -1 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses,UndecidableInstances, FlexibleContexts, TypeSynonymInstances #-}
import Data.Monoid
import Control.Monad
import Control.Monad.Writer
import Control.Monad.Trans
import System.IO
import Data.IORef
import Network
import System.Environment (getArgs)
-- 'Action' has constructors, so it must be a data declaration rather than
-- a type synonym.
data Action
  = Atom Action
  | Fork Action Action
  | Stop
-- The continuation monad over 'Action'. It is wrapped in a newtype so that
-- it can be given Functor/Applicative/Monad instances (a type synonym
-- cannot be partially applied in an instance head).
newtype C a = C { runC :: (a -> Action) -> Action }
instance Functor C where
  fmap = liftM
instance Applicative C where
  pure x = C $ \k -> k x
  (<*>) = ap
instance Monad C where
  m >>= f = C $ \k -> runC m (\a -> runC (f a) k)
| kayceesrk/code-snippets | poor_man_conc.hs | isc | 549 | 5 | 9 | 115 | 186 | 104 | 82 | -1 | -1 |
module PhotonMap
(
PhotonMap
, PhotonSurfaceInteraction
, count
, generatePhotonMap
, getLightToViewerAtIntersection
)
where
import Numeric.FastMath ( )
import Control.DeepSeq ( NFData(..), force )
import Control.Monad ( replicateM, liftM )
import Data.KdMap.Static ( KdMap, buildWithDist, inRadius )
import Core ( Point(..), Ray(..), UnitVector
, normalize, translate, neg, magnitude, to, (|*|), (|.|), (|-|) )
import Light ( Light, PhotonLightSource, sumLights, scaled )
import Material ( probabilityDiffuseReflection, probabilitySpecularReflection
, diffuseLight, specularLight, brdf )
import Rnd ( Rnd, rndDouble, rndDirectionInHemisphere )
import Scene ( Scene, Intersection(..), allPhotonLightSources, sceneIntersection )
import Surface ( Surface(..) )
data PhotonSurfaceInteraction = PhotonSurfaceInteraction !UnitVector !Light
instance NFData PhotonSurfaceInteraction where
rnf (PhotonSurfaceInteraction !v !l) = rnf v `seq` rnf l `seq` ()
data PhotonMap = PhotonMap (KdMap Double Point PhotonSurfaceInteraction) !Int !Double
instance NFData PhotonMap where
rnf (PhotonMap !k !n !s) = rnf k `seq` rnf n `seq` rnf s `seq` ()
generatePhotonMap :: Scene -> Int -> Rnd PhotonMap
generatePhotonMap scene num = do
psis <- generatePhotonSurfaceInxs scene num
return $ force $ PhotonMap (buildWithDist pointToList distSquared psis) (length psis) (1.0 / fromIntegral num)
where
pointToList (Point !x !y !z) = [x, y, z]
distSquared (Point !x1 !y1 !z1) (Point !x2 !y2 !z2) = xd * xd + yd * yd + zd * zd
where
xd = x1 - x2
yd = y1 - y2
zd = z1 - z2
count :: PhotonMap -> Int
count (PhotonMap _ n _) = n
generatePhotonSurfaceInxs :: Scene -> Int -> Rnd [(Point, PhotonSurfaceInteraction)]
generatePhotonSurfaceInxs scene num =
concatM $ mapM (generatePhotonSurfaceInxsForLightSource scene numPerLight) lightSources
where
lightSources = allPhotonLightSources scene
numPerLight = num `div` length lightSources
generatePhotonSurfaceInxsForLightSource :: Scene -> Int -> PhotonLightSource -> Rnd [(Point, PhotonSurfaceInteraction)]
generatePhotonSurfaceInxsForLightSource scene num lightSource =
concatM $ replicateM num $ generateSinglePhotonSurfaceInxn scene lightSource
generateSinglePhotonSurfaceInxn :: Scene -> PhotonLightSource -> Rnd [(Point, PhotonSurfaceInteraction)]
generateSinglePhotonSurfaceInxn scene lightSource =
lightSource >>= traceLightRay scene
traceLightRay :: Scene -> (Ray, Light) -> Rnd [(Point, PhotonSurfaceInteraction)]
traceLightRay !scene !incoming@(!incomingRay, incomingLight) =
case maybeIntersection of
Nothing -> return []
Just ix -> do
maybeOutgoingLight <- computeOutgoingLightRay ix incoming
let photonIntersection = toPhotonIntersection ix
recurse <- maybe (return []) (traceLightRay scene) maybeOutgoingLight
return (photonIntersection : recurse)
where
!maybeIntersection = sceneIntersection scene incomingRay
toPhotonIntersection (Intersection (Ray _ !rd) _ _ !pos) =
(pos, PhotonSurfaceInteraction rd incomingLight)
computeOutgoingLightRay :: Intersection -> (Ray, Light) -> Rnd (Maybe (Ray, Light))
computeOutgoingLightRay (Intersection _ (Surface _ !nrm !material) _ !wp) (Ray _ !incomingRay, !incomingLight) = do
prob <- rndDouble 0.0 1.0
go prob
where
!pd = probabilityDiffuseReflection material
ps = probabilitySpecularReflection material
go prob | prob < pd = goDiffuse
| prob < pd + ps = goSpecular
| otherwise = return Nothing
goDiffuse = do
dr <- diffuseReflect surfaceNormal
return $ Just ( Ray movedFromSurface dr
, diffuseLight material incomingLight
)
goSpecular =
return $ Just ( Ray movedFromSurface $ specularReflect surfaceNormal incomingRay
, specularLight material incomingLight
)
!surfaceNormal = nrm wp
!movedFromSurface = translate (surfaceNormal |*| epsilon) wp
!epsilon = 0.0001
specularReflect :: UnitVector -> UnitVector -> UnitVector
specularReflect !surfaceNormal !incomingRay =
normalize $ surfaceNormal |*| ((surfaceNormal |*| 2) |.| incomingRay) |-| incomingRay
diffuseReflect :: UnitVector -> Rnd UnitVector
diffuseReflect =
rndDirectionInHemisphere
getLightToViewerAtIntersection :: PhotonMap -> Intersection -> Light
getLightToViewerAtIntersection (PhotonMap !kdmap _ !scale) (Intersection (Ray _ !outgoingVector) (Surface _ !nrm !material) _ !wp) =
(sumLights $ map attenuateByDistance nearInteractions) `scaled` scale
where
attenuateByDistance (!pp, !psi) =
brdfForInteraction psi `scaled` coneFilter pp wp maxDistance
brdfForInteraction (PhotonSurfaceInteraction !incomingVector !incomingLight) =
surfaceBrdf incomingLight (neg incomingVector) (neg outgoingVector) surfaceNormal wp
!surfaceNormal = nrm wp
!surfaceBrdf = brdf material
!nearInteractions = inRadius kdmap maxDistance wp
!maxDistance = 5.0
concatM :: Monad m => m [[a]] -> m [a]
concatM =
liftM concat
coneFilter :: Point -> Point -> Double -> Double
coneFilter !pp !wp !maxDistance =
(1.0 - distance / (2.0 * maxDistance)) / maxDistance
where
!distance = magnitude (pp `to` wp)
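-- For example (illustrative), with @maxDistance = 5@ a photon exactly at the
-- intersection point (distance 0) gets weight (1 - 0) / 5 = 0.2.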
-- gaussianFilter :: Point -> Point -> Double -> Double
-- gaussianFilter !pp !wp !maxDistance =
-- a * (1.0 - (1.0 - exp (mb * px)) / dv)
-- where
-- !a = 0.918
-- !mb = -1.953
-- !dv = 1.0 - exp mb
-- !ds = magnitudeSquared (pp `to` wp)
-- !px = ds / (2.0 * maxDistance * maxDistance)
| stu-smith/rendering-in-haskell | src/experiment06/PhotonMap.hs | mit | 5,832 | 0 | 14 | 1,329 | 1,625 | 834 | 791 | -1 | -1 |
module AoC.Day4 (
Room,
hash,
sector,
checksum,
roomParser,
hashLetterCompare,
isRealRoom,
order,
solvers
) where
import GHC.Exts (groupWith)
import Data.Char
import Data.List (sortBy, intercalate, elemIndex, isInfixOf)
import Data.Text (Text)
import Text.Parsec.Char
import Text.Parsec.Combinator
import Text.Parsec.Text
import Text.Parsec.Prim hiding ((<|>), many)
import AoC.Combinator
data Room = Room { hash :: String, sector :: Int, checksum :: String }
deriving (Eq, Show)
hashParser :: Parser String
hashParser = concat <$> endBy1 (many1 letter) (char '-')
sectorIdParser :: Parser Int
sectorIdParser = read <$> many1 digit
checksumParser :: Parser String
checksumParser = char '[' *> many1 letter <* char ']'
roomParser :: Parser Room
roomParser = Room <$> hashParser <*> sectorIdParser <*> checksumParser
inputParser :: Parser [Room]
inputParser = endBy roomParser endOfLine
hashLetterCompare :: Ord a => [a] -> [a] -> Ordering
hashLetterCompare l1s l2s =
case compare (length l2s) (length l1s) of
LT -> LT
GT -> GT
EQ -> compare (head l1s) (head l2s)
order :: String -> String
order = fmap head . sortBy hashLetterCompare . groupWith id
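-- For example (illustrative):
-- > order "aabbbc" == "bac"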
isRealRoom :: Room -> Bool
isRealRoom r = compute r == checksum r
where compute = take 5 . order . hash
solver_1 :: [Room] -> Int
solver_1 =
filter isRealRoom
|> fmap sector
|> sum
alphabet = "abcdefghijklmnopqrstuvwxyz"
rotate :: Int -> Char -> Char
rotate n c = alphabet !! mod (base + n) 26
where
base = unwrap (elemIndex c alphabet)
decode :: Room -> Room
decode (Room h s c) = Room (fmap (rotate s) h) s c
solver_2 :: [Room] -> [Room]
solver_2 =
filter isRealRoom
|> fmap decode
|> filter (isInfixOf "northpole" . hash)
solvers :: [Text -> Text]
solvers = [process inputParser solver_1, process inputParser solver_2]
| rzeigler/advent-of-code-2016 | Lib/AoC/Day4.hs | mit | 1,842 | 0 | 10 | 352 | 676 | 362 | 314 | 60 | 3 |
module Raw (toRawFile) where
toRawFile :: [String] -> String
toRawFile l = toRawArray l 1
where toRawArray :: [String] -> Int -> String
toRawArray [] _ = []
toRawArray (x:xs) n = x ++ nl ++ toRawArray xs (n + 1)
          where nl | n `mod` 16 == 0 = "\n"
                   | otherwise       = " "
| cirquit/Personal-Repository | Haskell/Playground/UTFTConverter/src/Raw.hs | mit | 339 | 0 | 13 | 131 | 135 | 71 | 64 | 9 | 2 |
module API where
import Control.Monad.Reader
import Control.Monad.Writer.Strict
import Control.Monad.Except
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString.Builder as B
import qualified Data.ByteString.Lazy as L
import Data.Time.Clock
import Network.HTTP.Conduit
import Network.HTTP.Types (hAuthorization, Status, Method)
import Network.Google.OAuth2
import Prelude hiding (log)
type API
= ExceptT String
( WriterT [String]
( ReaderT Manager
IO ))
type Token = OAuth2Token
type Client = OAuth2Client
type URL = String
useAPIWith :: API a -> Manager -> IO (Either String a, [String])
useAPIWith api manager =
runWriterT (runExceptT api) `runReaderT` manager
runAPI :: API a -> IO (Either String a)
runAPI api = do
manager <- newManager tlsManagerSettings
(result, logs) <- api `useAPIWith` manager
mapM_ putStrLn logs
return result
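-- Illustrative usage (the action is hypothetical):
-- > res <- runAPI (log "pinging service" >> return 42)
-- This prints the collected log lines and yields Right 42.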
io :: IO a -> API a
io = liftIO
runAuthorized :: Token -> Request -> API (Response L.ByteString)
runAuthorized token request = do
manager <- ask
io $ authorize token request `httpLbs` manager
authorize token request = request
{ requestHeaders =
[ (hAuthorization, B8.pack $ "Bearer " ++ token) ]
++ requestHeaders request }
log :: String -> API ()
log line = do
time <- io getCurrentTime
tell [show time ++ ": " ++ line ++ "\n"]
die :: [String] -> API a
die pack = do
let msg = unwords pack
log $ unwords ["ERROR:", msg]
throwError msg
| AdityaKumarRavikanti/Boxes | src/API.hs | mit | 1,633 | 0 | 12 | 450 | 510 | 275 | 235 | 48 | 1 |
-- Project Euler 10: sum all primes below two million
-- (simple trial-division sieve; correct but slow for this bound).
primes = f [2..]
  where
    f (x:xs) = x : f (filter (\y -> mod y x /= 0) xs)
main = do
  print (sum (takeWhile (\x -> x <= 2000000) primes))
| ddeeff/sandbox | haskell/eular/10.hs | mit | 145 | 0 | 14 | 42 | 98 | 50 | 48 | 4 | 1 |
module Unused.CLI.Views.FingerprintError
( fingerprintError
) where
import qualified Data.List as L
import qualified Unused.CLI.Views.Error as V
import Unused.Cache.DirectoryFingerprint (FingerprintOutcome(..))
fingerprintError :: FingerprintOutcome -> IO ()
fingerprintError e = do
V.errorHeader "There was a problem generating a cache fingerprint:"
printOutcomeMessage e
printOutcomeMessage :: FingerprintOutcome -> IO ()
printOutcomeMessage (MD5ExecutableNotFound execs) =
putStrLn $
"Unable to find any of the following executables \
\in your PATH: " ++ L.intercalate ", " execs
| joshuaclayton/unused | src/Unused/CLI/Views/FingerprintError.hs | mit | 633 | 0 | 8 | 120 | 125 | 70 | 55 | 14 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Emit where
import Debug.Trace
import LLVM.General.Module
import LLVM.General.Context
import qualified LLVM.General.AST as AST
import qualified LLVM.General.AST.Constant as C
import qualified LLVM.General.AST.Float as F
import qualified LLVM.General.AST.FloatingPointPredicate as FP
import Data.Word
import Data.Int
import Control.Monad.Except
import Control.Applicative
import qualified Data.Map as Map
import JIT
import Utils
import Codegen
import qualified Syntax as S
one = cons $ C.Float (F.Double 1.0)
zero = cons $ C.Float (F.Double 0.0)
false = zero
true = one
astTypeFromString :: String -> AST.Type
astTypeFromString varType
| varType == "Double" = double
| otherwise = emptyType
varDefName :: S.Expr -> String
varDefName (S.VarDef varName _) = varName
varDefType :: S.Expr -> AST.Type
varDefType (S.VarDef _ varType) = astTypeFromString varType
toSig :: [S.Expr] -> [(AST.Type, AST.Name)]
toSig = map (\varDef -> (varDefType varDef, AST.Name (varDefName varDef)))
codegenTop :: S.Expr -> LLVM ()
codegenTop (S.Function name args body) = do
define double name fnargs bls
where
fnargs = toSig args
bls = createBlocks $ execCodegen $ do
entry <- addBlock entryBlockName
setBlock entry
forM args $ \varDef -> do
let varName = varDefName varDef
var <- alloca double
store var (local (AST.Name varName))
assign varName var
cgen body >>= ret
codegenTop (S.Extern name args) = do
external double name fnargs
where fnargs = toSig args
codegenTop exp = do
define double "main" [] blks
where
blks = createBlocks $ execCodegen $ do
entry <- addBlock entryBlockName
setBlock entry
cgen exp >>= ret
-------------------------------------------------------------------------------
-- Operations
-------------------------------------------------------------------------------
lt :: AST.Operand -> AST.Operand -> Codegen AST.Operand
lt a b = do
test <- fcmp FP.ULT a b
uitofp double test
binops = Map.fromList [
("+", fadd)
, ("-", fsub)
, ("*", fmul)
, ("/", fdiv)
, ("<", lt)
]
-- Example AST
-- [When [Clause (BinaryOp "<" (Var "x") (Float 3.0)) (Float 1.0),Clause Else (BinaryOp "+" (Call "fib" [BinaryOp "-" (Var "x") (Float 1.0)]) (Call "fib" [BinaryOp "-" (Var "x") (Float 2.0)]))]]
cgen :: S.Expr -> Codegen AST.Operand
--cgen a | trace ("cgen " ++ show a) False = undefined
cgen (S.UnaryOp op a) = do
cgen $ S.Call ("unary" ++ op) [a]
cgen (S.BinaryOp "=" (S.Var var) val) = do
a <- getvar var
cval <- cgen val
store a cval
return cval
cgen (S.BinaryOp op a b) = do
case Map.lookup op binops of
Just f -> do
ca <- cgen a
cb <- cgen b
f ca cb
Nothing -> error "No such operator"
cgen (S.Var x) = getvar x >>= load
cgen (S.Float n) = return $ cons $ C.Float (F.Double n)
cgen (S.Call fn args) = do
largs <- mapM cgen args
call (externf (AST.Name fn)) largs
cgen (S.When clauses) = do
entryBlock <- entry
exitBlock <- addBlock "case.exit"
  -- Should throw an error unless there is exactly one else clause;
  -- the else clause must always come last.
let elseClause = last clauses
let otherClauses = reverse (init clauses)
elsePhi <- processElse exitBlock elseClause
descendants <- scanM (processClause exitBlock) elsePhi otherClauses
setBlock entryBlock
let firstBlock = snd (last descendants)
br firstBlock
setBlock exitBlock
phi double (map fst descendants)
processElse :: AST.Name -> S.Expr -> Codegen ((AST.Operand, AST.Name), AST.Name)
processElse exitBlock (S.Clause S.Else code) = do
elseBlock <- addBlock "case.else"
setBlock elseBlock
val <- cgen code
br exitBlock
elseBlock <- getBlock
return ((val, elseBlock), elseBlock)
processClause :: AST.Name -> ((AST.Operand, AST.Name), AST.Name) -> S.Expr -> Codegen ((AST.Operand, AST.Name), AST.Name)
processClause exitBlock nextPhi (S.Clause cond code) = do
let nextBlock = snd nextPhi
caseTestBlock <- addBlock "case.test.block"
caseCodeBlock <- addBlock "case.code.block"
setBlock caseTestBlock
cond <- cgen cond
test <- fcmp FP.ONE false cond
cbr test caseCodeBlock nextBlock
caseTestBlock <- getBlock
setBlock caseCodeBlock
val <- cgen code -- Generate code for the condition
br exitBlock
caseCodeBlock <- getBlock
return ((val, caseCodeBlock), caseTestBlock)
-------------------------------------------------------------------------------
-- Compilation
-------------------------------------------------------------------------------
liftError :: ExceptT String IO a -> IO a
liftError = runExceptT >=> either fail return
codegen :: AST.Module -> [S.Expr] -> IO AST.Module
codegen mod fns = do
res <- runJIT oldast
case res of
Right newast -> return newast
Left err -> putStrLn err >> return oldast
where
modn = mapM codegenTop fns
    oldast = runLLVM mod modn
| NoxHarmonium/lithium-lang | src/Emit.hs | mit | 5,020 | 0 | 19 | 1,069 | 1,635 | 810 | 825 | 129 | 2 |
{-# LANGUAGE CPP #-}
-- |
-- Experimental combinators, that may become part of the main distribution, if
-- they turn out to be useful for a wider audience.
module Test.Hspec.Expectations.Pretty.Contrib (
-- * Predicates
-- | (useful in combination with `shouldSatisfy`)
isLeft
, isRight
) where
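-- For example (illustrative), together with @shouldSatisfy@:
-- > parseSomething "bad input" `shouldSatisfy` isLeft
-- where @parseSomething@ is any hypothetical function returning an 'Either'.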
#if MIN_VERSION_base(4,7,0)
import Data.Either
#else
isLeft :: Either a b -> Bool
{-# DEPRECATED isLeft "use Data.Either.Compat.isLeft from package base-compat instead" #-}
isLeft (Left _) = True
isLeft (Right _) = False
isRight :: Either a b -> Bool
{-# DEPRECATED isRight "use Data.Either.Compat.isRight from package base-compat instead" #-}
isRight (Left _) = False
isRight (Right _) = True
#endif
| myfreeweb/hspec-expectations-pretty-diff | src/Test/Hspec/Expectations/Pretty/Contrib.hs | mit | 705 | 0 | 4 | 115 | 31 | 24 | 7 | 12 | 1 |
module PodTypes where
data Podcast =
Podcast {castId :: Integer
, castURL :: String
}
deriving (Eq, Show, Read)
data Episode =
Episode {epId :: Integer
, epCast :: Integer
, epURL :: String
, epDone :: Bool
}
deriving (Eq, Show, Read)
| zhangjiji/real-world-haskell | ch22/PodTypes.hs | mit | 311 | 0 | 8 | 119 | 88 | 53 | 35 | 11 | 0 |
module Atom.TextEditor where
import Control.Applicative
import Data.Text (Text)
import GHCJS.Foreign
import GHCJS.Prim (toJSInt)
import GHCJS.Types
import Atom.Marker
import Atom.Decoration
data TextEditor_
type TextEditor = JSRef TextEditor_
foreign import javascript unsafe
"$1.getPath()" js_getPath :: TextEditor -> IO JSString
type Path = Text
getPath :: TextEditor -> IO Path
getPath editor = fromJSString <$> js_getPath editor
data Range_
type Range = JSRef Range_
foreign import javascript unsafe
"new require('atom').Range([$1, $2], [$3, $4]).freeze()"
js_rangeBetween :: JSNumber -> JSNumber -> JSNumber -> JSNumber -> Range
rangeBetween :: Int -> Int -> Int -> Int -> Range
rangeBetween sx sy ex ey =
  js_rangeBetween (toJSInt sx) (toJSInt sy) (toJSInt ex) (toJSInt ey)
foreign import javascript unsafe
"$1.markBufferRange($2)"
markBufferRange :: TextEditor -> Range -> IO Marker
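-- A rough usage sketch (illustrative values): mark the first ten columns of
-- the first buffer line and keep the resulting marker around, e.g.
--
-- > marker <- markBufferRange editor (rangeBetween 0 0 0 10)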
foreign import javascript unsafe
"$1.decorateMarker($2, {type: 'highlight', class: $3})"
js_decorateMarker :: TextEditor -> Marker -> JSString -> IO JSDecoration
type ClassName = Text
decorateMarker :: TextEditor -> Marker -> ClassName -> IO Decoration
decorateMarker editor marker className =
fromJSDecoration <$> js_decorateMarker editor marker (toJSString className) | CRogers/stack-ide-atom | haskell/src/Atom/TextEditor.hs | mit | 1,292 | 10 | 9 | 196 | 346 | 180 | 166 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
module Main where
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Either (runEitherT)
import Control.Monad.Trans.Reader (ReaderT, runReaderT, ask)
import Data.String (IsString(..))
import qualified Data.Text.IO as T
import Safe (atMay)
import System.Exit (ExitCode(..), exitWith)
import System.IO (hSetBuffering, BufferMode(NoBuffering), stdout)
import Servant.Client (ServantError)
import Handlers (runAuth, runQuery)
import Types ( SearchResult(..), QueryString(..)
, Album(..), Artist(..))
showError :: ServantError -> IO ExitCode
showError err = do
putStrLn $ "Error: " ++ show err
return $ ExitFailure 1
numerate :: (Monoid str, IsString str)
=> (a -> str) -> Int -> [a] -> [str]
numerate f start xs = let addBrace = (`mappend` ") ") . fromString . show
numbers = fmap addBrace [start..]
list = fmap f xs
in zipWith mappend numbers list
showInfo :: SearchResult -> IO ()
showInfo = runReaderT $ do
SearchResult{..} <- ask
liftIO $ do
let artistsCount = length artists
startNumber = artistsCount + 1
showAlbums album = mconcat [ title album
, " ("
, fromString . show $ year album
, ")"]
numeratedArtists = numerate name 1 artists
numeratedAlbums = numerate showAlbums startNumber albums
T.putStrLn "\nFound artists:"
mapM_ T.putStrLn numeratedArtists
T.putStrLn "\nFound albums:"
mapM_ T.putStrLn numeratedAlbums
choosePoint
choosePoint :: ReaderT SearchResult IO ()
choosePoint = do
SearchResult{..} <- ask
liftIO $ T.putStr "\nChoose point: "
point <- liftIO readLn
let failCase = liftIO $ T.putStrLn "Point is out of bounds! Please try again!"
safePrint xs i = maybe (failCase >> choosePoint)
(liftIO . print)
(xs `atMay` i)
artistsCount = length artists
startNumber = artistsCount + 1
if point > artistsCount
then safePrint albums (point - startNumber)
else safePrint artists (point - 1)
main :: IO ()
main = do
hSetBuffering stdout NoBuffering
T.putStr "Input search query: "
query <- T.getLine
res <- runEitherT $ do
auth <- runAuth
searchResult <- runQuery auth $ QueryString query
liftIO $ showInfo searchResult
either showError (const $ return ExitSuccess) res >>= exitWith
| ushfnuk/music | src/Main.hs | mit | 2,645 | 0 | 17 | 746 | 768 | 404 | 364 | 67 | 2 |
{-# LANGUAGE BangPatterns #-}
-- TODO: Add some comments describing how this implementation works.
-- | A reimplementation of Data.WordMap that seems to be 1.4-4x faster.
module Data.IntMap.Bounded.Base where
import Control.DeepSeq (NFData(..))
import Control.Applicative (Applicative(..))
import Data.Monoid (Monoid(..))
import qualified Data.Foldable (Foldable(..))
import Data.Traversable (Traversable(..))
import Data.Functor ((<$>))
import Data.Word (Word)
import Data.Bits (xor)
import Data.WordMap.Base (WordMap(..), WordMap_(..), Node(..))
import qualified Data.WordMap.Base as W
import qualified Data.WordMap.Lazy as W (unionM, differenceM, intersectionM)
import qualified Data.IntSet (IntSet, fromDistinctAscList, member, notMember)
import Prelude hiding (foldr, foldl, lookup, null, map, min, max)
type Key = Int
newtype IntMap a = IntMap (W.WordMap a) deriving (Eq)
instance Show a => Show (IntMap a) where
show m = "fromList " ++ show (toList m)
instance Functor IntMap where
fmap f (IntMap m) = IntMap (fmap f m)
instance Data.Foldable.Foldable IntMap where
foldr = foldr
foldr' = foldr'
foldl = foldl
foldl' = foldl'
instance Traversable IntMap where
traverse f = start
where
start (IntMap (WordMap Empty)) = pure (IntMap (WordMap Empty))
start (IntMap (WordMap (NonEmpty min minV Tip))) = (\minV' -> IntMap (WordMap (NonEmpty min minV' Tip))) <$> f minV
start (IntMap (WordMap (NonEmpty min minV (Bin max maxV l r))))
| w2i (xor min max) < 0 =
(\r' maxV' minV' l' -> IntMap (WordMap (NonEmpty min minV' (Bin max maxV' l' r'))))
<$> goR r <*> f maxV <*> f minV <*> goL l
| otherwise =
(\minV' l' r' maxV' -> IntMap (WordMap (NonEmpty min minV' (Bin max maxV' l' r'))))
<$> f minV <*> goL l <*> goR r <*> f maxV
goL Tip = pure Tip
goL (Bin max maxV l r) = (\l' r' v' -> Bin max v' l' r') <$> goL l <*> goR r <*> f maxV
goR Tip = pure Tip
goR (Bin min minV l r) = Bin min <$> f minV <*> goL l <*> goR r
instance Monoid (IntMap a) where
mempty = empty
mappend = union
instance NFData a => NFData (IntMap a) where
rnf (IntMap m) = rnf m
-- | /O(min(n,W))/. Find the value at a key.
-- Calls 'error' when the element can not be found.
--
-- > fromList [(5,'a'), (3,'b')] ! 1 Error: element not in the map
-- > fromList [(5,'a'), (3,'b')] ! 5 == 'a'
(!) :: IntMap a -> Key -> a
(!) m k = findWithDefault (error $ "IntMap.!: key " ++ show k ++ " is not an element of the map") k m
-- | Same as 'difference'.
(\\) :: IntMap a -> IntMap b -> IntMap a
IntMap m1 \\ IntMap m2 = IntMap (m1 W.\\ m2)
-- | /O(1)/. Is the map empty?
null :: IntMap a -> Bool
null (IntMap m) = W.null m
-- | /O(n)/. Number of elements in the map.
size :: IntMap a -> Int
size (IntMap m) = W.size m
-- | /O(min(n,W))/. Is the key a member of the map?
member :: Key -> IntMap a -> Bool
member k (IntMap m) = W.member (i2w k) m
-- | /O(min(n,W))/. Is the key not a member of the map?
notMember :: Key -> IntMap a -> Bool
notMember k (IntMap m) = W.notMember (i2w k) m
-- | /O(min(n,W))/. Lookup the value at a key in the map.
lookup :: Key -> IntMap a -> Maybe a
lookup k (IntMap m) = W.lookup (i2w k) m
-- | /O(min(n,W))/. The expression @findWithDefault def k map@ returns
-- the value at key @k@ or returns @def@ when the key is not an element
-- of the map.
findWithDefault :: a -> Key -> IntMap a -> a
findWithDefault def k (IntMap m) = W.findWithDefault def (i2w k) m
-- | /O(log n)/. Find largest key smaller than the given one and return the
-- corresponding (key, value) pair.
--
-- > lookupLT 3 (fromList [(3,'a'), (5,'b')]) == Nothing
-- > lookupLT 4 (fromList [(3,'a'), (5,'b')]) == Just (3, 'a')
lookupLT :: Key -> IntMap a -> Maybe (Key, a)
lookupLT k = k `seq` start
where
start (IntMap (WordMap Empty)) = Nothing
start (IntMap (WordMap (NonEmpty min minV Tip)))
| w2i min < k = Just (w2i min, minV)
| otherwise = Nothing
start (IntMap (WordMap (NonEmpty min minV node@(Bin max maxV l r))))
| w2i (xor min max) < 0 =
if w2i min < k -- if this is true, we know the result is positive
then Just (goL (xor min (i2w k)) min minV l)
else if w2i max < k -- if this is true, k is between the positives and negatives
then Just (w2i max, maxV)
else case r of
Tip -> Nothing
Bin minI minVI lI rI
| minI >= i2w k -> Nothing
| otherwise -> Just (goL (xor minI (i2w k)) minI minVI (Bin max maxV lI rI))
| w2i min < k = Just (goL (xor min (i2w k)) min minV node)
| otherwise = Nothing
goL !_ min minV Tip = (w2i min, minV)
goL !xorCache min minV (Bin max maxV l r)
| w2i max < k = (w2i max, maxV)
| xorCache < xorCacheMax = goL xorCache min minV l
| otherwise = goR xorCacheMax r min minV l
where
xorCacheMax = xor (i2w k) max
goR !_ Tip fMin fMinV fallback = getMax fMin fMinV fallback
goR !xorCache (Bin min minV l r) fMin fMinV fallback
| w2i min >= k = getMax fMin fMinV fallback
| xorCache < xorCacheMin = goR xorCache r min minV l
| otherwise = goL xorCacheMin min minV l
where
xorCacheMin = xor min (i2w k)
getMax min minV Tip = (w2i min, minV)
getMax _ _ (Bin max maxV _ _) = (w2i max, maxV)
-- | /O(log n)/. Find largest key smaller or equal to the given one and return
-- the corresponding (key, value) pair.
--
-- > lookupLE 2 (fromList [(3,'a'), (5,'b')]) == Nothing
-- > lookupLE 4 (fromList [(3,'a'), (5,'b')]) == Just (3, 'a')
-- > lookupLE 5 (fromList [(3,'a'), (5,'b')]) == Just (5, 'b')
lookupLE :: Key -> IntMap a -> Maybe (Key, a)
lookupLE k = k `seq` start
where
start (IntMap (WordMap Empty)) = Nothing
start (IntMap (WordMap (NonEmpty min minV Tip)))
| w2i min <= k = Just (w2i min, minV)
| otherwise = Nothing
start (IntMap (WordMap (NonEmpty min minV node@(Bin max maxV l r))))
| w2i (xor min max) < 0 =
if w2i min <= k -- if this is true, we know the result is positive
then Just (goL (xor min (i2w k)) min minV l)
else if w2i max <= k -- if this is true, k is between the positives and negatives
then Just (w2i max, maxV)
else case r of
Tip -> Nothing
Bin minI minVI lI rI
| minI > i2w k -> Nothing
| otherwise -> Just (goL (xor minI (i2w k)) minI minVI (Bin max maxV lI rI))
| w2i min <= k = Just (goL (xor min (i2w k)) min minV node)
| otherwise = Nothing
goL !_ min minV Tip = (w2i min, minV)
goL !xorCache min minV (Bin max maxV l r)
| w2i max <= k = (w2i max, maxV)
| xorCache < xorCacheMax = goL xorCache min minV l
| otherwise = goR xorCacheMax r min minV l
where
xorCacheMax = xor (i2w k) max
goR !_ Tip fMin fMinV fallback = getMax fMin fMinV fallback
goR !xorCache (Bin min minV l r) fMin fMinV fallback
| w2i min > k = getMax fMin fMinV fallback
| xorCache < xorCacheMin = goR xorCache r min minV l
| otherwise = goL xorCacheMin min minV l
where
xorCacheMin = xor min (i2w k)
getMax min minV Tip = (w2i min, minV)
getMax _ _ (Bin max maxV _ _) = (w2i max, maxV)
-- | /O(log n)/. Find smallest key greater than the given one and return the
-- corresponding (key, value) pair.
--
-- > lookupGT 4 (fromList [(3,'a'), (5,'b')]) == Just (5, 'b')
-- > lookupGT 5 (fromList [(3,'a'), (5,'b')]) == Nothing
lookupGT :: Key -> IntMap a -> Maybe (Key, a)
lookupGT k = k `seq` start
where
start (IntMap (WordMap Empty)) = Nothing
start (IntMap (WordMap (NonEmpty min minV Tip)))
| w2i min > k = Just (w2i min, minV)
| otherwise = Nothing
start (IntMap (WordMap (NonEmpty min minV (Bin max maxV l r))))
| w2i (xor min max) < 0 =
if w2i max > k -- if this is true, we know the result is negative
then Just (goR (xor (i2w k) max) max maxV r)
else if w2i min > k -- if this is true, k is between the positives and negatives
then Just (w2i min, minV)
else case l of
Tip -> Nothing
Bin maxI maxVI lI rI
| maxI <= i2w k -> Nothing
| otherwise -> Just (goR (xor (i2w k) maxI) maxI maxVI (Bin min minV lI rI))
| w2i max > k = Just (goR (xor (i2w k) max) max maxV (Bin min minV l r))
| otherwise = Nothing
goL !_ Tip fMax fMaxV fallback = getMin fMax fMaxV fallback
goL !xorCache (Bin max maxV l r) fMax fMaxV fallback
| w2i max <= k = getMin fMax fMaxV fallback
| xorCache < xorCacheMax = goL xorCache l max maxV r
| otherwise = goR xorCacheMax max maxV r
where
xorCacheMax = xor (i2w k) max
goR !_ max maxV Tip = (w2i max, maxV)
goR !xorCache max maxV (Bin min minV l r)
| w2i min > k = (w2i min, minV)
| xorCache < xorCacheMin = goR xorCache max maxV r
| otherwise = goL xorCacheMin l max maxV r
where
xorCacheMin = xor min (i2w k)
getMin max maxV Tip = (w2i max, maxV)
getMin _ _ (Bin min minV _ _) = (w2i min, minV)
-- | /O(log n)/. Find smallest key greater or equal to the given one and return
-- the corresponding (key, value) pair.
--
-- > lookupGE 3 (fromList [(3,'a'), (5,'b')]) == Just (3, 'a')
-- > lookupGE 4 (fromList [(3,'a'), (5,'b')]) == Just (5, 'b')
-- > lookupGE 6 (fromList [(3,'a'), (5,'b')]) == Nothing
lookupGE :: Key -> IntMap a -> Maybe (Key, a)
lookupGE k = k `seq` start
where
start (IntMap (WordMap Empty)) = Nothing
start (IntMap (WordMap (NonEmpty min minV Tip)))
| w2i min >= k = Just (w2i min, minV)
| otherwise = Nothing
start (IntMap (WordMap (NonEmpty min minV (Bin max maxV l r))))
| w2i (xor min max) < 0 =
if w2i max >= k -- if this is true, we know the result is negative
then Just (goR (xor (i2w k) max) max maxV r)
else if w2i min >= k -- if this is true, k is between the positives and negatives
then Just (w2i min, minV)
else case l of
Tip -> Nothing
Bin maxI maxVI lI rI
| maxI < i2w k -> Nothing
| otherwise -> Just (goR (xor (i2w k) maxI) maxI maxVI (Bin min minV lI rI))
| w2i max >= k = Just (goR (xor (i2w k) max) max maxV (Bin min minV l r))
| otherwise = Nothing
goL !_ Tip fMax fMaxV fallback = getMin fMax fMaxV fallback
goL !xorCache (Bin max maxV l r) fMax fMaxV fallback
| w2i max < k = getMin fMax fMaxV fallback
| xorCache < xorCacheMax = goL xorCache l max maxV r
| otherwise = goR xorCacheMax max maxV r
where
xorCacheMax = xor (i2w k) max
goR !_ max maxV Tip = (w2i max, maxV)
goR !xorCache max maxV (Bin min minV l r)
| w2i min >= k = (w2i min, minV)
| xorCache < xorCacheMin = goR xorCache max maxV r
| otherwise = goL xorCacheMin l max maxV r
where
xorCacheMin = xor min (i2w k)
getMin max maxV Tip = (w2i max, maxV)
getMin _ _ (Bin min minV _ _) = (w2i min, minV)
-- | /O(1)/. The empty map.
empty :: IntMap a
empty = IntMap W.empty
-- | /O(min(n,W))/. Delete a key and its value from the map.
-- When the key is not a member of the map, the original map is returned.
delete :: Key -> IntMap a -> IntMap a
delete k (IntMap m) = IntMap (W.delete (i2w k) m)
-- | /O(n+m)/. The (left-biased) union of two maps.
-- It prefers the first map when duplicate keys are encountered,
-- i.e. (@'union' == 'unionWith' 'const'@).
--
-- > union (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == fromList [(3, "b"), (5, "a"), (7, "C")]
union :: IntMap a -> IntMap a -> IntMap a
union (IntMap m1) (IntMap m2) = IntMap (W.union m1 m2)
unionM :: IntMap a -> IntMap a -> IntMap a
unionM (IntMap m1) (IntMap m2) = IntMap (W.unionM m1 m2)
-- | The union of a list of maps.
--
-- > unions [(fromList [(5, "a"), (3, "b")]), (fromList [(5, "A"), (7, "C")]), (fromList [(5, "A3"), (3, "B3")])]
-- > == fromList [(3, "b"), (5, "a"), (7, "C")]
-- > unions [(fromList [(5, "A3"), (3, "B3")]), (fromList [(5, "A"), (7, "C")]), (fromList [(5, "a"), (3, "b")])]
-- > == fromList [(3, "B3"), (5, "A3"), (7, "C")]
unions :: [IntMap a] -> IntMap a
unions = Data.Foldable.foldl' union empty
-- | /O(n+m)/. Difference between two maps (based on keys).
--
-- > difference (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == singleton 3 "b"
difference :: IntMap a -> IntMap b -> IntMap a
difference (IntMap m1) (IntMap m2) = IntMap (W.difference m1 m2)
differenceM :: IntMap a -> IntMap b -> IntMap a
differenceM (IntMap m1) (IntMap m2) = IntMap (W.differenceM m1 m2)
-- | /O(n+m)/. The (left-biased) intersection of two maps (based on keys).
--
-- > intersection (fromList [(5, "a"), (3, "b")]) (fromList [(5, "A"), (7, "C")]) == singleton 5 "a"
intersection :: IntMap a -> IntMap b -> IntMap a
intersection (IntMap m1) (IntMap m2) = IntMap (W.intersection m1 m2)
intersectionM :: IntMap a -> IntMap b -> IntMap a
intersectionM (IntMap m1) (IntMap m2) = IntMap (W.intersectionM m1 m2)
-- | /O(n)/. Fold the values in the map using the given right-associative
-- binary operator, such that @'foldr' f z == 'Prelude.foldr' f z . 'elems'@.
--
-- For example,
--
-- > elems map = foldr (:) [] map
--
-- > let f a len = len + (length a)
-- > foldr f 0 (fromList [(5,"a"), (3,"bbb")]) == 4
{-# INLINE foldr #-}
foldr :: (a -> b -> b) -> b -> IntMap a -> b
foldr f z = start
where
start (IntMap (WordMap Empty)) = z
start (IntMap (WordMap (NonEmpty _ minV Tip))) = f minV z
start (IntMap (WordMap (NonEmpty min minV (Bin max maxV l r))))
| w2i (xor min max) < 0 = goR r (f maxV (f minV (goL l z)))
| otherwise = f minV (goL l (goR r (f maxV z)))
goL Tip acc = acc
goL (Bin _ maxV l r) acc = goL l (goR r (f maxV acc))
goR Tip acc = acc
goR (Bin _ minV l r) acc = f minV (goL l (goR r acc))
-- | /O(n)/. Fold the values in the map using the given left-associative
-- binary operator, such that @'foldl' f z == 'Prelude.foldl' f z . 'elems'@.
--
-- For example,
--
-- > elems = reverse . foldl (flip (:)) []
--
-- > let f len a = len + (length a)
-- > foldl f 0 (fromList [(5,"a"), (3,"bbb")]) == 4
{-# INLINE foldl #-}
foldl :: (a -> b -> a) -> a -> IntMap b -> a
foldl f z = start
where
start (IntMap (WordMap Empty)) = z
start (IntMap (WordMap (NonEmpty _ minV Tip))) = f z minV
start (IntMap (WordMap (NonEmpty min minV (Bin max maxV l r))))
| w2i (xor min max) < 0 = goL (f (f (goR z r) maxV) minV) l
| otherwise = f (goR (goL (f z minV) l) r) maxV
goL acc Tip = acc
goL acc (Bin _ maxV l r) = f (goR (goL acc l) r) maxV
goR acc Tip = acc
goR acc (Bin _ minV l r) = goR (goL (f acc minV) l) r
-- | /O(n)/. Fold the keys and values in the map using the given right-associative
-- binary operator, such that
-- @'foldrWithKey' f z == 'Prelude.foldr' ('uncurry' f) z . 'toAscList'@.
--
-- For example,
--
-- > keys map = foldrWithKey (\k x ks -> k:ks) [] map
--
-- > let f k a result = result ++ "(" ++ (show k) ++ ":" ++ a ++ ")"
-- > foldrWithKey f "Map: " (fromList [(5,"a"), (3,"b")]) == "Map: (5:a)(3:b)"
{-# INLINE foldrWithKey #-}
foldrWithKey :: (Key -> a -> b -> b) -> b -> IntMap a -> b
foldrWithKey f z = start
where
start (IntMap (WordMap Empty)) = z
start (IntMap (WordMap (NonEmpty min minV Tip))) = f' min minV z
start (IntMap (WordMap (NonEmpty min minV (Bin max maxV l r))))
| w2i (xor min max) < 0 = goR r (f' max maxV (f' min minV (goL l z)))
| otherwise = f' min minV (goL l (goR r (f' max maxV z)))
goL Tip acc = acc
goL (Bin max maxV l r) acc = goL l (goR r (f' max maxV acc))
goR Tip acc = acc
goR (Bin min minV l r) acc = f' min minV (goL l (goR r acc))
f' k a b = f (w2i k) a b
-- | /O(n)/. Fold the keys and values in the map using the given left-associative
-- binary operator, such that
-- @'foldlWithKey' f z == 'Prelude.foldl' (\\z' (kx, x) -> f z' kx x) z . 'toAscList'@.
--
-- For example,
--
-- > keys = reverse . foldlWithKey (\ks k x -> k:ks) []
--
-- > let f result k a = result ++ "(" ++ (show k) ++ ":" ++ a ++ ")"
-- > foldlWithKey f "Map: " (fromList [(5,"a"), (3,"b")]) == "Map: (3:b)(5:a)"
{-# INLINE foldlWithKey #-}
foldlWithKey :: (a -> Key -> b -> a) -> a -> IntMap b -> a
foldlWithKey f z = start
where
start (IntMap (WordMap Empty)) = z
start (IntMap (WordMap (NonEmpty min minV Tip))) = f' z min minV
start (IntMap (WordMap (NonEmpty min minV (Bin max maxV l r))))
| w2i (xor min max) < 0 = goL (f' (f' (goR z r) max maxV) min minV) l
| otherwise = f' (goR (goL (f' z min minV) l) r) max maxV
goL acc Tip = acc
goL acc (Bin max maxV l r) = f' (goR (goL acc l) r) max maxV
goR acc Tip = acc
goR acc (Bin min minV l r) = goR (goL (f' acc min minV) l) r
f' a k b = f a (w2i k) b
-- | /O(n)/. Fold the keys and values in the map using the given monoid, such that
--
-- @'foldMapWithKey' f = 'Prelude.fold' . 'mapWithKey' f@
--
-- This can be asymptotically faster than 'foldrWithKey' or 'foldlWithKey' for some monoids.
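--
-- > foldMapWithKey (\k v -> show k ++ v) (fromList [(5,"a"), (3,"b")]) == "3b5a"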
foldMapWithKey :: Monoid m => (Key -> a -> m) -> IntMap a -> m
foldMapWithKey f = start
where
start (IntMap (WordMap Empty)) = mempty
start (IntMap (WordMap (NonEmpty min minV Tip))) = f (w2i min) minV
start (IntMap (WordMap (NonEmpty min minV (Bin max maxV l r))))
| w2i (xor min max) < 0 = goR r `mappend` f (w2i max) maxV `mappend` f (w2i min) minV `mappend` goL l
| otherwise = f (w2i min) minV `mappend` goL l `mappend` goR r `mappend` f (w2i max) maxV
goL Tip = mempty
goL (Bin max maxV l r) = goL l `mappend` goR r `mappend` f (w2i max) maxV
goR Tip = mempty
goR (Bin min minV l r) = f (w2i min) minV `mappend` goL l `mappend` goR r
-- | /O(n)/. A strict version of 'foldr'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
{-# INLINE foldr' #-}
foldr' :: (a -> b -> b) -> b -> IntMap a -> b
foldr' f z = start
where
start (IntMap (WordMap Empty)) = z
start (IntMap (WordMap (NonEmpty _ minV Tip))) = f minV $! z
start (IntMap (WordMap (NonEmpty min minV (Bin max maxV l r))))
| w2i (xor min max) < 0 = goR r $! f maxV $! f minV $! goL l $! z
| otherwise = f minV $! goL l $! goR r $! f maxV $! z
goL Tip acc = acc
goL (Bin _ maxV l r) acc = goL l $! goR r $! f maxV $! acc
goR Tip acc = acc
goR (Bin _ minV l r) acc = f minV $! goL l $! goR r $! acc
-- | /O(n)/. A strict version of 'foldl'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
{-# INLINE foldl' #-}
foldl' :: (a -> b -> a) -> a -> IntMap b -> a
foldl' f z = start
where
start (IntMap (WordMap Empty)) = z
start (IntMap (WordMap (NonEmpty _ minV Tip))) = f z minV
start (IntMap (WordMap (NonEmpty min minV (Bin max maxV l r))))
| w2i (xor min max) < 0 = s goL (s f (s f (s goR z r) maxV) minV) l
| otherwise = s f (s goR (s goL (s f z minV) l) r) maxV
goL acc Tip = acc
goL acc (Bin _ maxV l r) = s f (s goR (s goL acc l) r) maxV
goR acc Tip = acc
goR acc (Bin _ minV l r) = s goR (s goL (s f acc minV) l) r
s = ($!)
-- | /O(n)/. A strict version of 'foldrWithKey'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
{-# INLINE foldrWithKey' #-}
foldrWithKey' :: (Key -> a -> b -> b) -> b -> IntMap a -> b
foldrWithKey' f z = start
where
start (IntMap (WordMap Empty)) = z
start (IntMap (WordMap (NonEmpty min minV Tip))) = f' min minV $! z
start (IntMap (WordMap (NonEmpty min minV (Bin max maxV l r))))
| w2i (xor min max) < 0 = goR r $! f' max maxV $! f' min minV $! goL l $! z
| otherwise = f' min minV $! goL l $! goR r $! f' max maxV $! z
goL Tip acc = acc
goL (Bin max maxV l r) acc = goL l $! goR r $! f' max maxV $! acc
goR Tip acc = acc
goR (Bin min minV l r) acc = f' min minV $! goL l $! goR r $! acc
f' k a b = f (w2i k) a b
-- | /O(n)/. A strict version of 'foldlWithKey'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
{-# INLINE foldlWithKey' #-}
foldlWithKey' :: (a -> Key -> b -> a) -> a -> IntMap b -> a
foldlWithKey' f z = start
where
start (IntMap (WordMap Empty)) = z
start (IntMap (WordMap (NonEmpty min minV Tip))) = s f' z min minV
start (IntMap (WordMap (NonEmpty min minV (Bin max maxV l r))))
| w2i (xor min max) < 0 = s goL (s f' (s f' (s goR z r) max maxV) min minV) l
| otherwise = s f' (s goR (s goL (s f' z min minV) l) r) max maxV
goL acc Tip = acc
goL acc (Bin max maxV l r) = s f' (s goR (s goL acc l) r) max maxV
goR acc Tip = acc
goR acc (Bin min minV l r) = s goR (s goL (s f' acc min minV) l) r
f' a k b = f a (w2i k) b
s = ($!)
-- TODO: make the conversion functions good producers
-- | /O(n)/.
-- Return all elements of the map in the ascending order of their keys.
-- Subject to list fusion.
--
-- > elems (fromList [(5,"a"), (3,"b")]) == ["b","a"]
-- > elems empty == []
elems :: IntMap a -> [a]
elems = foldr (:) []
-- | /O(n)/. Return all keys of the map in ascending order. Subject to list
-- fusion.
--
-- > keys (fromList [(5,"a"), (3,"b")]) == [3,5]
-- > keys empty == []
keys :: IntMap a -> [Key]
keys = foldrWithKey (\k _ l -> k : l) []
-- | /O(n)/. An alias for 'toAscList'. Returns all key\/value pairs in the
-- map in ascending key order. Subject to list fusion.
--
-- > assocs (fromList [(5,"a"), (3,"b")]) == [(3,"b"), (5,"a")]
-- > assocs empty == []
assocs :: IntMap a -> [(Key, a)]
assocs = toAscList
-- | /O(n*min(n,W))/. The set of all keys of the map.
--
-- > keysSet (fromList [(5,"a"), (3,"b")]) == Data.IntSet.fromList [3,5]
-- > keysSet empty == Data.IntSet.empty
keysSet :: IntMap a -> Data.IntSet.IntSet
keysSet = Data.IntSet.fromDistinctAscList . keys
-- | /O(n)/. Convert the map to a list of key\/value pairs.
toList :: IntMap a -> [(Key, a)]
toList = toAscList
-- | /O(n)/. Convert the map to a list of key\/value pairs where the
-- keys are in ascending order. Subject to list fusion.
--
-- > toAscList (fromList [(5,"a"), (3,"b")]) == [(3,"b"), (5,"a")]
toAscList :: IntMap a -> [(Key, a)]
toAscList = foldrWithKey (\k v l -> (k, v) : l) []
-- | /O(n)/. Convert the map to a list of key\/value pairs where the keys
-- are in descending order. Subject to list fusion.
--
-- > toDescList (fromList [(5,"a"), (3,"b")]) == [(5,"a"), (3,"b")]
toDescList :: IntMap a -> [(Key, a)]
toDescList = foldlWithKey (\l k v -> (k, v) : l) []
-- | /O(n)/. Filter all values that satisfy some predicate.
--
-- > filter (> "a") (fromList [(5,"a"), (3,"b")]) == singleton 3 "b"
-- > filter (> "x") (fromList [(5,"a"), (3,"b")]) == empty
-- > filter (< "a") (fromList [(5,"a"), (3,"b")]) == empty
filter :: (a -> Bool) -> IntMap a -> IntMap a
filter p (IntMap m) = IntMap (W.filter p m)
-- | /O(n)/. Filter all keys\/values that satisfy some predicate.
--
-- > filterWithKey (\k _ -> k > 4) (fromList [(5,"a"), (3,"b")]) == singleton 5 "a"
filterWithKey :: (Key -> a -> Bool) -> IntMap a -> IntMap a
filterWithKey p (IntMap m) = IntMap (W.filterWithKey (p . w2i) m)
-- | /O(n+m)/. The restriction of a map to the keys in a set.
--
-- @
-- m `restrictKeys` s = 'filterWithKey' (\k _ -> k `'IntSet.member'` s) m
-- @
--
-- @since 0.5.8
restrictKeys :: IntMap a -> Data.IntSet.IntSet -> IntMap a
restrictKeys m s = filterWithKey (\k _ -> Data.IntSet.member k s) m
-- | Remove all the keys in a given set from a map.
--
-- @
-- m `withoutKeys` s = 'filterWithKey' (\k _ -> k `'IntSet.notMember'` s) m
-- @
--
-- @since 0.5.8
withoutKeys :: IntMap a -> Data.IntSet.IntSet -> IntMap a
withoutKeys m s = filterWithKey (\k _ -> Data.IntSet.notMember k s) m
-- | /O(n)/. Partition the map according to some predicate. The first
-- map contains all elements that satisfy the predicate, the second all
-- elements that fail the predicate. See also 'split'.
--
-- > partition (> "a") (fromList [(5,"a"), (3,"b")]) == (singleton 3 "b", singleton 5 "a")
-- > partition (< "x") (fromList [(5,"a"), (3,"b")]) == (fromList [(3, "b"), (5, "a")], empty)
-- > partition (> "x") (fromList [(5,"a"), (3,"b")]) == (empty, fromList [(3, "b"), (5, "a")])
partition :: (a -> Bool) -> IntMap a -> (IntMap a, IntMap a)
partition p (IntMap m) = let (m1, m2) = W.partition p m in (IntMap m1, IntMap m2)
-- | /O(n)/. Partition the map according to some predicate. The first
-- map contains all elements that satisfy the predicate, the second all
-- elements that fail the predicate. See also 'split'.
--
-- > partitionWithKey (\ k _ -> k > 3) (fromList [(5,"a"), (3,"b")]) == (singleton 5 "a", singleton 3 "b")
-- > partitionWithKey (\ k _ -> k < 7) (fromList [(5,"a"), (3,"b")]) == (fromList [(3, "b"), (5, "a")], empty)
-- > partitionWithKey (\ k _ -> k > 7) (fromList [(5,"a"), (3,"b")]) == (empty, fromList [(3, "b"), (5, "a")])
partitionWithKey :: (Key -> a -> Bool) -> IntMap a -> (IntMap a, IntMap a)
partitionWithKey p (IntMap m) = let (m1, m2) = W.partitionWithKey (p . w2i) m in (IntMap m1, IntMap m2)
-- | /O(min(n,W))/. The expression (@'split' k map@) is a pair @(map1,map2)@
-- where all keys in @map1@ are lower than @k@ and all keys in
-- @map2@ larger than @k@. Any key equal to @k@ is found in neither @map1@ nor @map2@.
--
-- > split 2 (fromList [(5,"a"), (3,"b")]) == (empty, fromList [(3,"b"), (5,"a")])
-- > split 3 (fromList [(5,"a"), (3,"b")]) == (empty, singleton 5 "a")
-- > split 4 (fromList [(5,"a"), (3,"b")]) == (singleton 3 "b", singleton 5 "a")
-- > split 5 (fromList [(5,"a"), (3,"b")]) == (singleton 3 "b", empty)
-- > split 6 (fromList [(5,"a"), (3,"b")]) == (fromList [(3,"b"), (5,"a")], empty)
split :: Key -> IntMap a -> (IntMap a, IntMap a)
split k m
| k < 0 = let (glb, WordMap lub) = W.split (i2w k) (WordMap neg)
in (IntMap glb, IntMap (WordMap (W.binL nonneg (W.l2rMap lub))))
| otherwise = let (WordMap glb, lub) = W.split (i2w k) (WordMap nonneg)
in (IntMap (WordMap (W.binL glb (W.l2rMap neg))), IntMap lub)
where
(neg, nonneg) = split0 m
-- | /O(min(n,W))/. Performs a 'split' but also returns whether the pivot
-- key was found in the original map.
--
-- > splitLookup 2 (fromList [(5,"a"), (3,"b")]) == (empty, Nothing, fromList [(3,"b"), (5,"a")])
-- > splitLookup 3 (fromList [(5,"a"), (3,"b")]) == (empty, Just "b", singleton 5 "a")
-- > splitLookup 4 (fromList [(5,"a"), (3,"b")]) == (singleton 3 "b", Nothing, singleton 5 "a")
-- > splitLookup 5 (fromList [(5,"a"), (3,"b")]) == (singleton 3 "b", Just "a", empty)
-- > splitLookup 6 (fromList [(5,"a"), (3,"b")]) == (fromList [(3,"b"), (5,"a")], Nothing, empty)
splitLookup :: Key -> IntMap a -> (IntMap a, Maybe a, IntMap a)
splitLookup k m
| k < 0 = let (glb, eq, WordMap lub) = W.splitLookup (i2w k) (WordMap neg)
in (IntMap glb, eq, IntMap (WordMap (W.binL nonneg (W.l2rMap lub))))
| otherwise = let (WordMap glb, eq, lub) = W.splitLookup (i2w k) (WordMap nonneg)
in (IntMap (WordMap (W.binL glb (W.l2rMap neg))), eq, IntMap lub)
where
(neg, nonneg) = split0 m
-- | /O(1)/. Decompose a map into pieces based on the structure of the underlying
-- tree. This function is useful for consuming a map in parallel.
--
-- No guarantee is made as to the sizes of the pieces; an internal, but
-- deterministic process determines this. However, it is guaranteed that the
-- pieces returned will be in ascending order (all elements in the first submap
-- less than all elements in the second, and so on).
--
-- Examples:
--
-- > splitRoot (fromList (zip [1..6::Int] ['a'..])) ==
-- > [fromList [(1,'a'),(2,'b'),(3,'c')],fromList [(4,'d'),(5,'e'),(6,'f')]]
--
-- > splitRoot empty == []
--
-- Note that the current implementation does not return more than two submaps,
-- but you should not depend on this behaviour because it can change in the
-- future without notice.
{-# INLINE splitRoot #-}
splitRoot :: IntMap a -> [IntMap a]
splitRoot (IntMap (WordMap Empty)) = []
splitRoot m@(IntMap (WordMap (NonEmpty _ _ Tip))) = [m]
splitRoot (IntMap (WordMap (NonEmpty min minV (Bin max maxV l r))))
| w2i (xor min max) < 0 = [IntMap (WordMap (W.r2lMap (NonEmpty max maxV r))), IntMap (WordMap (NonEmpty min minV l))]
| otherwise = [IntMap (WordMap (NonEmpty min minV l)), IntMap (WordMap (W.r2lMap (NonEmpty max maxV r)))]
-- | /O(n+m)/. Is this a submap?
-- Defined as (@'isSubmapOf' = 'isSubmapOfBy' (==)@).
isSubmapOf :: Eq a => IntMap a -> IntMap a -> Bool
IntMap m1 `isSubmapOf` IntMap m2 = m1 `W.isSubmapOf` m2
{- | /O(n+m)/.
The expression (@'isSubmapOfBy' f m1 m2@) returns 'True' if
all keys in @m1@ are in @m2@, and when @f@ returns 'True' when
applied to their respective values. For example, the following
expressions are all 'True':
> isSubmapOfBy (==) (fromList [(1,1)]) (fromList [(1,1),(2,2)])
> isSubmapOfBy (<=) (fromList [(1,1)]) (fromList [(1,1),(2,2)])
> isSubmapOfBy (==) (fromList [(1,1),(2,2)]) (fromList [(1,1),(2,2)])
But the following are all 'False':
> isSubmapOfBy (==) (fromList [(1,2)]) (fromList [(1,1),(2,2)])
> isSubmapOfBy (<) (fromList [(1,1)]) (fromList [(1,1),(2,2)])
> isSubmapOfBy (==) (fromList [(1,1),(2,2)]) (fromList [(1,1)])
-}
isSubmapOfBy :: (a -> b -> Bool) -> IntMap a -> IntMap b -> Bool
isSubmapOfBy f (IntMap m1) (IntMap m2) = W.isSubmapOfBy f m1 m2
-- | /O(n+m)/. Is this a proper submap? (ie. a submap but not equal).
-- Defined as (@'isProperSubmapOf' = 'isProperSubmapOfBy' (==)@).
isProperSubmapOf :: Eq a => IntMap a -> IntMap a -> Bool
isProperSubmapOf (IntMap m1) (IntMap m2) = W.isProperSubmapOf m1 m2
{- | /O(n+m)/. Is this a proper submap? (ie. a submap but not equal).
The expression (@'isProperSubmapOfBy' f m1 m2@) returns 'True' when
@m1@ and @m2@ are not equal,
all keys in @m1@ are in @m2@, and when @f@ returns 'True' when
applied to their respective values. For example, the following
expressions are all 'True':
> isProperSubmapOfBy (==) (fromList [(1,1)]) (fromList [(1,1),(2,2)])
> isProperSubmapOfBy (<=) (fromList [(1,1)]) (fromList [(1,1),(2,2)])
But the following are all 'False':
> isProperSubmapOfBy (==) (fromList [(1,1),(2,2)]) (fromList [(1,1),(2,2)])
> isProperSubmapOfBy (==) (fromList [(1,1),(2,2)]) (fromList [(1,1)])
> isProperSubmapOfBy (<) (fromList [(1,1)]) (fromList [(1,1),(2,2)])
-}
isProperSubmapOfBy :: (a -> b -> Bool) -> IntMap a -> IntMap b -> Bool
isProperSubmapOfBy p (IntMap m1) (IntMap m2) = W.isProperSubmapOfBy p m1 m2
-- | /O(1)/. The minimal key of the map.
findMin :: IntMap a -> (Key, a)
findMin (IntMap (WordMap Empty)) = error "findMin: empty map has no minimal element"
findMin (IntMap (WordMap (NonEmpty min minV Tip))) = (w2i min, minV)
findMin (IntMap (WordMap (NonEmpty min minV (Bin max maxV _ r))))
| w2i (xor min max) < 0 = case r of
Tip -> (w2i max, maxV)
Bin min' minV' _ _ -> (w2i min', minV')
| otherwise = (w2i min, minV)
-- | /O(1)/. The maximal key of the map.
findMax :: IntMap a -> (Key, a)
findMax (IntMap (WordMap Empty)) = error "findMax: empty map has no maximal element"
findMax (IntMap (WordMap (NonEmpty min minV Tip))) = (w2i min, minV)
findMax (IntMap (WordMap (NonEmpty min minV (Bin max maxV l _))))
| w2i (xor min max) < 0 = case l of
Tip -> (w2i min, minV)
Bin max' maxV' _ _ -> (w2i max', maxV')
| otherwise = (w2i max, maxV)
-- | /O(min(n,W))/. Delete the minimal key. Returns an empty map if the map is empty.
--
-- Note that this is a change of behaviour for consistency with 'Data.Map.Map' –
-- versions prior to 0.5 threw an error if the 'IntMap' was already empty.
deleteMin :: IntMap a -> IntMap a
deleteMin (IntMap (WordMap Empty)) = IntMap (WordMap Empty)
deleteMin m = delete (fst (findMin m)) m
-- | /O(min(n,W))/. Delete the maximal key. Returns an empty map if the map is empty.
--
-- Note that this is a change of behaviour for consistency with 'Data.Map.Map' –
-- versions prior to 0.5 threw an error if the 'IntMap' was already empty.
deleteMax :: IntMap a -> IntMap a
deleteMax (IntMap (WordMap Empty)) = IntMap (WordMap Empty)
deleteMax m = delete (fst (findMax m)) m
-- | /O(min(n,W))/. Delete and find the minimal element.
deleteFindMin :: IntMap a -> ((Key, a), IntMap a)
deleteFindMin m = let (k, a) = findMin m
in ((k, a), delete k m)
-- | /O(min(n,W))/. Delete and find the maximal element.
deleteFindMax :: IntMap a -> ((Key, a), IntMap a)
deleteFindMax m = let (k, a) = findMax m
in ((k, a), delete k m)
-- | /O(min(n,W))/. Retrieves the minimal key of the map, and the map
-- stripped of that element, or 'Nothing' if passed an empty map.
minView :: IntMap a -> Maybe (a, IntMap a)
minView (IntMap (WordMap Empty)) = Nothing
minView m = let (k, a) = findMin m
in Just (a, delete k m)
-- | /O(min(n,W))/. Retrieves the maximal key of the map, and the map
-- stripped of that element, or 'Nothing' if passed an empty map.
maxView :: IntMap a -> Maybe (a, IntMap a)
maxView (IntMap (WordMap Empty)) = Nothing
maxView m = let (k, a) = findMax m
in Just (a, delete k m)
-- | /O(min(n,W))/. Retrieves the minimal (key,value) pair of the map, and
-- the map stripped of that element, or 'Nothing' if passed an empty map.
--
-- > minViewWithKey (fromList [(5,"a"), (3,"b")]) == Just ((3,"b"), singleton 5 "a")
-- > minViewWithKey empty == Nothing
minViewWithKey :: IntMap a -> Maybe ((Key, a), IntMap a)
minViewWithKey (IntMap (WordMap Empty)) = Nothing
minViewWithKey m = let (k, a) = findMin m
in Just ((k, a), delete k m)
-- | /O(min(n,W))/. Retrieves the maximal (key,value) pair of the map, and
-- the map stripped of that element, or 'Nothing' if passed an empty map.
--
-- > maxViewWithKey (fromList [(5,"a"), (3,"b")]) == Just ((5,"a"), singleton 3 "b")
-- > maxViewWithKey empty == Nothing
maxViewWithKey :: IntMap a -> Maybe ((Key, a), IntMap a)
maxViewWithKey (IntMap (WordMap Empty)) = Nothing
maxViewWithKey m = let (k, a) = findMax m
in Just ((k, a), delete k m)
----------------------------
-- | Show the tree that implements the map.
showTree :: Show a => IntMap a -> String
showTree (IntMap m) = W.showTree m
valid :: IntMap a -> Bool
valid (IntMap m) = W.valid m
----------------------
i2w :: Int -> Word
i2w = fromIntegral
w2i :: Word -> Int
w2i = fromIntegral
-- | /O(1)/. Split a map into its negative and nonnegative parts. For internal use only.
{-# INLINE split0 #-}
split0 :: IntMap a -> (WordMap_ W.L a, WordMap_ W.L a)
split0 (IntMap (WordMap Empty)) = (Empty, Empty)
split0 (IntMap (WordMap m@(NonEmpty min _ Tip)))
| w2i min < 0 = (m, Empty)
| otherwise = (Empty, m)
split0 (IntMap (WordMap m@(NonEmpty min minV (Bin max maxV l r))))
| w2i (xor min max) < 0 = (W.r2lMap (NonEmpty max maxV r), NonEmpty min minV l)
| w2i max < 0 = (m, Empty)
| otherwise = (Empty, m)
| gereeter/bounded-intmap | src/Data/IntMap/Bounded/Base.hs | mit | 36,007 | 0 | 21 | 9,033 | 11,227 | 5,682 | 5,545 | 442 | 9 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE ExistentialQuantification #-}
module Test.Hspec.Core.Formatters.V1.Monad (
Formatter(..)
, Item(..)
, Result(..)
, FailureReason (..)
, FormatM
, getSuccessCount
, getPendingCount
, getFailCount
, getTotalCount
, FailureRecord (..)
, getFailMessages
, usedSeed
, printTimes
, getCPUTime
, getRealTime
, write
, writeLine
, writeTransient
, withInfoColor
, withSuccessColor
, withPendingColor
, withFailColor
, useDiff
, extraChunk
, missingChunk
, Environment(..)
, interpretWith
) where
import Prelude ()
import Test.Hspec.Core.Compat
import Control.Monad.IO.Class
import Test.Hspec.Core.Formatters.V1.Free
import Test.Hspec.Core.Clock
import Test.Hspec.Core.Format
data Formatter = Formatter {
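  -- | evaluated once, before the test run starts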
headerFormatter :: FormatM ()
-- | evaluated before each test group
, exampleGroupStarted :: [String] -> String -> FormatM ()
-- | evaluated after each test group
, exampleGroupDone :: FormatM ()
-- | evaluated before each example
, exampleStarted :: Path -> FormatM ()
-- | used to notify the progress of the currently evaluated example
, exampleProgress :: Path -> Progress -> FormatM ()
-- | evaluated after each successful example
, exampleSucceeded :: Path -> String -> FormatM ()
-- | evaluated after each failed example
, exampleFailed :: Path -> String -> FailureReason -> FormatM ()
-- | evaluated after each pending example
, examplePending :: Path -> String -> Maybe String -> FormatM ()
-- | evaluated after a test run
, failedFormatter :: FormatM ()
-- | evaluated after `failedFormatter`
, footerFormatter :: FormatM ()
}
data FailureRecord = FailureRecord {
failureRecordLocation :: Maybe Location
, failureRecordPath :: Path
, failureRecordMessage :: FailureReason
}
data FormatF next =
GetSuccessCount (Int -> next)
| GetPendingCount (Int -> next)
| GetFailMessages ([FailureRecord] -> next)
| UsedSeed (Integer -> next)
| PrintTimes (Bool -> next)
| GetCPUTime (Maybe Seconds -> next)
| GetRealTime (Seconds -> next)
| Write String next
| WriteTransient String next
| forall a. WithFailColor (FormatM a) (a -> next)
| forall a. WithSuccessColor (FormatM a) (a -> next)
| forall a. WithPendingColor (FormatM a) (a -> next)
| forall a. WithInfoColor (FormatM a) (a -> next)
| UseDiff (Bool -> next)
| ExtraChunk String next
| MissingChunk String next
| forall a. LiftIO (IO a) (a -> next)
instance Functor FormatF where -- deriving this instance would require GHC >= 7.10.1
fmap f x = case x of
GetSuccessCount next -> GetSuccessCount (fmap f next)
GetPendingCount next -> GetPendingCount (fmap f next)
GetFailMessages next -> GetFailMessages (fmap f next)
UsedSeed next -> UsedSeed (fmap f next)
PrintTimes next -> PrintTimes (fmap f next)
GetCPUTime next -> GetCPUTime (fmap f next)
GetRealTime next -> GetRealTime (fmap f next)
Write s next -> Write s (f next)
WriteTransient s next -> WriteTransient s (f next)
WithFailColor action next -> WithFailColor action (fmap f next)
WithSuccessColor action next -> WithSuccessColor action (fmap f next)
WithPendingColor action next -> WithPendingColor action (fmap f next)
WithInfoColor action next -> WithInfoColor action (fmap f next)
UseDiff next -> UseDiff (fmap f next)
ExtraChunk s next -> ExtraChunk s (f next)
MissingChunk s next -> MissingChunk s (f next)
LiftIO action next -> LiftIO action (fmap f next)
type FormatM = Free FormatF
instance MonadIO FormatM where
liftIO s = liftF (LiftIO s id)
data Environment m = Environment {
environmentGetSuccessCount :: m Int
, environmentGetPendingCount :: m Int
, environmentGetFailMessages :: m [FailureRecord]
, environmentUsedSeed :: m Integer
, environmentPrintTimes :: m Bool
, environmentGetCPUTime :: m (Maybe Seconds)
, environmentGetRealTime :: m Seconds
, environmentWrite :: String -> m ()
, environmentWriteTransient :: String -> m ()
, environmentWithFailColor :: forall a. m a -> m a
, environmentWithSuccessColor :: forall a. m a -> m a
, environmentWithPendingColor :: forall a. m a -> m a
, environmentWithInfoColor :: forall a. m a -> m a
, environmentUseDiff :: m Bool
, environmentExtraChunk :: String -> m ()
, environmentMissingChunk :: String -> m ()
, environmentLiftIO :: forall a. IO a -> m a
}
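-- A minimal console-only 'Environment' sketch for 'interpretWith'
-- (illustrative: counters, seed and times are stubbed out, colors are
-- ignored, and it assumes 'Seconds' can be built from a numeric literal):
--
-- > consoleEnvironment :: Environment IO
-- > consoleEnvironment = Environment
-- >   { environmentGetSuccessCount = return 0
-- >   , environmentGetPendingCount = return 0
-- >   , environmentGetFailMessages = return []
-- >   , environmentUsedSeed = return 0
-- >   , environmentPrintTimes = return False
-- >   , environmentGetCPUTime = return Nothing
-- >   , environmentGetRealTime = return 0
-- >   , environmentWrite = putStr
-- >   , environmentWriteTransient = \_ -> return ()
-- >   , environmentWithFailColor = id
-- >   , environmentWithSuccessColor = id
-- >   , environmentWithPendingColor = id
-- >   , environmentWithInfoColor = id
-- >   , environmentUseDiff = return False
-- >   , environmentExtraChunk = putStr
-- >   , environmentMissingChunk = putStr
-- >   , environmentLiftIO = id
-- >   }
-- >
-- > -- interpretWith consoleEnvironment (writeLine "hello") prints "hello".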
interpretWith :: forall m a. Monad m => Environment m -> FormatM a -> m a
interpretWith Environment{..} = go
where
go :: forall b. FormatM b -> m b
go m = case m of
Pure value -> return value
Free action -> case action of
GetSuccessCount next -> environmentGetSuccessCount >>= go . next
GetPendingCount next -> environmentGetPendingCount >>= go . next
GetFailMessages next -> environmentGetFailMessages >>= go . next
UsedSeed next -> environmentUsedSeed >>= go . next
PrintTimes next -> environmentPrintTimes >>= go . next
GetCPUTime next -> environmentGetCPUTime >>= go . next
GetRealTime next -> environmentGetRealTime >>= go . next
Write s next -> environmentWrite s >> go next
WriteTransient s next -> environmentWriteTransient s >> go next
WithFailColor inner next -> environmentWithFailColor (go inner) >>= go . next
WithSuccessColor inner next -> environmentWithSuccessColor (go inner) >>= go . next
WithPendingColor inner next -> environmentWithPendingColor (go inner) >>= go . next
WithInfoColor inner next -> environmentWithInfoColor (go inner) >>= go . next
UseDiff next -> environmentUseDiff >>= go . next
ExtraChunk s next -> environmentExtraChunk s >> go next
MissingChunk s next -> environmentMissingChunk s >> go next
LiftIO inner next -> environmentLiftIO inner >>= go . next
-- | Get the number of successful examples encountered so far.
getSuccessCount :: FormatM Int
getSuccessCount = liftF (GetSuccessCount id)
-- | Get the number of pending examples encountered so far.
getPendingCount :: FormatM Int
getPendingCount = liftF (GetPendingCount id)
-- | Get the number of failed examples encountered so far.
getFailCount :: FormatM Int
getFailCount = length <$> getFailMessages
-- | Get the total number of examples encountered so far.
getTotalCount :: FormatM Int
getTotalCount = sum <$> sequence [getSuccessCount, getFailCount, getPendingCount]
-- | Get the list of accumulated failure messages.
getFailMessages :: FormatM [FailureRecord]
getFailMessages = liftF (GetFailMessages id)
-- | The random seed that is used for QuickCheck.
usedSeed :: FormatM Integer
usedSeed = liftF (UsedSeed id)
-- | Return `True` if the user requested time reporting for individual spec
-- items, `False` otherwise.
printTimes :: FormatM Bool
printTimes = liftF (PrintTimes id)
-- | Get the used CPU time since the test run has been started.
getCPUTime :: FormatM (Maybe Seconds)
getCPUTime = liftF (GetCPUTime id)
-- | Get the passed real time since the test run has been started.
getRealTime :: FormatM Seconds
getRealTime = liftF (GetRealTime id)
-- | Append some output to the report.
write :: String -> FormatM ()
write s = liftF (Write s ())
-- | The same as `write`, but adds a newline character.
writeLine :: String -> FormatM ()
writeLine s = write s >> write "\n"
writeTransient :: String -> FormatM ()
writeTransient s = liftF (WriteTransient s ())
-- | Set output color to red, run given action, and finally restore the default
-- color.
withFailColor :: FormatM a -> FormatM a
withFailColor s = liftF (WithFailColor s id)
-- | Set output color to green, run given action, and finally restore the
-- default color.
withSuccessColor :: FormatM a -> FormatM a
withSuccessColor s = liftF (WithSuccessColor s id)
-- | Set output color to yellow, run given action, and finally restore the
-- default color.
withPendingColor :: FormatM a -> FormatM a
withPendingColor s = liftF (WithPendingColor s id)
-- | Set output color to cyan, run given action, and finally restore the
-- default color.
withInfoColor :: FormatM a -> FormatM a
withInfoColor s = liftF (WithInfoColor s id)
-- | Return `True` if the user requested colorized diffs, `False` otherwise.
useDiff :: FormatM Bool
useDiff = liftF (UseDiff id)
-- | Output given chunk in red.
extraChunk :: String -> FormatM ()
extraChunk s = liftF (ExtraChunk s ())
-- | Output given chunk in green.
missingChunk :: String -> FormatM ()
missingChunk s = liftF (MissingChunk s ())
| hspec/hspec | hspec-core/src/Test/Hspec/Core/Formatters/V1/Monad.hs | mit | 8,636 | 0 | 17 | 1,685 | 2,349 | 1,224 | 1,125 | 174 | 18 |
import Test.HUnit
import qualified Data.Text.Lazy as T
import qualified Data.Text as S
import qualified Data.Map.Strict as M
import MlgscTypes
import FastA
fastaInput = ">hdr1\naa\ntt\n>hdr2 len > 5\ngg\ncc\n>hdr3\ngaattc"
fastARecs = fastATextToRecords $ T.pack fastaInput
test1 = TestCase (assertEqual "FastA rec 1 hdr" (T.pack "hdr1") (FastA.header $ head fastARecs))
test2 = TestCase (assertEqual "FastA rec 1 seq" (T.pack "AATT") (FastA.sequence $ head fastARecs))
test3 = TestCase (assertEqual "FastA rec 2 hdr" (T.pack "hdr2 len > 5") (FastA.header $ fastARecs !! 1))
test4 = TestCase (assertEqual "FastA rec 2 seq" (T.pack "GGCC") (FastA.sequence $ fastARecs !! 1))
test5 = TestCase (assertEqual "FastA rec 3 hdr" (T.pack "hdr3") (FastA.header $ fastARecs !! 2))
test6 = TestCase (assertEqual "FastA rec 3 seq" (T.pack "GAATTC") (FastA.sequence $ fastARecs !! 2))
-- Check ID and OTU, by convention the first and second words of the header.
fastAInput2 = T.pack $ unlines [
">XAA01 Methanococcus",
"accgatgctaatgtagcatgcagcatatgcg",
"cgagcgatctagcacgagcatgcatg",
"cgcaggtcatcgagagtc",
">ZAB089 Archaeoglobus",
"cgatatcgagagcgatcatcatgcagcagcaggcat",
"gcagcatgcatgcncgatcggatgcatgcnngcatcga"
]
-- by now I have learned about the short syntax :-)
fastARecs2 = fastATextToRecords fastAInput2
record10 = fastARecs2 !! 0
record11 = fastARecs2 !! 1
test7 = "FastA ID" ~: (T.pack "XAA01") ~=? (FastA.fastAId record10)
test8 = "FastA OTU" ~: (T.pack "Methanococcus") ~=? (FastA.fastAOTU record10)
test9 = "FastA ID" ~: (T.pack "ZAB089") ~=? (FastA.fastAId record11)
test10 = "FastA OTU" ~: (T.pack "Archaeoglobus") ~=? (FastA.fastAOTU record11)
tests = TestList [
TestLabel "FastA parser" test1
, TestLabel "FastA parser" test2
, TestLabel "FastA parser" test3
, TestLabel "FastA parser" test4
, TestLabel "FastA parser" test5
, TestLabel "FastA parser" test6
, TestLabel "fastA header fields" test7
, TestLabel "fastA header fields" test8
, TestLabel "fastA header fields" test9
, TestLabel "fastA header fields" test10
]
main = do
runTestTT tests
| tjunier/mlgsc | test/TestFastA.hs | mit | 2,219 | 2 | 11 | 453 | 569 | 302 | 267 | 42 | 1 |
module Environment (
Environment(..),
getEnvironment,
getEnvironmentURI,
) where
import System.Environment (lookupEnv)
import Network.URI (URI, parseURI)
data Environment = Development | Production deriving (Eq)
instance Read Environment where
  readsPrec _ e = case e of
    "development" -> [(Development, "")]
    "production" -> [(Production, "")]
    _ -> [] -- unknown values fail to parse instead of crashing
getEnvironment :: IO Environment
getEnvironment = maybe Production read <$> lookupEnv "SCOTTY_ENV"
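-- Example usage (key name is illustrative); fails with a descriptive error
-- if the variable is unset or is not a valid URI:
--
-- getEnvironmentURI "DATABASE_URL" >>= print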
getEnvironmentURI :: String -> IO URI
getEnvironmentURI key = do
uri <- lookupEnv key >>= maybe (fail $ key ++ " is missing from environment") return
maybe (fail $ uri ++ " is an invalid uri") return $ parseURI uri
| keithduncan/chatterbox | src/common/Environment.hs | mit | 714 | 0 | 12 | 152 | 213 | 115 | 98 | 17 | 1 |
-- ColaDePrioridadConListas.hs
-- Implementation of priority queues using lists.
-- José A. Alonso Jiménez https://jaalonso.github.com
-- =====================================================================
module Tema_16.ColaDePrioridadConListas
(CPrioridad,
vacia, -- Ord a => CPrioridad a
inserta, -- Ord a => a -> CPrioridad a -> CPrioridad a
primero, -- Ord a => CPrioridad a -> a
resto, -- Ord a => CPrioridad a -> CPrioridad a
esVacia, -- Ord a => CPrioridad a -> Bool
valida -- Ord a => CPrioridad a -> Bool
) where
-- Priority queues implemented as lists.
newtype CPrioridad a = CP [a]
deriving (Eq, Show)
-- Example of a priority queue
-- λ> foldr inserta vacia [3,1,7,2,9]
-- CP [1,2,3,7,9]
-- (valida c) holds if c is a valid priority queue. For
-- example,
-- valida (CP [1,3,5]) == True
-- valida (CP [1,5,3]) == False
valida :: Ord a => CPrioridad a -> Bool
valida (CP xs) = ordenada xs
where ordenada (x:y:zs) = x <= y && ordenada (y:zs)
ordenada _ = True
-- vacia is the empty priority queue. For example,
-- λ> vacia
-- CP []
vacia :: Ord a => CPrioridad a
vacia = CP []
-- (inserta x c) is the queue obtained by adding the element x to the
-- priority queue c. For example,
-- λ> inserta 5 (foldr inserta vacia [3,1,7,2,9])
-- CP [1,2,3,5,7,9]
inserta :: Ord a => a -> CPrioridad a -> CPrioridad a
inserta x (CP q) = CP (ins x q)
where ins y [] = [y]
ins y r@(e:r') | y < e = y:r
| otherwise = e:ins y r'
-- (primero c) is the first element of the priority queue c. For
-- example,
-- primero (foldr inserta vacia [3,1,7,2,9]) == 1
primero :: Ord a => CPrioridad a -> a
primero (CP(x:_)) = x
primero _ = error "primero: cola de prioridad vacia"
-- (resto c) is the priority queue obtained by removing the first
-- element of the priority queue c. For example,
-- resto (foldr inserta vacia [3,1,7,2,9]) == CP [2,3,7,9]
resto :: Ord a => CPrioridad a -> CPrioridad a
resto (CP (_:xs)) = CP xs
resto _ = error "resto: cola de prioridad vacia"
-- (esVacia c) holds if the priority queue c is empty. For
-- example,
-- esVacia (foldr inserta vacia [3,1,7,2,9]) == False
-- esVacia vacia == True
esVacia :: Ord a => CPrioridad a -> Bool
esVacia (CP xs) = null xs
| jaalonso/I1M-Cod-Temas | src/Tema_16/ColaDePrioridadConListas.hs | gpl-2.0 | 2,405 | 0 | 10 | 593 | 461 | 249 | 212 | 29 | 2 |
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
module String_Matching.Config where
import Autolib.TES.Identifier
import Autolib.Set
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
data Ord a => Config a =
Config { alphabet :: Set a
, word_length :: Int
, take_best_of :: Int
}
deriving ( Typeable )
$(derives [makeReader, makeToDoc] [''Config])
example :: Config Identifier
example = Config
{ alphabet = mkSet [ read "a", read "b" ]
, word_length = 12
, take_best_of = 5
}
-- local variables:
-- mode: haskell
-- end:
| Erdwolf/autotool-bonn | src/String_Matching/Config.hs | gpl-2.0 | 627 | 0 | 9 | 169 | 152 | 89 | 63 | 18 | 1 |
module Set_1 (set1ch1) where
import Data.ByteString
--newtype Base64 x = ByteString x
newtype Base16 x = ByteString x
--ch1input :: Base16
--ch1input = BS.pack "49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f69736f6e6f7573206d757368726f6f6d"
set1ch1 :: String
set1ch1 = "dogs" | Marcus-Rosti/matasano | src/set_1/Set_1.hs | gpl-3.0 | 298 | 0 | 5 | 35 | 36 | 24 | 12 | 5 | 1 |
module Texture.SphericalHarmonics
( -- * Hyper Spherical Harmonics
module Texture.SH.Harmonics
-- * Rotation and Symmetry
, module Texture.SH.Rotation
-- * Data Structure for SH
, module Texture.SH.Pyramid
-- * SH base functions
, module Texture.SH.SupportFunctions
) where
import Texture.SH.Harmonics
import Texture.SH.Rotation
import Texture.SH.Pyramid
import Texture.SH.SupportFunctions
| lostbean/sphermonics | src/Texture/SphericalHarmonics.hs | gpl-3.0 | 462 | 0 | 5 | 117 | 64 | 45 | 19 | 10 | 0 |
module Schema.Version where
import Data.Int (Int64)
schemaVersion :: Int64
schemaVersion = 29
| merijn/GPU-benchmarks | benchmark-analysis/src/Schema/Version.hs | gpl-3.0 | 96 | 0 | 5 | 14 | 26 | 16 | 10 | 4 | 1 |
{-# LANGUAGE TemplateHaskell #-}
-- Copyright (C) 2010-2012 John Millikin <[email protected]>
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
module DBusTests.InterfaceName (test_InterfaceName) where
import Test.Chell
import Test.Chell.QuickCheck
import Test.QuickCheck hiding (property)
import Data.List (intercalate)
import DBus
import DBusTests.Util
test_InterfaceName :: Suite
test_InterfaceName = suite "InterfaceName"
test_Parse
test_ParseInvalid
test_IsVariant
test_Parse :: Test
test_Parse = property "parse" prop where
prop = forAll gen_InterfaceName check
check x = case parseInterfaceName x of
Nothing -> False
Just parsed -> formatInterfaceName parsed == x
test_ParseInvalid :: Test
test_ParseInvalid = assertions "parse-invalid" $ do
-- empty
$expect (nothing (parseInterfaceName ""))
-- one element
$expect (nothing (parseInterfaceName "foo"))
-- element starting with a digit
$expect (nothing (parseInterfaceName "foo.0bar"))
-- trailing characters
$expect (nothing (parseInterfaceName "foo.bar!"))
-- at most 255 characters
$expect (just (parseInterfaceName ("f." ++ replicate 252 'y')))
$expect (just (parseInterfaceName ("f." ++ replicate 253 'y')))
$expect (nothing (parseInterfaceName ("f." ++ replicate 254 'y')))
test_IsVariant :: Test
test_IsVariant = assertions "IsVariant" $ do
assertVariant TypeString (interfaceName_ "foo.bar")
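-- Generates syntactically valid interface names: two or more dot-separated
-- elements, each matching [A-Za-z_][A-Za-z0-9_]*, trimmed to the
-- 255-character limit.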
gen_InterfaceName :: Gen String
gen_InterfaceName = trim chunks where
alpha = ['a'..'z'] ++ ['A'..'Z'] ++ "_"
alphanum = alpha ++ ['0'..'9']
trim gen = do
x <- gen
if length x > 255
then return (dropWhileEnd (== '.') (take 255 x))
else return x
chunks = do
x <- chunk
xs <- listOf1 chunk
return (intercalate "." (x:xs))
chunk = do
x <- elements alpha
xs <- listOf (elements alphanum)
return (x:xs)
instance Arbitrary InterfaceName where
arbitrary = fmap interfaceName_ gen_InterfaceName
| tmishima/haskell-dbus | tests/DBusTests/InterfaceName.hs | gpl-3.0 | 2,562 | 16 | 15 | 478 | 606 | 308 | 298 | 50 | 2 |
import qualified NS3473.Concrete as M
import qualified NS3473.Rebars as R
import qualified NS3473.Columns as C
import qualified NS3473.Buckling as X
import qualified NS3473Column.System as S
rebar = S.createRebarCollection 20 2
conc = M.newConc "35"
-- co = S.createColumn 300 300 4500 "2.0" conc rebar
co = S.createColumn 300 300 4500 "2.0" conc rebar
runx = S.runSystem co 100 45
| baalbek/ns3473column | demo/demo1.hs | gpl-3.0 | 391 | 0 | 6 | 68 | 98 | 59 | 39 | 9 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Healthcare.Projects.Locations.DataSets.FhirStores.GetIAMPolicy
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets the access control policy for a resource. Returns an empty policy
-- if the resource exists and does not have a policy set.
--
-- /See:/ <https://cloud.google.com/healthcare Cloud Healthcare API Reference> for @healthcare.projects.locations.datasets.fhirStores.getIamPolicy@.
module Network.Google.Resource.Healthcare.Projects.Locations.DataSets.FhirStores.GetIAMPolicy
(
-- * REST Resource
ProjectsLocationsDataSetsFhirStoresGetIAMPolicyResource
-- * Creating a Request
, projectsLocationsDataSetsFhirStoresGetIAMPolicy
, ProjectsLocationsDataSetsFhirStoresGetIAMPolicy
-- * Request Lenses
, pldsfsgipOptionsRequestedPolicyVersion
, pldsfsgipXgafv
, pldsfsgipUploadProtocol
, pldsfsgipAccessToken
, pldsfsgipUploadType
, pldsfsgipResource
, pldsfsgipCallback
) where
import Network.Google.Healthcare.Types
import Network.Google.Prelude
-- | A resource alias for @healthcare.projects.locations.datasets.fhirStores.getIamPolicy@ method which the
-- 'ProjectsLocationsDataSetsFhirStoresGetIAMPolicy' request conforms to.
type ProjectsLocationsDataSetsFhirStoresGetIAMPolicyResource
=
"v1" :>
CaptureMode "resource" "getIamPolicy" Text :>
QueryParam "options.requestedPolicyVersion"
(Textual Int32)
:>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Get '[JSON] Policy
-- | Gets the access control policy for a resource. Returns an empty policy
-- if the resource exists and does not have a policy set.
--
-- /See:/ 'projectsLocationsDataSetsFhirStoresGetIAMPolicy' smart constructor.
data ProjectsLocationsDataSetsFhirStoresGetIAMPolicy =
ProjectsLocationsDataSetsFhirStoresGetIAMPolicy'
{ _pldsfsgipOptionsRequestedPolicyVersion :: !(Maybe (Textual Int32))
, _pldsfsgipXgafv :: !(Maybe Xgafv)
, _pldsfsgipUploadProtocol :: !(Maybe Text)
, _pldsfsgipAccessToken :: !(Maybe Text)
, _pldsfsgipUploadType :: !(Maybe Text)
, _pldsfsgipResource :: !Text
, _pldsfsgipCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsLocationsDataSetsFhirStoresGetIAMPolicy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pldsfsgipOptionsRequestedPolicyVersion'
--
-- * 'pldsfsgipXgafv'
--
-- * 'pldsfsgipUploadProtocol'
--
-- * 'pldsfsgipAccessToken'
--
-- * 'pldsfsgipUploadType'
--
-- * 'pldsfsgipResource'
--
-- * 'pldsfsgipCallback'
projectsLocationsDataSetsFhirStoresGetIAMPolicy
:: Text -- ^ 'pldsfsgipResource'
-> ProjectsLocationsDataSetsFhirStoresGetIAMPolicy
projectsLocationsDataSetsFhirStoresGetIAMPolicy pPldsfsgipResource_ =
ProjectsLocationsDataSetsFhirStoresGetIAMPolicy'
{ _pldsfsgipOptionsRequestedPolicyVersion = Nothing
, _pldsfsgipXgafv = Nothing
, _pldsfsgipUploadProtocol = Nothing
, _pldsfsgipAccessToken = Nothing
, _pldsfsgipUploadType = Nothing
, _pldsfsgipResource = pPldsfsgipResource_
, _pldsfsgipCallback = Nothing
}
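-- Illustrative sketch (not part of the generated module): building a request
-- and setting the optional policy version through its lens. The resource path
-- below is a made-up example, and the (&) and (?~) operators are assumed to be
-- the usual Control.Lens combinators.
-- > let rq = projectsLocationsDataSetsFhirStoresGetIAMPolicy
-- >            "projects/p/locations/l/datasets/d/fhirStores/s"
-- >            & pldsfsgipOptionsRequestedPolicyVersion ?~ 3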
-- | Optional. The policy format version to be returned. Valid values are 0,
-- 1, and 3. Requests specifying an invalid value will be rejected.
-- Requests for policies with any conditional bindings must specify version
-- 3. Policies without any conditional bindings may specify any valid value
-- or leave the field unset. To learn which resources support conditions in
-- their IAM policies, see the [IAM
-- documentation](https:\/\/cloud.google.com\/iam\/help\/conditions\/resource-policies).
pldsfsgipOptionsRequestedPolicyVersion :: Lens' ProjectsLocationsDataSetsFhirStoresGetIAMPolicy (Maybe Int32)
pldsfsgipOptionsRequestedPolicyVersion
= lens _pldsfsgipOptionsRequestedPolicyVersion
(\ s a ->
s{_pldsfsgipOptionsRequestedPolicyVersion = a})
. mapping _Coerce
-- | V1 error format.
pldsfsgipXgafv :: Lens' ProjectsLocationsDataSetsFhirStoresGetIAMPolicy (Maybe Xgafv)
pldsfsgipXgafv
= lens _pldsfsgipXgafv
(\ s a -> s{_pldsfsgipXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pldsfsgipUploadProtocol :: Lens' ProjectsLocationsDataSetsFhirStoresGetIAMPolicy (Maybe Text)
pldsfsgipUploadProtocol
= lens _pldsfsgipUploadProtocol
(\ s a -> s{_pldsfsgipUploadProtocol = a})
-- | OAuth access token.
pldsfsgipAccessToken :: Lens' ProjectsLocationsDataSetsFhirStoresGetIAMPolicy (Maybe Text)
pldsfsgipAccessToken
= lens _pldsfsgipAccessToken
(\ s a -> s{_pldsfsgipAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pldsfsgipUploadType :: Lens' ProjectsLocationsDataSetsFhirStoresGetIAMPolicy (Maybe Text)
pldsfsgipUploadType
= lens _pldsfsgipUploadType
(\ s a -> s{_pldsfsgipUploadType = a})
-- | REQUIRED: The resource for which the policy is being requested. See the
-- operation documentation for the appropriate value for this field.
pldsfsgipResource :: Lens' ProjectsLocationsDataSetsFhirStoresGetIAMPolicy Text
pldsfsgipResource
= lens _pldsfsgipResource
(\ s a -> s{_pldsfsgipResource = a})
-- | JSONP
pldsfsgipCallback :: Lens' ProjectsLocationsDataSetsFhirStoresGetIAMPolicy (Maybe Text)
pldsfsgipCallback
= lens _pldsfsgipCallback
(\ s a -> s{_pldsfsgipCallback = a})
instance GoogleRequest
ProjectsLocationsDataSetsFhirStoresGetIAMPolicy
where
type Rs
ProjectsLocationsDataSetsFhirStoresGetIAMPolicy
= Policy
type Scopes
ProjectsLocationsDataSetsFhirStoresGetIAMPolicy
= '["https://www.googleapis.com/auth/cloud-platform"]
requestClient
ProjectsLocationsDataSetsFhirStoresGetIAMPolicy'{..}
= go _pldsfsgipResource
_pldsfsgipOptionsRequestedPolicyVersion
_pldsfsgipXgafv
_pldsfsgipUploadProtocol
_pldsfsgipAccessToken
_pldsfsgipUploadType
_pldsfsgipCallback
(Just AltJSON)
healthcareService
where go
= buildClient
(Proxy ::
Proxy
ProjectsLocationsDataSetsFhirStoresGetIAMPolicyResource)
mempty
| brendanhay/gogol | gogol-healthcare/gen/Network/Google/Resource/Healthcare/Projects/Locations/DataSets/FhirStores/GetIAMPolicy.hs | mpl-2.0 | 7,325 | 0 | 16 | 1,463 | 807 | 472 | 335 | 129 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.DescribeSpotPriceHistory
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Describes the Spot Price history. The prices returned are listed in
-- chronological order, from the oldest to the most recent, for up to the past
-- 90 days. For more information, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-spot-instances-history.html Spot Instance Pricing History> in the /Amazon Elastic Compute Cloud User Guide/.
--
-- When you specify a start and end time, this operation returns the prices of
-- the instance types within the time range that you specified and the time when
-- the price changed. The price is valid within the time period that you
-- specified; the response merely indicates the last time that the price changed.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeSpotPriceHistory.html>
module Network.AWS.EC2.DescribeSpotPriceHistory
(
-- * Request
DescribeSpotPriceHistory
-- ** Request constructor
, describeSpotPriceHistory
-- ** Request lenses
, dsphAvailabilityZone
, dsphDryRun
, dsphEndTime
, dsphFilters
, dsphInstanceTypes
, dsphMaxResults
, dsphNextToken
, dsphProductDescriptions
, dsphStartTime
-- * Response
, DescribeSpotPriceHistoryResponse
-- ** Response constructor
, describeSpotPriceHistoryResponse
-- ** Response lenses
, dsphrNextToken
, dsphrSpotPriceHistory
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
data DescribeSpotPriceHistory = DescribeSpotPriceHistory
{ _dsphAvailabilityZone :: Maybe Text
, _dsphDryRun :: Maybe Bool
, _dsphEndTime :: Maybe ISO8601
, _dsphFilters :: List "Filter" Filter
, _dsphInstanceTypes :: List "InstanceType" InstanceType
, _dsphMaxResults :: Maybe Int
, _dsphNextToken :: Maybe Text
, _dsphProductDescriptions :: List "ProductDescription" Text
, _dsphStartTime :: Maybe ISO8601
} deriving (Eq, Read, Show)
-- | 'DescribeSpotPriceHistory' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dsphAvailabilityZone' @::@ 'Maybe' 'Text'
--
-- * 'dsphDryRun' @::@ 'Maybe' 'Bool'
--
-- * 'dsphEndTime' @::@ 'Maybe' 'UTCTime'
--
-- * 'dsphFilters' @::@ ['Filter']
--
-- * 'dsphInstanceTypes' @::@ ['InstanceType']
--
-- * 'dsphMaxResults' @::@ 'Maybe' 'Int'
--
-- * 'dsphNextToken' @::@ 'Maybe' 'Text'
--
-- * 'dsphProductDescriptions' @::@ ['Text']
--
-- * 'dsphStartTime' @::@ 'Maybe' 'UTCTime'
--
describeSpotPriceHistory :: DescribeSpotPriceHistory
describeSpotPriceHistory = DescribeSpotPriceHistory
{ _dsphDryRun = Nothing
, _dsphStartTime = Nothing
, _dsphEndTime = Nothing
, _dsphInstanceTypes = mempty
, _dsphProductDescriptions = mempty
, _dsphFilters = mempty
, _dsphAvailabilityZone = Nothing
, _dsphMaxResults = Nothing
, _dsphNextToken = Nothing
}
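-- Illustrative sketch (not part of the generated module): filling in optional
-- fields with the lenses below. The (&), (?~) and (.~) operators are assumed
-- to be the usual Control.Lens combinators; the values are made up.
-- > let rq = describeSpotPriceHistory
-- >            & dsphMaxResults ?~ 100
-- >            & dsphProductDescriptions .~ ["Linux/UNIX"]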
-- | Filters the results by the specified Availability Zone.
dsphAvailabilityZone :: Lens' DescribeSpotPriceHistory (Maybe Text)
dsphAvailabilityZone =
lens _dsphAvailabilityZone (\s a -> s { _dsphAvailabilityZone = a })
-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have the
-- required permissions, the error response is 'DryRunOperation'. Otherwise, it is 'UnauthorizedOperation'.
dsphDryRun :: Lens' DescribeSpotPriceHistory (Maybe Bool)
dsphDryRun = lens _dsphDryRun (\s a -> s { _dsphDryRun = a })
-- | The date and time, up to the current date, from which to stop retrieving the
-- price history data, in UTC format (for example, /YYYY/-/MM/-/DD/T/HH/:/MM/:/SS/Z).
dsphEndTime :: Lens' DescribeSpotPriceHistory (Maybe UTCTime)
dsphEndTime = lens _dsphEndTime (\s a -> s { _dsphEndTime = a }) . mapping _Time
-- | One or more filters.
--
-- 'availability-zone' - The Availability Zone for which prices should be
-- returned.
--
-- 'instance-type' - The type of instance (for example, 'm1.small').
--
-- 'product-description' - The product description for the Spot Price ('Linux/UNIX' | 'SUSE Linux' | 'Windows' | 'Linux/UNIX (Amazon VPC)' | 'SUSE Linux (Amazon VPC)' | 'Windows (Amazon VPC)').
--
-- 'spot-price' - The Spot Price. The value must match exactly (or use
-- wildcards; greater than or less than comparison is not supported).
--
-- 'timestamp' - The timestamp of the Spot Price history, in UTC format (for
-- example, /YYYY/-/MM/-/DD/T/HH/:/MM/:/SS/Z). You can use wildcards (* and ?). Greater than
-- or less than comparison is not supported.
--
--
dsphFilters :: Lens' DescribeSpotPriceHistory [Filter]
dsphFilters = lens _dsphFilters (\s a -> s { _dsphFilters = a }) . _List
-- | Filters the results by the specified instance types.
dsphInstanceTypes :: Lens' DescribeSpotPriceHistory [InstanceType]
dsphInstanceTypes =
lens _dsphInstanceTypes (\s a -> s { _dsphInstanceTypes = a })
. _List
-- | The maximum number of results to return in a single call. Specify a value
-- between 1 and 1000. The default value is 1000. To retrieve the remaining
-- results, make another call with the returned 'NextToken' value.
dsphMaxResults :: Lens' DescribeSpotPriceHistory (Maybe Int)
dsphMaxResults = lens _dsphMaxResults (\s a -> s { _dsphMaxResults = a })
-- | The token for the next set of results.
dsphNextToken :: Lens' DescribeSpotPriceHistory (Maybe Text)
dsphNextToken = lens _dsphNextToken (\s a -> s { _dsphNextToken = a })
-- | Filters the results by the specified basic product descriptions.
dsphProductDescriptions :: Lens' DescribeSpotPriceHistory [Text]
dsphProductDescriptions =
lens _dsphProductDescriptions (\s a -> s { _dsphProductDescriptions = a })
. _List
-- | The date and time, up to the past 90 days, from which to start retrieving the
-- price history data, in UTC format (for example, /YYYY/-/MM/-/DD/T/HH/:/MM/:/SS/Z).
dsphStartTime :: Lens' DescribeSpotPriceHistory (Maybe UTCTime)
dsphStartTime = lens _dsphStartTime (\s a -> s { _dsphStartTime = a }) . mapping _Time
data DescribeSpotPriceHistoryResponse = DescribeSpotPriceHistoryResponse
{ _dsphrNextToken :: Maybe Text
, _dsphrSpotPriceHistory :: List "item" SpotPrice
} deriving (Eq, Read, Show)
-- | 'DescribeSpotPriceHistoryResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dsphrNextToken' @::@ 'Maybe' 'Text'
--
-- * 'dsphrSpotPriceHistory' @::@ ['SpotPrice']
--
describeSpotPriceHistoryResponse :: DescribeSpotPriceHistoryResponse
describeSpotPriceHistoryResponse = DescribeSpotPriceHistoryResponse
{ _dsphrSpotPriceHistory = mempty
, _dsphrNextToken = Nothing
}
-- | The token required to retrieve the next set of results. This value is 'null'
-- when there are no more results to return.
dsphrNextToken :: Lens' DescribeSpotPriceHistoryResponse (Maybe Text)
dsphrNextToken = lens _dsphrNextToken (\s a -> s { _dsphrNextToken = a })
-- | The historical Spot Prices.
dsphrSpotPriceHistory :: Lens' DescribeSpotPriceHistoryResponse [SpotPrice]
dsphrSpotPriceHistory =
lens _dsphrSpotPriceHistory (\s a -> s { _dsphrSpotPriceHistory = a })
. _List
instance ToPath DescribeSpotPriceHistory where
toPath = const "/"
instance ToQuery DescribeSpotPriceHistory where
toQuery DescribeSpotPriceHistory{..} = mconcat
[ "AvailabilityZone" =? _dsphAvailabilityZone
, "DryRun" =? _dsphDryRun
, "EndTime" =? _dsphEndTime
, "Filter" `toQueryList` _dsphFilters
, "InstanceType" `toQueryList` _dsphInstanceTypes
, "MaxResults" =? _dsphMaxResults
, "NextToken" =? _dsphNextToken
, "ProductDescription" `toQueryList` _dsphProductDescriptions
, "StartTime" =? _dsphStartTime
]
instance ToHeaders DescribeSpotPriceHistory
instance AWSRequest DescribeSpotPriceHistory where
type Sv DescribeSpotPriceHistory = EC2
type Rs DescribeSpotPriceHistory = DescribeSpotPriceHistoryResponse
request = post "DescribeSpotPriceHistory"
response = xmlResponse
instance FromXML DescribeSpotPriceHistoryResponse where
parseXML x = DescribeSpotPriceHistoryResponse
<$> x .@? "nextToken"
<*> x .@? "spotPriceHistorySet" .!@ mempty
instance AWSPager DescribeSpotPriceHistory where
page rq rs
| stop (rs ^. dsphrNextToken) = Nothing
| otherwise = (\x -> rq & dsphNextToken ?~ x)
<$> (rs ^. dsphrNextToken)
| romanb/amazonka | amazonka-ec2/gen/Network/AWS/EC2/DescribeSpotPriceHistory.hs | mpl-2.0 | 9,724 | 0 | 11 | 2,019 | 1,191 | 712 | 479 | 118 | 1 |
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.MachineLearning
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Definition of the public APIs exposed by Amazon Machine Learning
--
-- /See:/ <http://docs.aws.amazon.com/machine-learning/latest/APIReference/Welcome.html AWS API Reference>
module Network.AWS.MachineLearning
(
-- * Service Configuration
machineLearning
-- * Errors
-- $errors
-- ** InternalServerException
, _InternalServerException
-- ** InvalidInputException
, _InvalidInputException
-- ** IdempotentParameterMismatchException
, _IdempotentParameterMismatchException
-- ** PredictorNotMountedException
, _PredictorNotMountedException
-- ** ResourceNotFoundException
, _ResourceNotFoundException
-- ** LimitExceededException
, _LimitExceededException
-- * Waiters
-- $waiters
-- ** MLModelAvailable
, mLModelAvailable
-- ** BatchPredictionAvailable
, batchPredictionAvailable
-- ** DataSourceAvailable
, dataSourceAvailable
-- ** EvaluationAvailable
, evaluationAvailable
-- * Operations
-- $operations
-- ** UpdateDataSource
, module Network.AWS.MachineLearning.UpdateDataSource
-- ** DeleteDataSource
, module Network.AWS.MachineLearning.DeleteDataSource
-- ** CreateDataSourceFromRedshift
, module Network.AWS.MachineLearning.CreateDataSourceFromRedshift
-- ** CreateDataSourceFromS3
, module Network.AWS.MachineLearning.CreateDataSourceFromS3
-- ** CreateMLModel
, module Network.AWS.MachineLearning.CreateMLModel
-- ** DeleteBatchPrediction
, module Network.AWS.MachineLearning.DeleteBatchPrediction
-- ** UpdateBatchPrediction
, module Network.AWS.MachineLearning.UpdateBatchPrediction
-- ** GetMLModel
, module Network.AWS.MachineLearning.GetMLModel
-- ** GetDataSource
, module Network.AWS.MachineLearning.GetDataSource
-- ** UpdateEvaluation
, module Network.AWS.MachineLearning.UpdateEvaluation
-- ** DeleteEvaluation
, module Network.AWS.MachineLearning.DeleteEvaluation
-- ** DeleteMLModel
, module Network.AWS.MachineLearning.DeleteMLModel
-- ** UpdateMLModel
, module Network.AWS.MachineLearning.UpdateMLModel
-- ** GetBatchPrediction
, module Network.AWS.MachineLearning.GetBatchPrediction
-- ** DescribeBatchPredictions (Paginated)
, module Network.AWS.MachineLearning.DescribeBatchPredictions
-- ** CreateDataSourceFromRDS
, module Network.AWS.MachineLearning.CreateDataSourceFromRDS
-- ** CreateEvaluation
, module Network.AWS.MachineLearning.CreateEvaluation
-- ** Predict
, module Network.AWS.MachineLearning.Predict
-- ** DeleteRealtimeEndpoint
, module Network.AWS.MachineLearning.DeleteRealtimeEndpoint
-- ** CreateBatchPrediction
, module Network.AWS.MachineLearning.CreateBatchPrediction
-- ** GetEvaluation
, module Network.AWS.MachineLearning.GetEvaluation
-- ** DescribeEvaluations (Paginated)
, module Network.AWS.MachineLearning.DescribeEvaluations
-- ** CreateRealtimeEndpoint
, module Network.AWS.MachineLearning.CreateRealtimeEndpoint
-- ** DescribeMLModels (Paginated)
, module Network.AWS.MachineLearning.DescribeMLModels
-- ** DescribeDataSources (Paginated)
, module Network.AWS.MachineLearning.DescribeDataSources
-- * Types
-- ** Algorithm
, Algorithm (..)
-- ** BatchPredictionFilterVariable
, BatchPredictionFilterVariable (..)
-- ** DataSourceFilterVariable
, DataSourceFilterVariable (..)
-- ** DetailsAttributes
, DetailsAttributes (..)
-- ** EntityStatus
, EntityStatus (..)
-- ** EvaluationFilterVariable
, EvaluationFilterVariable (..)
-- ** MLModelFilterVariable
, MLModelFilterVariable (..)
-- ** MLModelType
, MLModelType (..)
-- ** RealtimeEndpointStatus
, RealtimeEndpointStatus (..)
-- ** SortOrder
, SortOrder (..)
-- ** BatchPrediction
, BatchPrediction
, batchPrediction
, bpStatus
, bpLastUpdatedAt
, bpCreatedAt
, bpInputDataLocationS3
, bpMLModelId
, bpBatchPredictionDataSourceId
, bpBatchPredictionId
, bpCreatedByIAMUser
, bpName
, bpMessage
, bpOutputURI
-- ** DataSource
, DataSource
, dataSource
, dsStatus
, dsNumberOfFiles
, dsLastUpdatedAt
, dsCreatedAt
, dsDataSourceId
, dsRDSMetadata
, dsDataSizeInBytes
, dsCreatedByIAMUser
, dsName
, dsDataLocationS3
, dsComputeStatistics
, dsMessage
, dsRedshiftMetadata
, dsDataRearrangement
, dsRoleARN
-- ** Evaluation
, Evaluation
, evaluation
, eStatus
, ePerformanceMetrics
, eLastUpdatedAt
, eCreatedAt
, eInputDataLocationS3
, eMLModelId
, eCreatedByIAMUser
, eName
, eEvaluationId
, eMessage
, eEvaluationDataSourceId
-- ** MLModel
, MLModel
, mLModel
, mlmStatus
, mlmLastUpdatedAt
, mlmTrainingParameters
, mlmScoreThresholdLastUpdatedAt
, mlmCreatedAt
, mlmInputDataLocationS3
, mlmMLModelId
, mlmSizeInBytes
, mlmScoreThreshold
, mlmAlgorithm
, mlmCreatedByIAMUser
, mlmName
, mlmEndpointInfo
, mlmTrainingDataSourceId
, mlmMessage
, mlmMLModelType
-- ** PerformanceMetrics
, PerformanceMetrics
, performanceMetrics
, pmProperties
-- ** Prediction
, Prediction
, prediction
, pPredictedValue
, pPredictedLabel
, pPredictedScores
, pDetails
-- ** RDSDataSpec
, RDSDataSpec
, rdsDataSpec
, rdsdsDataSchemaURI
, rdsdsDataSchema
, rdsdsDataRearrangement
, rdsdsDatabaseInformation
, rdsdsSelectSqlQuery
, rdsdsDatabaseCredentials
, rdsdsS3StagingLocation
, rdsdsResourceRole
, rdsdsServiceRole
, rdsdsSubnetId
, rdsdsSecurityGroupIds
-- ** RDSDatabase
, RDSDatabase
, rdsDatabase
, rdsdInstanceIdentifier
, rdsdDatabaseName
-- ** RDSDatabaseCredentials
, RDSDatabaseCredentials
, rdsDatabaseCredentials
, rdsdcUsername
, rdsdcPassword
-- ** RDSMetadata
, RDSMetadata
, rdsMetadata
, rmSelectSqlQuery
, rmDataPipelineId
, rmDatabase
, rmDatabaseUserName
, rmResourceRole
, rmServiceRole
-- ** RealtimeEndpointInfo
, RealtimeEndpointInfo
, realtimeEndpointInfo
, reiCreatedAt
, reiEndpointURL
, reiEndpointStatus
, reiPeakRequestsPerSecond
-- ** RedshiftDataSpec
, RedshiftDataSpec
, redshiftDataSpec
, rDataSchemaURI
, rDataSchema
, rDataRearrangement
, rDatabaseInformation
, rSelectSqlQuery
, rDatabaseCredentials
, rS3StagingLocation
-- ** RedshiftDatabase
, RedshiftDatabase
, redshiftDatabase
, rdDatabaseName
, rdClusterIdentifier
-- ** RedshiftDatabaseCredentials
, RedshiftDatabaseCredentials
, redshiftDatabaseCredentials
, rdcUsername
, rdcPassword
-- ** RedshiftMetadata
, RedshiftMetadata
, redshiftMetadata
, redSelectSqlQuery
, redRedshiftDatabase
, redDatabaseUserName
-- ** S3DataSpec
, S3DataSpec
, s3DataSpec
, sdsDataSchema
, sdsDataSchemaLocationS3
, sdsDataRearrangement
, sdsDataLocationS3
) where
import Network.AWS.MachineLearning.CreateBatchPrediction
import Network.AWS.MachineLearning.CreateDataSourceFromRDS
import Network.AWS.MachineLearning.CreateDataSourceFromRedshift
import Network.AWS.MachineLearning.CreateDataSourceFromS3
import Network.AWS.MachineLearning.CreateEvaluation
import Network.AWS.MachineLearning.CreateMLModel
import Network.AWS.MachineLearning.CreateRealtimeEndpoint
import Network.AWS.MachineLearning.DeleteBatchPrediction
import Network.AWS.MachineLearning.DeleteDataSource
import Network.AWS.MachineLearning.DeleteEvaluation
import Network.AWS.MachineLearning.DeleteMLModel
import Network.AWS.MachineLearning.DeleteRealtimeEndpoint
import Network.AWS.MachineLearning.DescribeBatchPredictions
import Network.AWS.MachineLearning.DescribeDataSources
import Network.AWS.MachineLearning.DescribeEvaluations
import Network.AWS.MachineLearning.DescribeMLModels
import Network.AWS.MachineLearning.GetBatchPrediction
import Network.AWS.MachineLearning.GetDataSource
import Network.AWS.MachineLearning.GetEvaluation
import Network.AWS.MachineLearning.GetMLModel
import Network.AWS.MachineLearning.Predict
import Network.AWS.MachineLearning.Types
import Network.AWS.MachineLearning.UpdateBatchPrediction
import Network.AWS.MachineLearning.UpdateDataSource
import Network.AWS.MachineLearning.UpdateEvaluation
import Network.AWS.MachineLearning.UpdateMLModel
import Network.AWS.MachineLearning.Waiters
{- $errors
Error matchers are designed for use with the functions provided by
<http://hackage.haskell.org/package/lens/docs/Control-Exception-Lens.html Control.Exception.Lens>.
This allows catching (and rethrowing) service specific errors returned
by 'MachineLearning'.
-}
{- $operations
Some AWS operations return results that are incomplete and require subsequent
requests in order to obtain the entire result set. The process of sending
subsequent requests to continue where a previous request left off is called
pagination. For example, the 'ListObjects' operation of Amazon S3 returns up to
1000 objects at a time, and you must send subsequent requests with the
appropriate Marker in order to retrieve the next page of results.
Operations that have an 'AWSPager' instance can transparently perform subsequent
requests, correctly setting Markers and other request facets to iterate through
the entire result set of a truncated API operation. Operations which support
this have an additional note in the documentation.
Many operations have the ability to filter results on the server side. See the
individual operation parameters for details.
-}
{- $waiters
Waiters poll by repeatedly sending a request until some remote success condition
configured by the 'Wait' specification is fulfilled. The 'Wait' specification
determines how many attempts should be made, in addition to delay and retry strategies.
-}
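{- Illustrative sketch (not part of the library): consuming a paginated
   operation re-exported from this module. 'env', 'runResourceT', 'runAWS',
   'paginate' and the conduit plumbing are assumed to come from "Network.AWS"
   and "Data.Conduit"/"Data.Conduit.List"; exact operators vary with the
   amazonka version in use.
   > models <- runResourceT . runAWS env $
   >             paginate describeMLModels $$ CL.consume
-}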
| olorin/amazonka | amazonka-ml/gen/Network/AWS/MachineLearning.hs | mpl-2.0 | 10,864 | 0 | 5 | 2,297 | 986 | 715 | 271 | 210 | 0 |
module Reinforcement (
Actor,
isStudent,
actorNNet,
selfTrain
) where
import Arithmetic (
Range,
fromBytes,
DecodeT(..),
runDecode,
runDecodeT,
randomA,
modelDecode,
truncate
)
import Board
import Neural
import Control.Monad.Identity
import Control.Monad.Morph (hoist)
import Control.Monad.Writer
import Data.List
import Data.Monoid
import qualified Data.Map as M
import System.Random
data Actor m =
Teacher Double NNet |
Student Double NNet |
Outsider (Player -> Board -> m Board) (Player -> Board -> m ())
actorNNet :: Actor m -> Maybe NNet
actorNNet (Teacher _ n) = Just n
actorNNet (Student _ n) = Just n
actorNNet _ = Nothing
actorSolidarity :: Actor m -> Double
actorSolidarity (Teacher s _) = s
actorSolidarity (Student s _) = s
actorSolidarity _ = 0
isStudent (Student _ _) = True
isStudent _ = False
-- Note: the first argument of this function represents the player
-- that just moved. The Board arguments are the previous and current
-- boards.
actorNotify :: (Monad m) =>
Player -> Board -> Board -> Actor m -> m (Actor m)
actorNotify p b1 b2 (Student c n) = return $ Student c $ updateENet p b1 b2 n
actorNotify _ _ _ t@(Teacher _ _) = return t
actorNotify p b1 b2 o@(Outsider _ nf) = do
nf p b2
return o
actorMove :: (Monad m) => Actor m -> Player -> Board -> DecodeT m Board
actorMove a p b = case actorNNet a of
Just nn -> selectMove (actorSolidarity a) nn p b
Nothing -> let
Outsider mf _ = a
in lift $ mf p b
{-
Represent the board as a list of doubles for feeding to the neural network.
Explanation of magic numbers:
A tile can face 6 directions, there are up to 3 players in the game,
6*3 = 18.
So each space on the grid gets 18 inputs, only 1 of which will be set to 1
at any given time.
There are 91 spaces on the grid, so the total number of inputs is 1638.
-}
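-- Worked example of the arithmetic above (illustrative, not in the original):
-- each space gets 3 * 6 = 18 slots; a tile owned by the second listed player
-- (offset 6) facing the direction with 'fromEnum d == 2' sets slot 6 + 2 = 8
-- of that space's block, and 91 spaces * 18 slots = 1638 network inputs total.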
mapBoard :: Player -> Board -> [Double]
mapBoard p b = concatMap mapTile boardRange where
mapTile l = case M.lookup l b of
Nothing -> replicate 18 0
Just (p,d) -> let
tn = tileN p d
in replicate tn 0 ++ [1] ++ replicate (17 - tn) 0
playing = whichPlayersFrom p b
pns = M.fromList $ zip playing [0,6..]
tileN p d = pns M.! p + fromEnum d
evaluate :: NNet -> Player -> Board -> M.Map Player (Double,Double)
evaluate nn p b = let
playing = whichPlayersFrom p b
everyoneLost = M.fromList $ zip [Red .. Blue] $ repeat (0,0)
nnInputs = mapBoard p b
nnOutputs = feedforward nn nnInputs
outputPairs = op nnOutputs where
op (a:b:r) = (a,b) : op r
op _ = []
outputPairs' = case length playing of
2 -> map (\(_,v) -> (1,v)) outputPairs
3 -> outputPairs
1 -> [(1,1)]
in M.union (M.fromList $ zip playing outputPairs') everyoneLost
isomorphisms b = nub $ do
b' <- [b,flipBoard b]
b' : map (`rotateBoard` b') [1..5]
updateENet :: Player -> Board -> Board -> NNet -> NNet
updateENet p bo bt n = let
playing = whichPlayersFrom p bo
nextPlayer = cycle playing !! 1
er = evaluate n nextPlayer bt
mo = case length playing of
3 -> map Just $ concatMap ((\(a,b) -> [a,b]) . (er M.!)) playing
2 -> take 6 $
concatMap ((\(_,b) -> [Nothing, Just b]) . (er M.!)) playing ++
repeat Nothing
ufs = map (\b ->
backpropSome 0.1 (mapBoard p b) mo
) $ isomorphisms bo
in foldl1' (.) ufs n
selectMove :: (Monad m) => Double -> NNet -> Player -> Board ->
DecodeT m Board
selectMove s n p b = let
playing = whichPlayersFrom p b
nextPlayer = cycle playing !! 1
sf = case length playing of
2 -> flip const
3 -> \b a -> b*s + a
in modelDecode $ map (\b' ->
(uncurry sf (evaluate n nextPlayer b' M.! p),b')
) $ genMoves p b
-- I am using a transformer stack, yet I'm manually passing state. Yes I know.
continueGame :: (Monad m) =>
Player ->
Board ->
M.Map Player (Actor m) ->
DecodeT (WriterT (Endo [(M.Map Player (Actor m),Board)]) m) Board
continueGame cp' b a = do
let
playing = whichPlayersFrom cp' b
cp = head playing
if null $ tail playing
then do
lift $ tell $ Endo ([(a,b)]++)
return b
else do
let ca = a M.! cp
move <- hoist lift $ actorMove ca cp b
a' <- lift $ lift $ fmap M.fromList $ mapM (\(p1,a1) -> do
a2 <- actorNotify cp b move a1
return (p1,a2)
) $ M.toList a
lift $ tell $ Endo ([(a',move)]++)
Arithmetic.truncate $ toRational . (fromRational :: Rational -> Double)
continueGame (playing !! 1) move a'
selfTrain :: NNet -> NNet
-> DecodeT
(WriterT (Endo [(M.Map Player (Actor Identity),Board)]) Identity)
Board
selfTrain t s = do
~(start,players) <- modelDecode
[(1,(startBoard2,[Blue,Red])),(1,(startBoard3,[Blue,Green,Red]))]
student <- modelDecode $
map (\x -> (1,x)) players
let
teacherA = Teacher 0.5 t
studentA = Student 0.5 s
actors = M.insert student studentA $ M.fromList $ map
(\p -> (p,teacherA))
players
lift $ tell $ Endo ((actors,start):)
continueGame Blue start actors
| quickdudley/varroa | Reinforcement.hs | agpl-3.0 | 5,006 | 0 | 20 | 1,223 | 2,054 | 1,062 | 992 | 142 | 4 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Store.Transcoder
( runTranscoder
, initTranscoder
, transcodeEnabled
) where
import Data.Maybe (isJust)
import Data.Version (showVersion)
import System.Process (readProcessWithExitCode)
import System.Exit (ExitCode(..))
import Paths_databrary (version, getDataFileName)
import Store.Types
runTranscoder :: Transcoder -> [String] -> IO (ExitCode, String, String)
runTranscoder (Transcoder cmd arg _) args =
readProcessWithExitCode cmd (arg ++ args) ""
-- | Ensures the configured transcoder works, returning it as a capability. Will
-- throw a synchronous exception if the transcoder can't be run.
initTranscoder :: TranscoderConfig -> IO (Maybe Transcoder)
initTranscoder tconf@TranscoderConfig {..} = case (transcoderHost, transcoderDir) of
(Nothing, Nothing) -> return Nothing
_ -> Just <$> do
cmd <- getDataFileName "transctl.sh"
let t =
Transcoder cmd
(["-v", showVersion version]
++ maybe [] (\d -> ["-d", d]) transcoderDir
++ maybe [] (\h -> ["-h", h]) transcoderHost
++ maybe [] (\m -> ["-m", m]) transcoderMount
)
tconf
(r, out, err) <- runTranscoder t ["-t"]
case r of
ExitSuccess -> return t
ExitFailure e ->
fail
("initTranscoder test: "
++ show e
++ "\n=== STDOUT ===\n"
++ out
++ "\n=== STDERR ===\n"
++ err)
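-- Illustrative sketch (not part of the original module), assuming a
-- 'TranscoderConfig' value named 'transcoderConfig' is in scope:
-- > mTc <- initTranscoder transcoderConfig
-- > case mTc of
-- >   Nothing -> putStrLn "transcoding disabled"
-- >   Just tc -> do
-- >     (code, out, err) <- runTranscoder tc ["-t"]
-- >     print code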
transcodeEnabled :: Storage -> Bool
transcodeEnabled = isJust . storageTranscoder
| databrary/databrary | src/Store/Transcoder.hs | agpl-3.0 | 1,730 | 0 | 22 | 543 | 428 | 233 | 195 | 40 | 3 |
module API(CLIInterface(..), testio) where
import Data.Typeable
data CLIInterface = CLIInterface {
repl :: IO ()
} deriving Typeable
testio :: CLIInterface
testio = CLIInterface { repl = return () }
| stepcut/plugins | testsuite/load/plain/api/API.hs | lgpl-2.1 | 206 | 0 | 10 | 37 | 67 | 40 | 27 | 7 | 1 |
{-# LANGUAGE FlexibleInstances, UndecidableInstances #-}
module HN.Optimizer.ClassyLattice where
import Compiler.Hoopl
import Data.Maybe
import qualified Data.Map as M
class Lattice a where
join :: OldFact a -> NewFact a -> Maybe a
join = fromJoinFun $ fact_join dataflowLattice
bot :: a
bot = fact_bot dataflowLattice
dataflowLattice :: DataflowLattice a
dataflowLattice = DataflowLattice
{ fact_name = "hooplLattice"
, fact_bot = bot
, fact_join = \_ o @ (OldFact oo) n -> maybe (NoChange, oo) ((,) SomeChange) $ join o n
}
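-- Illustrative sketch (not part of the module): a minimal instance only needs
-- 'bot' and 'join'; the 'dataflowLattice' default is then derived from them
-- (and, conversely, an instance may define just 'dataflowLattice').
-- > instance Lattice Bool where
-- >   bot = False
-- >   join (OldFact o) (NewFact n) = if n && not o then Just True else Nothing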
instance Lattice a => Monoid a where
mappend = mereJoin
mempty = bot
instance (Lattice v, Ord k) => Lattice (M.Map k v) where
bot = M.empty
join = fromJoinFun $ joinMaps $ fact_join dataflowLattice
fromJoinFun :: JoinFun a -> OldFact a -> NewFact a -> Maybe a
fromJoinFun jf a b = case jf (error "No label") a b of
(SomeChange, n) -> Just n
_ -> Nothing
mereJoin a b = fromMaybe a $ join (OldFact a) (NewFact b)
instance (Lattice a, Lattice b) => Lattice (a, b) where
dataflowLattice = pairLattice dataflowLattice dataflowLattice
| ingvar-lynn/HNC | HN/Optimizer/ClassyLattice.hs | lgpl-3.0 | 1,083 | 5 | 14 | 202 | 400 | 209 | 191 | 28 | 2 |
import UI.NCurses
import Core (readProcStat)
main :: IO ()
main = runCurses $ do
setEcho False
w <- defaultWindow
updateWindow w $ do
moveCursor 1 10
drawString "Hello world!"
moveCursor 3 30
drawString "(press q to quit)"
moveCursor 0 0
render
waitFor w (\ev -> ev == EventCharacter 'q' || ev == EventCharacter 'Q')
waitFor :: Window -> (Event -> Bool) -> Curses ()
waitFor w p = loop where
loop = do
ev <- getEvent w Nothing
case ev of
Nothing -> loop
Just ev' -> if p ev' then return () else loop
| TOSPIO/vtop | src/Main.hs | lgpl-3.0 | 606 | 0 | 14 | 201 | 221 | 102 | 119 | 21 | 3 |
module WordNumber where
import Data.List (intercalate)
digitToWord :: Int -> String
digitToWord n =
case n of
0 -> "zero"
1 -> "one"
2 -> "two"
3 -> "three"
4 -> "four"
5 -> "five"
6 -> "six"
7 -> "seven"
8 -> "eight"
9 -> "nine"
_ -> "wat"
digits :: Int -> [Int]
digits n
| n < 10 = [n]
| otherwise = digits (fst res) ++ [snd res]
where res = divMod n 10
wordNumber :: Int -> String
wordNumber n =
intercalate "-" $ map digitToWord $ digits n
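-- Illustrative example (not part of the exercise):
-- > wordNumber 12324546 == "one-two-three-two-four-five-four-six"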
| thewoolleyman/haskellbook | 14/07/maor/WordNumber.hs | unlicense | 505 | 0 | 9 | 155 | 208 | 105 | 103 | 24 | 11 |
{-# LANGUAGE OverloadedStrings #-}
module Commi.Util(
getElementPosition
, styleBlock
, getMousePosition
, getDocumentSize
, cbutton
, cbuttonM
, timeout
, clearTimers
, wloop
, panel
, labelRow
, binToInt
, binToDouble
, debugMsg
, listIndex
, listSet
) where
import Prelude hiding (div, id)
import qualified Prelude
import Haste
import Haste.Foreign
import Haste.App (MonadIO)
import Haste.Prim
import Control.Applicative
import Control.Monad.IO.Class (liftIO)
import Control.Monad
import Data.Monoid
import Haste.Perch (ToElem, Perch, nelem, child, span)
import Haste.HPlay.View hiding (head)
import System.IO.Unsafe (unsafePerformIO)
import Data.IORef
import Unsafe.Coerce
import Debug.Trace
newtype JQuery = JQuery JSAny
newtype JPosition = JPosition JSAny
newtype JMouse = JMouse JSAny
newtype JDocument = JDocument JSAny
jsJquery :: JSString -> IO JQuery
jsJquery v = JQuery <$> ffi "(function(e) {return $(e);})" v
jsOffset :: JQuery -> IO JPosition
jsOffset (JQuery jsany) = JPosition <$> ffi "(function(jq) {return jq.offset();})" jsany
jsLeft :: JPosition -> IO Int
jsLeft (JPosition jsany) = ffi "(function(jp) {return jp.left;})" jsany
jsTop :: JPosition -> IO Int
jsTop (JPosition jsany) = ffi "(function(jp) {return jp.top;})" jsany
jsMouse :: IO JMouse
jsMouse = JMouse <$> ffi "(function() {return mouse;})"
jsMouseX :: JMouse -> IO Int
jsMouseX (JMouse jsany) = ffi "(function(m) {return m.x;})" jsany
jsMouseY :: JMouse -> IO Int
jsMouseY (JMouse jsany) = ffi "(function(m) {return m.y;})" jsany
jsDocument :: IO JDocument
jsDocument = JDocument <$> ffi "(function() {return document.documentElement;})"
jsPageScrollX :: JDocument -> IO Int
jsPageScrollX (JDocument jsany) =
ffi "(function(doc) {return (window.pageXOffset || doc.scrollLeft) - (doc.clientLeft || 0);})" jsany
jsPageScrollY :: JDocument -> IO Int
jsPageScrollY (JDocument jsany) =
ffi "(function(doc) {return (window.pageYOffset || doc.scrollTop) - (doc.clientTop || 0);})" jsany
jsDocumentWidth :: IO Int
jsDocumentWidth = ffi "(function() {return $(document).width();})"
jsDocumentHeight :: IO Int
jsDocumentHeight = ffi "(function() {return $(document).height();})"
jsSetOnPageLoad :: JSFun a -> IO ()
jsSetOnPageLoad (JSFun f) = ffi "(function (c) { document.onload = c; })"
$ (unsafeCoerce f :: JSAny)
-- | Since we can't name it '$', let's just call it 'j'.
j :: JSString -> (JQuery -> IO a) -> IO a
j s action = jsJquery s >>= action
-- | Returns the page position of the element matching the given selector
getElementPosition :: String -> IO (Int, Int)
getElementPosition sel = j (toJSStr sel) $ \jq -> do
pos <- jsOffset jq
xpos <- jsLeft pos
ypos <- jsTop pos
return (xpos, ypos)
getMousePosition :: IO (Int, Int)
getMousePosition = do
m <- jsMouse
x <- jsMouseX m
y <- jsMouseY m
doc <- jsDocument
sx <- jsPageScrollX doc
sy <- jsPageScrollY doc
return (x+sx, y+sy)
getDocumentSize :: IO (Int, Int)
getDocumentSize = do
x <- jsDocumentWidth
y <- jsDocumentHeight
return (x, y)
styleBlock :: ToElem a => a -> Perch
styleBlock cont = nelem "style" `child` cont
-- | Active button. When clicked, returns the first parameter
cbutton :: a -> String -> Widget a
cbutton x slabel= static $ do
button slabel ! id slabel ! atr "class" "btn btn-primary" ! atr "type" "button" ! atr "style" "margin-right: 10px; margin-left: 10px; margin-top: 3px" `pass` OnClick
return x
`continuePerch` slabel
cbuttonM :: IO a -> String -> Widget a
cbuttonM x slabel= static $ do
button slabel ! id slabel ! atr "class" "btn btn-primary" ! atr "type" "button" ! atr "style" "margin-right: 10px; margin-left: 10px; margin-top: 3px" `pass` OnClick
liftIO x
`continuePerch` slabel
timeoutStore :: IORef [String]
timeoutStore = unsafePerformIO $ newIORef []
storeTimeout :: String -> IO ()
storeTimeout ids = do
idss <- readIORef timeoutStore
writeIORef timeoutStore $ ids : idss
removeTimeout :: String -> IO ()
removeTimeout ids = do
idss <- readIORef timeoutStore
writeIORef timeoutStore $ filter (/= ids) idss
hasTimeout :: String -> IO Bool
hasTimeout ids = elem ids <$> readIORef timeoutStore
peekTimeout :: String -> Widget ()
peekTimeout ids = do
idss <- liftIO $ readIORef timeoutStore
case ids `elem` idss of
True -> noWidget
False -> return ()
clearTimers :: IO ()
clearTimers = writeIORef timeoutStore []
timeout :: Int -> Widget a -> Widget a
timeout mss wa = do
id <- genNewId
liftIO $ storeTimeout id
cont <- getCont
ht <- liftIO $ hasTimeout id
when ht $ setTimeout mss $ do
ht' <- liftIO $ hasTimeout id
when ht' $ do
removeTimeout id
runCont cont
peekTimeout id
wa
wloop :: a -> (a -> Widget a) -> Widget ()
wloop initialState wa = View $ do
nid <- genNewId
FormElm form mx <- runView $ go nid initialState
return $ FormElm ((Haste.Perch.span ! atr "id" nid $ noHtml) <> form) mx
where
go nid state = do
nextState <- at nid Insert (wa state)
go nid nextState
panel :: String -> Perch -> Perch
panel ts bd = div ! atr "class" "panel panel-default" $ mconcat [
div ! atr "class" "panel-heading" $ h3 ! atr "class" "panel-title" $ toJSString ts
, div ! atr "class" "panel-body" $ bd
]
labelRow :: Int -> String -> String -> Perch
labelRow ri ts vs = div ! atr "class" "row" $ mconcat [
div ! atr "class" (colClass ri) $ label $ toJSString ts
, div ! atr "class" (colClass (12-ri)) $ toJSString vs
]
where colClass i = "col-md-" ++ show i
binToInt :: [Int] -> Int
binToInt chromosome = binToInt' 0 base chromosome
where base = 2^(length chromosome-1)
binToInt':: Int -> Int -> [Int] -> Int
binToInt' acc 0 [x] = acc+x
binToInt' acc base (x:xs) = binToInt' (acc+x*base) newBase xs
where newBase = Prelude.div base 2
binToInt' acc _ _ = acc
binToDouble:: Double -> [Int] -> Double
binToDouble n chromosome = binToDouble' 0 base chromosome
where base = 2**(n-1) -- | n = 1 means 1 digit before "."; means (2^0)*digit
binToDouble':: Double -> Double -> [Int] -> Double
binToDouble' acc _ [] = acc
binToDouble' acc base (x:xs) = binToDouble' newAcc newBase xs
where
newAcc = acc + (fromIntegral x)*base
newBase = base / 2.0
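-- Illustrative examples (not part of the module):
-- > binToInt [1,0,1,1] == 11
-- > binToDouble 2 [1,0,1,1] == 2.75   -- two digits before the binary point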
debugMsg :: (MonadIO m, Show a) => a -> m ()
debugMsg = liftIO . writeLog . show
listIndex :: {-Show a =>-} [a] -> Int -> a
listIndex as i = {-traceShow (as, i) $ -} as !! i
listSet :: [a] -> Int -> a -> a -> [a]
listSet as i defaultA a
| i < length as = take i as ++ [a] ++ drop (i+1) as
| otherwise = listSet (as ++ replicate (i + 1 - length as) defaultA) i defaultA a | Teaspot-Studio/bmstu-commi-genetics-haste | Commi/Util.hs | bsd-3-clause | 6,650 | 0 | 15 | 1,379 | 2,285 | 1,147 | 1,138 | 175 | 2 |
{-# OPTIONS_GHC -Wall #-}
{-# LANGUAGE Safe #-}
{-# LANGUAGE TypeFamilies #-}
module Data.FingerTree.Sequence where
import Data.Monoid (Monoid(..), (<>))
import Control.DeepSeq.Generics
import Data.Foldable (Foldable)
import qualified Data.Foldable as F
import Data.FingerTree (FingerTree, Measured(..), Split(..))
import qualified Data.FingerTree as FT
type Index = Int
newtype Size = Size { getSize :: Index } deriving (Eq, Show, Ord)
instance NFData Size where
rnf = rnf . getSize
instance Monoid Size where
mempty = Size 0
{-# INLINE mappend #-}
(Size a) `mappend` (Size b) = Size $ a + b
newtype Element a = Element { getElement :: a } deriving (Eq, Ord)
instance NFData a => NFData (Element a) where
rnf = rnf . getElement
instance Measured (Element a) where
type Measure (Element a) = Size
{-# INLINE measure #-}
measure (Element _) = Size 1
newtype Sequence a = Sequence { getSequence :: (FingerTree Size (Element a)) }
deriving (Eq, Ord)
instance NFData a => NFData (Sequence a) where
rnf = rnf . getSequence
instance Monoid (Sequence a) where
mempty = Sequence mempty
(Sequence a) `mappend` (Sequence b) = Sequence $ a <> b
empty :: Sequence a
empty = Sequence FT.Empty
(<|) :: a -> Sequence a -> Sequence a
a <| sq = Sequence $ (Element a) FT.<| (getSequence sq)
(|>) :: Sequence a -> a -> Sequence a
sq |> a = Sequence $ (getSequence sq) FT.|> (Element a)
fromFoldable :: Foldable f => f a -> Sequence a
fromFoldable = F.foldr' (<|) empty
length :: Sequence a -> Index
length = getSize . measure . getSequence
(!) :: Sequence a -> Index -> a
(Sequence s) ! i = getElement x
where
Split _ x _ = FT.splitTree (Size i <) (Size 0) s
splitAt :: Index -> Sequence a -> (Sequence a, Sequence a)
splitAt i (Sequence x) = (Sequence a, Sequence b)
where
(a, b) = FT.split (Size i <) x
viewL :: Sequence a -> FT.ViewL Sequence a
viewL (Sequence s) = case FT.viewL s of
FT.EmptyL -> FT.EmptyL
(Element a) FT.:< tree -> a FT.:< Sequence tree
viewR :: Sequence a -> FT.ViewR Sequence a
viewR (Sequence s) = case FT.viewR s of
FT.EmptyR -> FT.EmptyR
tree FT.:> (Element a) -> Sequence tree FT.:> a
drop :: Index -> Sequence a -> Sequence a
drop n = Sequence . snd . FT.split (Size n <) . getSequence
take :: Index -> Sequence a -> Sequence a
take n = Sequence . fst . FT.split (Size n <) . getSequence
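-- Illustrative examples (not part of the module):
-- > let s = fromFoldable [10, 20, 30 :: Int]
-- > s ! 1 == 20
-- > Data.FingerTree.Sequence.take 2 s ! 1 == 20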
| peddie/fingertrees | src/Data/FingerTree/Sequence.hs | bsd-3-clause | 2,384 | 0 | 10 | 502 | 1,018 | 536 | 482 | 60 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE QuasiQuotes #-}
module Feldspar.Multicore.Compile.Parallella.Host where
import qualified Data.Map as Map
import qualified Data.Set as Set
import Feldspar.Multicore.Compile.Parallella.Access
import Feldspar.Multicore.Compile.Parallella.Channel
import Feldspar.Multicore.Compile.Parallella.Core
import Feldspar.Multicore.Compile.Parallella.Esdk
import Feldspar.Multicore.Compile.Parallella.Imports
import Feldspar.Multicore.Compile.Parallella.State
import Feldspar.Run.Concurrent (forkWithId)
import Feldspar.Run.Compile (env0, ProgC, translate, TargetCMD)
import qualified Language.C.Monad as C (_funUsedVars, _globals, _includes, CEnv, cenvToCUnit, wrapMain)
import Language.C.Quote.C
import qualified Language.C.Syntax as C (Id (..))
import qualified Language.Embedded.Concurrent.CMD as Imp
import qualified Language.Embedded.Imperative.CMD as Imp
--------------------------------------------------------------------------------
-- Forking threads
--------------------------------------------------------------------------------
compThreadCMD :: Imp.ThreadCMD (Param3 RunGen Data PrimType') a -> RunGen a
compThreadCMD (Imp.ForkWithId p) = do
s <- genState
let p' = evalGen s . p
fromRun $ forkWithId p'
instance Interp Imp.ThreadCMD RunGen (Param2 Data PrimType')
where interp = compThreadCMD
--------------------------------------------------------------------------------
-- Core-local array fetch and flush
--------------------------------------------------------------------------------
compLocalBulkArrCMD :: (BulkArrCMD LArr) (Param3 RunGen exp pred) a -> RunGen a
compLocalBulkArrCMD (WriteArr offset spm range ram) =
compLocalCopy "host_write_local" spm ram offset range
compLocalBulkArrCMD (ReadArr offset spm range ram) =
compLocalCopy "host_read_local" spm ram offset range
instance Interp (BulkArrCMD LArr) RunGen (Param2 exp pred)
where interp = compLocalBulkArrCMD
compLocalCopy :: PrimType a => String
-> DLArr a -> Arr (Data a)
-> Data Index -> IndexRange -> RunGen ()
compLocalCopy op spm ram offset (lower, upper) = do
groupAddr <- gets group
(r, c) <- gets $ groupCoordsForName (arrayRefName spm)
lift $ addInclude "<feldspar-parallella.h>"
lift $ callProc op
[ groupAddr
, valArg $ value r
, valArg $ value c
, arrArg (unwrapArr spm)
, arrArg ram
, valArg offset
, valArg lower
, valArg upper
]
--------------------------------------------------------------------------------
-- Shared array fetch and flush
--------------------------------------------------------------------------------
compSharedBulkArrCMD :: (BulkArrCMD SArr) (Param3 RunGen exp pred) a -> RunGen a
compSharedBulkArrCMD (WriteArr offset spm range ram) =
compSharedCopy "host_write_shared" spm ram offset range
compSharedBulkArrCMD (ReadArr offset spm range ram) =
compSharedCopy "host_read_shared" spm ram offset range
instance Interp (BulkArrCMD SArr) RunGen (Param2 exp pred)
where interp = compSharedBulkArrCMD
compSharedCopy :: PrimType a => String
-> DSArr a -> Arr (Data a)
-> Data Index -> IndexRange -> RunGen ()
compSharedCopy op spm ram offset (lower, upper) = do
shmRef <- gets $ shmRefForName $ arrayRefName spm
lift $ addInclude "<feldspar-parallella.h>"
lift $ callProc op
[ shmRef
, arrArg ram
, valArg offset
, valArg lower
, valArg upper
]
--------------------------------------------------------------------------------
-- Halting cores
--------------------------------------------------------------------------------
compHostHaltCMD :: CoreHaltCMD (Param3 RunGen exp pred) a -> RunGen a
compHostHaltCMD (HaltCore (CoreRefComp coreId)) = do
groupAddr <- gets group
let (r, c) = groupCoord coreId
lift $ callProc "e_halt" [ groupAddr, valArg $ value r, valArg $ value c ]
instance Interp CoreHaltCMD RunGen (Param2 exp pred)
where interp = compHostHaltCMD
--------------------------------------------------------------------------------
-- Spawning core programs
--------------------------------------------------------------------------------
moduleName :: CoreId -> String
moduleName = ("core" ++) . show
compMulticoreCMD :: MulticoreCMD (Param3 RunGen exp pred) a -> RunGen a
compMulticoreCMD (OnCore coreId comp) = do
s <- genState
let coreRef = CoreRefComp coreId
compCore coreId
$ evalGen s
$ interpretT ((lift :: Run a -> CoreGen a) . liftRun)
$ unCoreComp
$ comp coreRef
groupAddr <- gets group
let (r, c) = groupCoord coreId
lift $ addInclude "<e-loader.h>"
lift $ callProc "e_load"
[ strArg $ moduleName coreId ++ ".srec"
, groupAddr
, valArg $ value r
, valArg $ value c
, valArg (value 1 :: Data Int32) {- E_TRUE -}
]
return coreRef
instance Interp MulticoreCMD RunGen (Param2 exp pred)
where interp = compMulticoreCMD
--------------------------------------------------------------------------------
-- Core program alignment transformation
--------------------------------------------------------------------------------
type TargetPrams = '(Program TargetCMD (Param2 Prim PrimType'), Param2 Prim PrimType')
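-- | Rewrites array allocations ('NewArr' / 'ConstArr') in a compiled core
-- program into their aligned counterparts ('NewCArr' / 'ConstCArr') using the
-- DMA alignment, recursing into 'If', 'For' and 'While' bodies.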
alignArrays :: ProgC a -> ProgC a
alignArrays = go . view
where
go :: ProgramView TargetCMD (Param2 Prim PrimType') a -> ProgC a
go (Return x) = unview (Return x)
go (x :>>= y) = unview (align (traverse x) :>>= \r -> go (view (y r)))
traverse :: forall a. TargetCMD TargetPrams a -> TargetCMD TargetPrams a
traverse x = case (prj x :: Maybe (Imp.ControlCMD TargetPrams a)) of
Just (Imp.If cond t f) -> inj $ Imp.If cond (alignArrays t) (alignArrays f)
Just (Imp.For range body) -> inj $ Imp.For range (\i -> alignArrays (body i))
Just (Imp.While cond body) -> inj $ Imp.While (alignArrays cond) (alignArrays body)
_ -> x
align :: forall a. TargetCMD TargetPrams a -> TargetCMD TargetPrams a
align x = case (prj x :: Maybe (Imp.ArrCMD TargetPrams a)) of
Just (Imp.NewArr base len) -> inj $ Imp.NewCArr base al len
Just (Imp.ConstArr base as) -> inj $ Imp.ConstCArr base al as
_ -> x
where
al :: forall i. Integral i => Maybe i
al = Just $ fromIntegral dmaAlign
--------------------------------------------------------------------------------
-- Core program main wrapper and declaration generator
--------------------------------------------------------------------------------
compCore :: CoreId -> Run () -> RunGen ()
compCore coreId comp = do
-- compile the core program to C and collect the resulting environment
let (_, env) = cGen $ do
addCoreSpecification coreId
C.wrapMain $ interpret $ alignArrays $ translate env0 comp
-- collect pre-allocated local and shared arrays used by core main
arrayDecls <- mkArrayDecls coreId (mainUsedVars env)
-- merge type includes and array definitions
inclMap <- gets inclMap
let coreTypeIncludes = fromMaybe Set.empty (Map.lookup coreId inclMap)
sharedTypeIncludes = fromMaybe Set.empty (Map.lookup sharedId inclMap)
env' = env { C._includes = C._includes env
`Set.union` coreTypeIncludes
`Set.union` sharedTypeIncludes
-- cenvToCUnit will reverse the order of definitions
, C._globals = C._globals env ++ reverse arrayDecls }
-- merge contents to the core module
lift $ inModule (moduleName coreId)
$ mapM_ addDefinition (C.cenvToCUnit env')
mainUsedVars :: C.CEnv -> [Name]
mainUsedVars
= map (\(C.Id name _) -> name)
. maybe [] Set.toList
. Map.lookup "main"
. C._funUsedVars
mkArrayDecls :: CoreId -> [Name] -> RunGen [Definition]
mkArrayDecls coreId usedVars = do
nameMap <- gets nameMap
let arrayVars = filter (isJust . flip Map.lookup nameMap) usedVars
forM arrayVars $ mkArrayDecl coreId
mkArrayDecl :: CoreId -> Name -> RunGen Definition
mkArrayDecl coreId name = do
typeMap <- gets typeMap
nameMap <- gets nameMap
let Just ty = Map.lookup name typeMap
Just (coreId', addr) = Map.lookup name nameMap
-- convert address to global when the given array is on another core
addr'
| coreId' == coreId = addr
| otherwise = addr `toGlobal` coreId'
return $ if coreId' == sharedId
then [cedecl| volatile void * const $id:name = (void *)$addr; |]
else [cedecl| volatile $ty:ty * const $id:name = ($ty:ty *)$addr'; |]
| kmate/raw-feldspar-mcs | src/Feldspar/Multicore/Compile/Parallella/Host.hs | bsd-3-clause | 8,744 | 0 | 16 | 1,852 | 2,373 | 1,213 | 1,160 | -1 | -1 |
{-# LANGUAGE LambdaCase #-}
import System.Environment (getArgs)
import DatabaseController
( saveNote
, openDefaultDb
)
import Input (getNote)
main :: IO ()
main = getArgs >>= \case
[] -> getNote
text -> return (unwords text)
>>= \note ->
openDefaultDb >>= \db_file ->
saveNote db_file note >>
return ()
| klausweiss/linotes | app/Note.hs | bsd-3-clause | 363 | 0 | 12 | 107 | 107 | 57 | 50 | 14 | 2 |
module DimensionalExampleSpec where
import DimensionalExample (requiredStrideLength)
import Prelude hiding ((+))
import Numeric.Units.Dimensional.Prelude
( (*~), (/~)
, (+)
, Length, Time, kilo, meter, minute, second
)
import Numeric.Units.Dimensional.NonSI (foot)
import Test.Hspec (Spec, hspec, describe, it, shouldSatisfy)
-- | Required for auto-discovery.
spec :: Spec
spec =
describe "dimensional" $ do
it "check required running stride length" $
let fiveK :: Length Double
fiveK = 5 *~ kilo meter
goalTime :: Time Double
goalTime = 24 *~ minute + 45 *~ second
feetPerStep :: Double
feetPerStep = requiredStrideLength fiveK goalTime /~ foot
in feetPerStep `shouldSatisfy` (\x -> x > 3 && x < 4)
main :: IO ()
main = hspec spec
| FranklinChen/twenty-four-days2015-of-hackage | test/DimensionalExampleSpec.hs | bsd-3-clause | 835 | 0 | 16 | 213 | 241 | 141 | 100 | 22 | 1 |
import qualified Data.Map as Map
import qualified Data.Ord as Ord
import qualified Data.List as List
import qualified Data.MemoCombinators as Memo
import Data.Maybe
collatz :: Int -> Int
collatz n =
if even n
then n `div` 2
else (3 * n) + 1
collatz_seq_len :: Int -> Int
collatz_seq_len = Memo.integral collatz_seq_len'
where
collatz_seq_len' :: Int -> Int
collatz_seq_len' n =
if n == 1
then 1
else ((+) 1) . collatz_seq_len $ collatz n
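-- Illustrative example (not part of the solution):
-- > collatz_seq_len 13 == 10   -- 13,40,20,10,5,16,8,4,2,1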
elem_with_longest_seq :: Int
elem_with_longest_seq
= List.maximumBy (Ord.comparing collatz_seq_len) [2..1000000]
main :: IO ()
main = do
putStrLn . show $ elem_with_longest_seq | bgwines/project-euler | src/solved/problem14.hs | bsd-3-clause | 643 | 14 | 11 | 124 | 223 | 125 | 98 | 23 | 2 |
module HasOffers.API.Brand.OfferDisabledLink
where
import Data.Text
import GHC.Generics
import Data.Aeson
import Control.Applicative
import Network.HTTP.Client
import qualified Data.ByteString.Char8 as BS
import HasOffers.API.Common
--------------------------------------------------------------------------------
delete params =
Call "OfferDisabledLink"
"delete"
"POST"
[ Param "id" True $ getParam params 0
]
findAll params =
Call "OfferDisabledLink"
"findAll"
"GET"
[ Param "filters" False $ getParam params 0
, Param "sort" False $ getParam params 1
, Param "limit" False $ getParam params 2
, Param "page" False $ getParam params 3
, Param "fields" False $ getParam params 4
, Param "contain" False $ getParam params 5
]
findAllAvailable params =
Call "OfferDisabledLink"
"findAllAvailable"
"GET"
[ Param "filters" False $ getParam params 0
, Param "sort" False $ getParam params 1
, Param "limit" False $ getParam params 2
, Param "page" False $ getParam params 3
, Param "fields" False $ getParam params 4
, Param "contain" False $ getParam params 5
]
findAllByIds params =
Call "OfferDisabledLink"
"findAllByIds"
"GET"
[ Param "ids" False $ getParam params 0
, Param "fields" False $ getParam params 1
, Param "contain" False $ getParam params 2
]
findById params =
Call "OfferDisabledLink"
"findById"
"GET"
[ Param "id" False $ getParam params 0
, Param "fields" False $ getParam params 1
, Param "contain" False $ getParam params 2
]
save params =
Call "OfferDisabledLink"
"save"
"POST"
[ Param "data" True $ getParam params 0
]
| kelecorix/api-hasoffers | src/HasOffers/API/Brand/OfferDisabledLink.hs | bsd-3-clause | 1,888 | 0 | 8 | 578 | 492 | 242 | 250 | 52 | 1 |
import Distribution.Simple
import System.Environment
main = defaultMain
| letusfly85/BackupJenkinsJobConfigure | Setup.hs | bsd-3-clause | 72 | 0 | 4 | 7 | 16 | 9 | 7 | 3 | 1 |