code
stringlengths
5
1.03M
repo_name
stringlengths
5
90
path
stringlengths
4
158
license
stringclasses
15 values
size
int64
5
1.03M
n_ast_errors
int64
0
53.9k
ast_max_depth
int64
2
4.17k
n_whitespaces
int64
0
365k
n_ast_nodes
int64
3
317k
n_ast_terminals
int64
1
171k
n_ast_nonterminals
int64
1
146k
loc
int64
-1
37.3k
cycloplexity
int64
-1
1.31k
module Systat.Module.Battery (battery) where import Text.Regex.Posix import Systat.Module battery :: Module battery = Module { name = "battery" , prefix = "⚡: " , command = "acpi" , args = ["-b"] , parse = parseInput } parseInput :: String -> IO (ModuleState, String) parseInput input = do let out = tail $ head (input =~ pattern :: [[String]]) charging = head out == "Charging" percent = read (out !! 1) :: Int time = " (" ++ out !! 2 ++ ")" result = (if charging then "+" else "-") ++ show percent ++ "%" ++ (if not charging then time else "") state = case percent of n | n < 10 -> Critical | n < 20 -> Warning _ -> Good return (state, result) where pattern = "Battery [0-9]: (.+), (.+)%, (.+) (remaining)?"
mfaerevaag/systat
src/Systat/Module/Battery.hs
mit
791
0
16
215
281
156
125
23
4
{-# LANGUAGE DataKinds #-} {-# LANGUAGE OverloadedStrings #-} -- | Lastfm group API -- -- This module is intended to be imported qualified: -- -- @ -- import qualified Lastfm.Group as Group -- @ module Lastfm.Group ( getHype, getMembers, getWeeklyAlbumChart, getWeeklyArtistChart, getWeeklyChartList, getWeeklyTrackChart ) where import Lastfm.Request -- | Get the hype list for a group -- -- <http://www.last.fm/api/show/group.getHype> getHype :: Request f (Group -> APIKey -> Ready) getHype = api "group.getHype" -- | Get a list of members for this group. -- -- Optional: 'page', 'limit' -- -- <http://www.last.fm/api/show/group.getMembers> getMembers :: Request f (Group -> APIKey -> Ready) getMembers = api "group.getMembers" -- | Get an album chart for a group, for a given date range. -- If no date range is supplied, it will return the most recent album chart for this group. -- -- Optional: 'from', 'to' -- -- <http://www.last.fm/api/show/group.getWeeklyAlbumChart> getWeeklyAlbumChart :: Request f (Group -> APIKey -> Ready) getWeeklyAlbumChart = api "group.getWeeklyAlbumChart" -- | Get an artist chart for a group, for a given date range. -- If no date range is supplied, it will return the most recent album chart for this group. -- -- Optional: 'from', 'to' -- -- <http://www.last.fm/api/show/group.getWeeklyArtistChart> getWeeklyArtistChart :: Request f (Group -> APIKey -> Ready) getWeeklyArtistChart = api "group.getWeeklyArtistChart" -- | Get a list of available charts for this group, expressed as -- date ranges which can be sent to the chart services. -- -- <http://www.last.fm/api/show/group.getWeeklyChartList> getWeeklyChartList :: Request f (Group -> APIKey -> Ready) getWeeklyChartList = api "group.getWeeklyChartList" -- | Get a track chart for a group, for a given date range. -- If no date range is supplied, it will return the most recent album chart for this group. 
-- -- Optional: 'from', 'to' -- -- <http://www.last.fm/api/show/group.getWeeklyTrackChart> getWeeklyTrackChart :: Request f (Group -> APIKey -> Ready) getWeeklyTrackChart = api "group.getWeeklyTrackChart"
supki/liblastfm
src/Lastfm/Group.hs
mit
2,117
0
8
319
244
149
95
17
1
{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# OPTIONS -Wall -Werror -funbox-strict-fields #-} module FP.ImportWizard.Import ( module FP.ImportWizard.Import , module X ) where import BasicPrelude as X hiding (delete, deleteBy, insert, insertBy) import Data.Default import qualified GHC.IO import Language.Haskell.TH import Yesod as X import Yesod.Default.Util import Yesod.Form.Jquery as X (urlJqueryJs) import FP.ImportWizard.Foundation as X widgetFile :: GHC.IO.FilePath -> ExpQ widgetFile = widgetFileReload def
teuffy/min-var-ci
src/FP/ImportWizard/Import.hs
mit
722
0
6
244
112
76
36
17
1
module SyntheticWeb.RandomData ( randomData ) where import qualified Data.ByteString.Lazy.Char8 as LBS import System.Random (mkStdGen, randomRs) randomData :: LBS.ByteString randomData = go (mkStdGen 42) where go = LBS.pack . randomRs ('0', 'z')
kosmoskatten/synthetic-web
src/SyntheticWeb/RandomData.hs
mit
258
0
9
43
77
46
31
7
1
{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE PatternGuards #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE TupleSections #-} -- Copyright : (c) 2019 Robert Künnemann -- License : GPL v3 (see LICENSE) -- -- Maintainer : Robert Künnemann <[email protected]> -- Portability : GHC only -- -- Compute a functiont hat maps positions in a process to where they will need -- to move to ensure local progress whereever possible module Sapic.ProgressFunction ( pfFrom ,pf ,ProgressFunction ,pfRange ,pfInv ) where import Data.Typeable import Control.Monad.Catch import Control.Monad import Theory.Sapic import Sapic.ProcessUtils import qualified Data.Set as S import qualified Data.List as L import qualified Data.Map.Strict as M type ProgressFunction = M.Map ProcessPosition (S.Set (S.Set ProcessPosition)) --- | suffix list p to each element of set *) (<.>) :: Ord a => [a] -> S.Set [a] -> S.Set [a] (<.>) pos set = S.map (\pos' -> pos ++ pos' ) set --- | suffix list p to each element in a set of set of sets *) (<..>) :: Ord a => [a] -> S.Set (S.Set [a]) -> S.Set (S.Set [a]) (<..>) pos setset = S.map (\set' -> pos <.> set') setset -- -- | Combinators that are exclusive, i.e., only one child can be visited -- isExclusive (Cond _) = True -- isExclusive (CondEq _ _) = True -- isExclusive (Lookup _ _) = True -- isExclusive _ = False -- | Actions that are blocking isBlockingAct :: SapicAction -> Bool isBlockingAct Rep = True isBlockingAct (ChIn _ _) = True isBlockingAct _ = False -- | determine whether process is blocking blocking :: AnProcess ann -> Bool blocking (ProcessNull _) = True blocking (ProcessAction ac _ _ ) = isBlockingAct ac blocking (ProcessComb NDC _ pl pr) = blocking pl && blocking pr blocking _ = False -- | next position to jump to next :: (Num a, Ord a) => AnProcess ann -> S.Set [a] next ProcessNull {} = S.empty next ProcessAction {} = S.singleton [1] next (ProcessComb NDC _ pl pr) = nextOrChild pl [1] `S.union` nextOrChild pr [2] where nextOrChild p' pos = if blocking 
p' then pos <.> next p' else S.singleton pos next ProcessComb{} = S.fromList $ [[1],[2]] -- | next position to jump but consider empty position for null process, used in pi next0 :: (Num a, Ord a) => AnProcess ann -> S.Set [a] next0 ProcessNull {} = S.singleton [] next0 ProcessAction {} = S.singleton [1] next0 (ProcessComb NDC _ pl pr) = next0OrChild pl [1] `S.union` next0OrChild pr [2] where next0OrChild p' pos = if blocking p' then pos <.> next0 p' else S.singleton pos next0 ProcessComb{} = S.fromList [[1],[2]] pfFrom :: (MonadCatch m, Show ann, Typeable ann) => AnProcess ann -> m (S.Set ProcessPosition) pfFrom process = from' process True where from' proc b | ProcessNull _ <- proc = return S.empty | otherwise = do res <- foldM (addWithRecursive proc) S.empty (next proc) return $ singletonOrEmpty (conditionAction proc b) `S.union` res singletonOrEmpty True = S.singleton [] singletonOrEmpty False = S.empty conditionAction proc b = not (blocking proc) && b -- condition to add singleton set is given, see Def. 14 in paper addWithRecursive proc accu pos = do p' <- processAt proc pos res <- from' p' (blocking proc) return $ accu `S.union` (pos <.> res) -- | Combine set of sets of position so that they describe alternatives (see comment for progressTo) -- combine x y = { union of xi and yi | xi in x and yi in y} combine :: Ord a => S.Set (S.Set a) -> S.Set (S.Set a) -> S.Set (S.Set a) combine x y = S.foldr (combineWith y) S.empty x -- | Take x_i, take union with y_i for all y_i in y and add result to accumulator set1. combineWith :: Ord a => S.Set (S.Set a) -> S.Set a -> S.Set (S.Set a) -> S.Set (S.Set a) combineWith y x_i set1 = S.foldr (\y_i set2 -> (x_i `S.union` y_i) `S.insert` set2) set1 y -- | Given a process p, find set of set of positions describing the conjunctive -- normal form of the positions that we need to go to. -- For example: {{p1},{p2,p3}} means we need to go to p1 AND to either p2 or p3. -- Correspond to f in Def. 
15 f :: (Show ann, MonadCatch m, Typeable ann) => AnProcess ann -> m (S.Set (S.Set ProcessPosition)) f p -- corresponds to f within generate progressfunction.ml | blocking p = return $ ss [] | (ProcessComb Parallel _ pl pr) <- p = do ll <- f pl lr <- f pr return $ S.union ([1] <..> ll) ([2] <..> lr) | otherwise = foldM combineWithRecursive (S.singleton S.empty) -- accumulator, set of sets of position -- not that the Singleton set of the empty set is -- the neutral element with respect to combine -- the empty set combined with anything gives an emptyset (next0 p) -- list of p∈next^0(proc) where ss x = S.singleton ( S.singleton x) -- shortcut for singleton set of singleton set combineWithRecursive acc pos = do -- combine pss with positions from recursive call (case of nested NDCs) proc' <- processAt p pos lpos <- f proc' return $ combine (pos <..> lpos) acc -- | Compute progress function of proc pf :: (Show ann, MonadCatch m, Typeable ann) => AnProcess ann -> ProcessPosition -> m (S.Set (S.Set ProcessPosition)) pf proc pos = do proc' <- processAt proc pos res <- f proc' return $ pos <..> res flatten :: Ord a => S.Set (S.Set a) -> S.Set a flatten = S.foldr S.union S.empty pfRange' :: (Show ann, Typeable ann, MonadCatch m) => AnProcess ann -> m (S.Set (ProcessPosition, ProcessPosition)) pfRange' proc = do froms <- pfFrom proc foldM mapFlat S.empty froms where mapFlat acc pos = do res <- flatten <$> pf proc pos return (acc `S.union` S.map (,pos) res) pfRange :: (Show ann, Typeable ann, MonadCatch m) => AnProcess ann -> m (S.Set ProcessPosition) pfRange proc = do set <- pfRange' proc return $ S.map fst set pfInv :: (Show ann, Typeable ann, MonadCatch m) => AnProcess ann -> m (ProcessPosition -> Maybe ProcessPosition) pfInv proc = do set <- pfRange' proc return $ \x -> snd <$> L.find (\(to,_) -> to == x ) (S.toList set)
tamarin-prover/tamarin-prover
lib/sapic/src/Sapic/ProgressFunction.hs
gpl-3.0
6,831
0
14
2,059
1,992
1,019
973
101
2
-- Chapter 5 of book.RealWorldHaskell.org -- A sample JSON reading and formatting library for RealWorldHaskell module SimpleJSON ( JValue(..), getString, getInt, getDouble, getBool, getObject, getArray, isNull ) where data JValue = JString String | JNumber Double | JBool Bool | JNull | JObject [(String, JValue)] | JArray [JValue] deriving (Eq, Ord, Show) --Value getting functions getString :: JValue -> Maybe String getString (JString s) = Just s getString _ = Nothing getInt :: JValue -> Maybe Int getInt (JNumber n) = Just (truncate n) getInt _ = Nothing getDouble :: JValue -> Maybe Double getDouble (JNumber d) = Just d getDouble _ = Nothing getBool:: JValue -> Maybe Bool getBool (JBool b) = Just b getBool _ = Nothing getObject :: JValue -> Maybe [(String,JValue)] getObject (JObject o) = Just o getObject _ = Nothing getArray :: JValue -> Maybe [JValue] getArray (JArray a) = Just a getArray _ = Nothing isNull v = v == JNull
jtwool/haskell-sandbox
SimpleJSON.hs
gpl-3.0
1,159
0
8
380
348
186
162
36
1
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.AndroidPublisher.Types.Sum -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- module Network.Google.AndroidPublisher.Types.Sum where import Network.Google.Prelude hiding (Bytes) -- | Type of the Image. Providing an image type that refers to no images is a -- no-op. data EditsImagesDeleteallImageType = AppImageTypeUnspecified -- ^ @appImageTypeUnspecified@ -- Unspecified type. Do not use. | PhoneScreenshots -- ^ @phoneScreenshots@ -- Phone screenshot. | SevenInchScreenshots -- ^ @sevenInchScreenshots@ -- Seven inch screenshot. | TenInchScreenshots -- ^ @tenInchScreenshots@ -- Ten inch screenshot. | TvScreenshots -- ^ @tvScreenshots@ -- TV screenshot. | WearScreenshots -- ^ @wearScreenshots@ -- Wear screenshot. | Icon -- ^ @icon@ -- Icon. | FeatureGraphic -- ^ @featureGraphic@ -- Feature graphic. | TvBanner -- ^ @tvBanner@ -- TV banner. 
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable EditsImagesDeleteallImageType instance FromHttpApiData EditsImagesDeleteallImageType where parseQueryParam = \case "appImageTypeUnspecified" -> Right AppImageTypeUnspecified "phoneScreenshots" -> Right PhoneScreenshots "sevenInchScreenshots" -> Right SevenInchScreenshots "tenInchScreenshots" -> Right TenInchScreenshots "tvScreenshots" -> Right TvScreenshots "wearScreenshots" -> Right WearScreenshots "icon" -> Right Icon "featureGraphic" -> Right FeatureGraphic "tvBanner" -> Right TvBanner x -> Left ("Unable to parse EditsImagesDeleteallImageType from: " <> x) instance ToHttpApiData EditsImagesDeleteallImageType where toQueryParam = \case AppImageTypeUnspecified -> "appImageTypeUnspecified" PhoneScreenshots -> "phoneScreenshots" SevenInchScreenshots -> "sevenInchScreenshots" TenInchScreenshots -> "tenInchScreenshots" TvScreenshots -> "tvScreenshots" WearScreenshots -> "wearScreenshots" Icon -> "icon" FeatureGraphic -> "featureGraphic" TvBanner -> "tvBanner" instance FromJSON EditsImagesDeleteallImageType where parseJSON = parseJSONText "EditsImagesDeleteallImageType" instance ToJSON EditsImagesDeleteallImageType where toJSON = toJSONText -- | The type of the product, e.g. a recurring subscription. data InAppProductPurchaseType = PurchaseTypeUnspecified -- ^ @purchaseTypeUnspecified@ -- Unspecified purchase type. | ManagedUser -- ^ @managedUser@ -- The default product type - one time purchase. | Subscription -- ^ @subscription@ -- In-app product with a recurring period. 
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable InAppProductPurchaseType instance FromHttpApiData InAppProductPurchaseType where parseQueryParam = \case "purchaseTypeUnspecified" -> Right PurchaseTypeUnspecified "managedUser" -> Right ManagedUser "subscription" -> Right Subscription x -> Left ("Unable to parse InAppProductPurchaseType from: " <> x) instance ToHttpApiData InAppProductPurchaseType where toQueryParam = \case PurchaseTypeUnspecified -> "purchaseTypeUnspecified" ManagedUser -> "managedUser" Subscription -> "subscription" instance FromJSON InAppProductPurchaseType where parseJSON = parseJSONText "InAppProductPurchaseType" instance ToJSON InAppProductPurchaseType where toJSON = toJSONText -- | Type of the Image. Providing an image type that refers to no images will -- return an empty response. data EditsImagesListImageType = EILITAppImageTypeUnspecified -- ^ @appImageTypeUnspecified@ -- Unspecified type. Do not use. | EILITPhoneScreenshots -- ^ @phoneScreenshots@ -- Phone screenshot. | EILITSevenInchScreenshots -- ^ @sevenInchScreenshots@ -- Seven inch screenshot. | EILITTenInchScreenshots -- ^ @tenInchScreenshots@ -- Ten inch screenshot. | EILITTvScreenshots -- ^ @tvScreenshots@ -- TV screenshot. | EILITWearScreenshots -- ^ @wearScreenshots@ -- Wear screenshot. | EILITIcon -- ^ @icon@ -- Icon. | EILITFeatureGraphic -- ^ @featureGraphic@ -- Feature graphic. | EILITTvBanner -- ^ @tvBanner@ -- TV banner. 
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable EditsImagesListImageType instance FromHttpApiData EditsImagesListImageType where parseQueryParam = \case "appImageTypeUnspecified" -> Right EILITAppImageTypeUnspecified "phoneScreenshots" -> Right EILITPhoneScreenshots "sevenInchScreenshots" -> Right EILITSevenInchScreenshots "tenInchScreenshots" -> Right EILITTenInchScreenshots "tvScreenshots" -> Right EILITTvScreenshots "wearScreenshots" -> Right EILITWearScreenshots "icon" -> Right EILITIcon "featureGraphic" -> Right EILITFeatureGraphic "tvBanner" -> Right EILITTvBanner x -> Left ("Unable to parse EditsImagesListImageType from: " <> x) instance ToHttpApiData EditsImagesListImageType where toQueryParam = \case EILITAppImageTypeUnspecified -> "appImageTypeUnspecified" EILITPhoneScreenshots -> "phoneScreenshots" EILITSevenInchScreenshots -> "sevenInchScreenshots" EILITTenInchScreenshots -> "tenInchScreenshots" EILITTvScreenshots -> "tvScreenshots" EILITWearScreenshots -> "wearScreenshots" EILITIcon -> "icon" EILITFeatureGraphic -> "featureGraphic" EILITTvBanner -> "tvBanner" instance FromJSON EditsImagesListImageType where parseJSON = parseJSONText "EditsImagesListImageType" instance ToJSON EditsImagesListImageType where toJSON = toJSONText -- | Type of the Image. data EditsImagesUploadImageType = EIUITAppImageTypeUnspecified -- ^ @appImageTypeUnspecified@ -- Unspecified type. Do not use. | EIUITPhoneScreenshots -- ^ @phoneScreenshots@ -- Phone screenshot. | EIUITSevenInchScreenshots -- ^ @sevenInchScreenshots@ -- Seven inch screenshot. | EIUITTenInchScreenshots -- ^ @tenInchScreenshots@ -- Ten inch screenshot. | EIUITTvScreenshots -- ^ @tvScreenshots@ -- TV screenshot. | EIUITWearScreenshots -- ^ @wearScreenshots@ -- Wear screenshot. | EIUITIcon -- ^ @icon@ -- Icon. | EIUITFeatureGraphic -- ^ @featureGraphic@ -- Feature graphic. | EIUITTvBanner -- ^ @tvBanner@ -- TV banner. 
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable EditsImagesUploadImageType instance FromHttpApiData EditsImagesUploadImageType where parseQueryParam = \case "appImageTypeUnspecified" -> Right EIUITAppImageTypeUnspecified "phoneScreenshots" -> Right EIUITPhoneScreenshots "sevenInchScreenshots" -> Right EIUITSevenInchScreenshots "tenInchScreenshots" -> Right EIUITTenInchScreenshots "tvScreenshots" -> Right EIUITTvScreenshots "wearScreenshots" -> Right EIUITWearScreenshots "icon" -> Right EIUITIcon "featureGraphic" -> Right EIUITFeatureGraphic "tvBanner" -> Right EIUITTvBanner x -> Left ("Unable to parse EditsImagesUploadImageType from: " <> x) instance ToHttpApiData EditsImagesUploadImageType where toQueryParam = \case EIUITAppImageTypeUnspecified -> "appImageTypeUnspecified" EIUITPhoneScreenshots -> "phoneScreenshots" EIUITSevenInchScreenshots -> "sevenInchScreenshots" EIUITTenInchScreenshots -> "tenInchScreenshots" EIUITTvScreenshots -> "tvScreenshots" EIUITWearScreenshots -> "wearScreenshots" EIUITIcon -> "icon" EIUITFeatureGraphic -> "featureGraphic" EIUITTvBanner -> "tvBanner" instance FromJSON EditsImagesUploadImageType where parseJSON = parseJSONText "EditsImagesUploadImageType" instance ToJSON EditsImagesUploadImageType where toJSON = toJSONText -- | The type of the deobfuscation file. data EditsDeobfuscationFilesUploadDeobfuscationFileType = DeobfuscationFileTypeUnspecified -- ^ @deobfuscationFileTypeUnspecified@ -- Unspecified deobfuscation file type. | Proguard -- ^ @proguard@ -- Proguard deobfuscation file type. | NATiveCode -- ^ @nativeCode@ -- Native debugging symbols file type. 
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable EditsDeobfuscationFilesUploadDeobfuscationFileType instance FromHttpApiData EditsDeobfuscationFilesUploadDeobfuscationFileType where parseQueryParam = \case "deobfuscationFileTypeUnspecified" -> Right DeobfuscationFileTypeUnspecified "proguard" -> Right Proguard "nativeCode" -> Right NATiveCode x -> Left ("Unable to parse EditsDeobfuscationFilesUploadDeobfuscationFileType from: " <> x) instance ToHttpApiData EditsDeobfuscationFilesUploadDeobfuscationFileType where toQueryParam = \case DeobfuscationFileTypeUnspecified -> "deobfuscationFileTypeUnspecified" Proguard -> "proguard" NATiveCode -> "nativeCode" instance FromJSON EditsDeobfuscationFilesUploadDeobfuscationFileType where parseJSON = parseJSONText "EditsDeobfuscationFilesUploadDeobfuscationFileType" instance ToJSON EditsDeobfuscationFilesUploadDeobfuscationFileType where toJSON = toJSONText -- | The file type of the expansion file configuration which is being -- updated. data EditsExpansionFilesUploadExpansionFileType = ExpansionFileTypeUnspecified -- ^ @expansionFileTypeUnspecified@ -- Unspecified expansion file type. | Main -- ^ @main@ -- Main expansion file. | Patch' -- ^ @patch@ -- Patch expansion file. 
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable EditsExpansionFilesUploadExpansionFileType instance FromHttpApiData EditsExpansionFilesUploadExpansionFileType where parseQueryParam = \case "expansionFileTypeUnspecified" -> Right ExpansionFileTypeUnspecified "main" -> Right Main "patch" -> Right Patch' x -> Left ("Unable to parse EditsExpansionFilesUploadExpansionFileType from: " <> x) instance ToHttpApiData EditsExpansionFilesUploadExpansionFileType where toQueryParam = \case ExpansionFileTypeUnspecified -> "expansionFileTypeUnspecified" Main -> "main" Patch' -> "patch" instance FromJSON EditsExpansionFilesUploadExpansionFileType where parseJSON = parseJSONText "EditsExpansionFilesUploadExpansionFileType" instance ToJSON EditsExpansionFilesUploadExpansionFileType where toJSON = toJSONText -- | The type of the deobfuscation file. data DeobfuscationFileSymbolType = DFSTDeobfuscationFileTypeUnspecified -- ^ @deobfuscationFileTypeUnspecified@ -- Unspecified deobfuscation file type. | DFSTProguard -- ^ @proguard@ -- Proguard deobfuscation file type. | DFSTNATiveCode -- ^ @nativeCode@ -- Native debugging symbols file type. deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable DeobfuscationFileSymbolType instance FromHttpApiData DeobfuscationFileSymbolType where parseQueryParam = \case "deobfuscationFileTypeUnspecified" -> Right DFSTDeobfuscationFileTypeUnspecified "proguard" -> Right DFSTProguard "nativeCode" -> Right DFSTNATiveCode x -> Left ("Unable to parse DeobfuscationFileSymbolType from: " <> x) instance ToHttpApiData DeobfuscationFileSymbolType where toQueryParam = \case DFSTDeobfuscationFileTypeUnspecified -> "deobfuscationFileTypeUnspecified" DFSTProguard -> "proguard" DFSTNATiveCode -> "nativeCode" instance FromJSON DeobfuscationFileSymbolType where parseJSON = parseJSONText "DeobfuscationFileSymbolType" instance ToJSON DeobfuscationFileSymbolType where toJSON = toJSONText -- | The status of the release. 
data TrackReleaseStatus = StatusUnspecified -- ^ @statusUnspecified@ -- Unspecified status. | Draft -- ^ @draft@ -- The release\'s APKs are not being served to users. | InProgress -- ^ @inProgress@ -- The release\'s APKs are being served to a fraction of users, determined -- by \'user_fraction\'. | Halted -- ^ @halted@ -- The release\'s APKs will no longer be served to users. Users who already -- have these APKs are unaffected. | Completed -- ^ @completed@ -- The release will have no further changes. Its APKs are being served to -- all users, unless they are eligible to APKs of a more recent release. deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable TrackReleaseStatus instance FromHttpApiData TrackReleaseStatus where parseQueryParam = \case "statusUnspecified" -> Right StatusUnspecified "draft" -> Right Draft "inProgress" -> Right InProgress "halted" -> Right Halted "completed" -> Right Completed x -> Left ("Unable to parse TrackReleaseStatus from: " <> x) instance ToHttpApiData TrackReleaseStatus where toQueryParam = \case StatusUnspecified -> "statusUnspecified" Draft -> "draft" InProgress -> "inProgress" Halted -> "halted" Completed -> "completed" instance FromJSON TrackReleaseStatus where parseJSON = parseJSONText "TrackReleaseStatus" instance ToJSON TrackReleaseStatus where toJSON = toJSONText -- | V1 error format. data Xgafv = X1 -- ^ @1@ -- v1 error format | X2 -- ^ @2@ -- v2 error format deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable Xgafv instance FromHttpApiData Xgafv where parseQueryParam = \case "1" -> Right X1 "2" -> Right X2 x -> Left ("Unable to parse Xgafv from: " <> x) instance ToHttpApiData Xgafv where toQueryParam = \case X1 -> "1" X2 -> "2" instance FromJSON Xgafv where parseJSON = parseJSONText "Xgafv" instance ToJSON Xgafv where toJSON = toJSONText -- | The file type of the file configuration which is being read or modified. 
data EditsExpansionFilesGetExpansionFileType = EEFGEFTExpansionFileTypeUnspecified -- ^ @expansionFileTypeUnspecified@ -- Unspecified expansion file type. | EEFGEFTMain -- ^ @main@ -- Main expansion file. | EEFGEFTPatch' -- ^ @patch@ -- Patch expansion file. deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable EditsExpansionFilesGetExpansionFileType instance FromHttpApiData EditsExpansionFilesGetExpansionFileType where parseQueryParam = \case "expansionFileTypeUnspecified" -> Right EEFGEFTExpansionFileTypeUnspecified "main" -> Right EEFGEFTMain "patch" -> Right EEFGEFTPatch' x -> Left ("Unable to parse EditsExpansionFilesGetExpansionFileType from: " <> x) instance ToHttpApiData EditsExpansionFilesGetExpansionFileType where toQueryParam = \case EEFGEFTExpansionFileTypeUnspecified -> "expansionFileTypeUnspecified" EEFGEFTMain -> "main" EEFGEFTPatch' -> "patch" instance FromJSON EditsExpansionFilesGetExpansionFileType where parseJSON = parseJSONText "EditsExpansionFilesGetExpansionFileType" instance ToJSON EditsExpansionFilesGetExpansionFileType where toJSON = toJSONText -- | The file type of the expansion file configuration which is being -- updated. data EditsExpansionFilesPatchExpansionFileType = EEFPEFTExpansionFileTypeUnspecified -- ^ @expansionFileTypeUnspecified@ -- Unspecified expansion file type. | EEFPEFTMain -- ^ @main@ -- Main expansion file. | EEFPEFTPatch' -- ^ @patch@ -- Patch expansion file. 
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable EditsExpansionFilesPatchExpansionFileType instance FromHttpApiData EditsExpansionFilesPatchExpansionFileType where parseQueryParam = \case "expansionFileTypeUnspecified" -> Right EEFPEFTExpansionFileTypeUnspecified "main" -> Right EEFPEFTMain "patch" -> Right EEFPEFTPatch' x -> Left ("Unable to parse EditsExpansionFilesPatchExpansionFileType from: " <> x) instance ToHttpApiData EditsExpansionFilesPatchExpansionFileType where toQueryParam = \case EEFPEFTExpansionFileTypeUnspecified -> "expansionFileTypeUnspecified" EEFPEFTMain -> "main" EEFPEFTPatch' -> "patch" instance FromJSON EditsExpansionFilesPatchExpansionFileType where parseJSON = parseJSONText "EditsExpansionFilesPatchExpansionFileType" instance ToJSON EditsExpansionFilesPatchExpansionFileType where toJSON = toJSONText -- | The file type of the file configuration which is being read or modified. data EditsExpansionFilesUpdateExpansionFileType = EEFUEFTExpansionFileTypeUnspecified -- ^ @expansionFileTypeUnspecified@ -- Unspecified expansion file type. | EEFUEFTMain -- ^ @main@ -- Main expansion file. | EEFUEFTPatch' -- ^ @patch@ -- Patch expansion file. 
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable EditsExpansionFilesUpdateExpansionFileType instance FromHttpApiData EditsExpansionFilesUpdateExpansionFileType where parseQueryParam = \case "expansionFileTypeUnspecified" -> Right EEFUEFTExpansionFileTypeUnspecified "main" -> Right EEFUEFTMain "patch" -> Right EEFUEFTPatch' x -> Left ("Unable to parse EditsExpansionFilesUpdateExpansionFileType from: " <> x) instance ToHttpApiData EditsExpansionFilesUpdateExpansionFileType where toQueryParam = \case EEFUEFTExpansionFileTypeUnspecified -> "expansionFileTypeUnspecified" EEFUEFTMain -> "main" EEFUEFTPatch' -> "patch" instance FromJSON EditsExpansionFilesUpdateExpansionFileType where parseJSON = parseJSONText "EditsExpansionFilesUpdateExpansionFileType" instance ToJSON EditsExpansionFilesUpdateExpansionFileType where toJSON = toJSONText -- | Type of the Image. data EditsImagesDeleteImageType = EIDITAppImageTypeUnspecified -- ^ @appImageTypeUnspecified@ -- Unspecified type. Do not use. | EIDITPhoneScreenshots -- ^ @phoneScreenshots@ -- Phone screenshot. | EIDITSevenInchScreenshots -- ^ @sevenInchScreenshots@ -- Seven inch screenshot. | EIDITTenInchScreenshots -- ^ @tenInchScreenshots@ -- Ten inch screenshot. | EIDITTvScreenshots -- ^ @tvScreenshots@ -- TV screenshot. | EIDITWearScreenshots -- ^ @wearScreenshots@ -- Wear screenshot. | EIDITIcon -- ^ @icon@ -- Icon. | EIDITFeatureGraphic -- ^ @featureGraphic@ -- Feature graphic. | EIDITTvBanner -- ^ @tvBanner@ -- TV banner. 
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable EditsImagesDeleteImageType instance FromHttpApiData EditsImagesDeleteImageType where parseQueryParam = \case "appImageTypeUnspecified" -> Right EIDITAppImageTypeUnspecified "phoneScreenshots" -> Right EIDITPhoneScreenshots "sevenInchScreenshots" -> Right EIDITSevenInchScreenshots "tenInchScreenshots" -> Right EIDITTenInchScreenshots "tvScreenshots" -> Right EIDITTvScreenshots "wearScreenshots" -> Right EIDITWearScreenshots "icon" -> Right EIDITIcon "featureGraphic" -> Right EIDITFeatureGraphic "tvBanner" -> Right EIDITTvBanner x -> Left ("Unable to parse EditsImagesDeleteImageType from: " <> x) instance ToHttpApiData EditsImagesDeleteImageType where toQueryParam = \case EIDITAppImageTypeUnspecified -> "appImageTypeUnspecified" EIDITPhoneScreenshots -> "phoneScreenshots" EIDITSevenInchScreenshots -> "sevenInchScreenshots" EIDITTenInchScreenshots -> "tenInchScreenshots" EIDITTvScreenshots -> "tvScreenshots" EIDITWearScreenshots -> "wearScreenshots" EIDITIcon -> "icon" EIDITFeatureGraphic -> "featureGraphic" EIDITTvBanner -> "tvBanner" instance FromJSON EditsImagesDeleteImageType where parseJSON = parseJSONText "EditsImagesDeleteImageType" instance ToJSON EditsImagesDeleteImageType where toJSON = toJSONText -- | The status of the product, e.g. whether it\'s active. data InAppProductStatus = IAPSStatusUnspecified -- ^ @statusUnspecified@ -- Unspecified status. | IAPSActive -- ^ @active@ -- The product is published and active in the store. | IAPSInactive -- ^ @inactive@ -- The product is not published and therefore inactive in the store. 
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable InAppProductStatus instance FromHttpApiData InAppProductStatus where parseQueryParam = \case "statusUnspecified" -> Right IAPSStatusUnspecified "active" -> Right IAPSActive "inactive" -> Right IAPSInactive x -> Left ("Unable to parse InAppProductStatus from: " <> x) instance ToHttpApiData InAppProductStatus where toQueryParam = \case IAPSStatusUnspecified -> "statusUnspecified" IAPSActive -> "active" IAPSInactive -> "inactive" instance FromJSON InAppProductStatus where parseJSON = parseJSONText "InAppProductStatus" instance ToJSON InAppProductStatus where toJSON = toJSONText
brendanhay/gogol
gogol-android-publisher/gen/Network/Google/AndroidPublisher/Types/Sum.hs
mpl-2.0
22,271
0
11
4,827
3,090
1,655
1,435
377
0
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.AlertCenter.Types.Sum -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- module Network.Google.AlertCenter.Types.Sum where import Network.Google.Prelude hiding (Bytes) -- | Source of the data. data RuleViolationInfoDataSource = DataSourceUnspecified -- ^ @DATA_SOURCE_UNSPECIFIED@ -- Data source is unspecified. | Drive -- ^ @DRIVE@ -- Drive data source. deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable RuleViolationInfoDataSource instance FromHttpApiData RuleViolationInfoDataSource where parseQueryParam = \case "DATA_SOURCE_UNSPECIFIED" -> Right DataSourceUnspecified "DRIVE" -> Right Drive x -> Left ("Unable to parse RuleViolationInfoDataSource from: " <> x) instance ToHttpApiData RuleViolationInfoDataSource where toQueryParam = \case DataSourceUnspecified -> "DATA_SOURCE_UNSPECIFIED" Drive -> "DRIVE" instance FromJSON RuleViolationInfoDataSource where parseJSON = parseJSONText "RuleViolationInfoDataSource" instance ToJSON RuleViolationInfoDataSource where toJSON = toJSONText -- | System actions on the messages. data MailPhishingSystemActionType = SystemActionTypeUnspecified -- ^ @SYSTEM_ACTION_TYPE_UNSPECIFIED@ -- System action is unspecified. | NoOperation -- ^ @NO_OPERATION@ -- No operation. | RemovedFromInbox -- ^ @REMOVED_FROM_INBOX@ -- Messages were removed from the inbox. 
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable MailPhishingSystemActionType instance FromHttpApiData MailPhishingSystemActionType where parseQueryParam = \case "SYSTEM_ACTION_TYPE_UNSPECIFIED" -> Right SystemActionTypeUnspecified "NO_OPERATION" -> Right NoOperation "REMOVED_FROM_INBOX" -> Right RemovedFromInbox x -> Left ("Unable to parse MailPhishingSystemActionType from: " <> x) instance ToHttpApiData MailPhishingSystemActionType where toQueryParam = \case SystemActionTypeUnspecified -> "SYSTEM_ACTION_TYPE_UNSPECIFIED" NoOperation -> "NO_OPERATION" RemovedFromInbox -> "REMOVED_FROM_INBOX" instance FromJSON MailPhishingSystemActionType where parseJSON = parseJSONText "MailPhishingSystemActionType" instance ToJSON MailPhishingSystemActionType where toJSON = toJSONText data RuleViolationInfoTriggeredActionTypesItem = RVITATIActionTypeUnspecified -- ^ @ACTION_TYPE_UNSPECIFIED@ -- Action type is unspecified. | RVITATIDriveBlockExternalSharing -- ^ @DRIVE_BLOCK_EXTERNAL_SHARING@ -- Block sharing a file externally. | RVITATIDriveWarnOnExternalSharing -- ^ @DRIVE_WARN_ON_EXTERNAL_SHARING@ -- Show a warning message when sharing a file externally. | RVITATIAlert -- ^ @ALERT@ -- Send alert. 
| RVITATIRuleActivate -- ^ @RULE_ACTIVATE@ -- Activate Rule Action | RVITATIRuleDeactivate -- ^ @RULE_DEACTIVATE@ -- Deactivate Rule Action deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable RuleViolationInfoTriggeredActionTypesItem instance FromHttpApiData RuleViolationInfoTriggeredActionTypesItem where parseQueryParam = \case "ACTION_TYPE_UNSPECIFIED" -> Right RVITATIActionTypeUnspecified "DRIVE_BLOCK_EXTERNAL_SHARING" -> Right RVITATIDriveBlockExternalSharing "DRIVE_WARN_ON_EXTERNAL_SHARING" -> Right RVITATIDriveWarnOnExternalSharing "ALERT" -> Right RVITATIAlert "RULE_ACTIVATE" -> Right RVITATIRuleActivate "RULE_DEACTIVATE" -> Right RVITATIRuleDeactivate x -> Left ("Unable to parse RuleViolationInfoTriggeredActionTypesItem from: " <> x) instance ToHttpApiData RuleViolationInfoTriggeredActionTypesItem where toQueryParam = \case RVITATIActionTypeUnspecified -> "ACTION_TYPE_UNSPECIFIED" RVITATIDriveBlockExternalSharing -> "DRIVE_BLOCK_EXTERNAL_SHARING" RVITATIDriveWarnOnExternalSharing -> "DRIVE_WARN_ON_EXTERNAL_SHARING" RVITATIAlert -> "ALERT" RVITATIRuleActivate -> "RULE_ACTIVATE" RVITATIRuleDeactivate -> "RULE_DEACTIVATE" instance FromJSON RuleViolationInfoTriggeredActionTypesItem where parseJSON = parseJSONText "RuleViolationInfoTriggeredActionTypesItem" instance ToJSON RuleViolationInfoTriggeredActionTypesItem where toJSON = toJSONText -- | Current outage status. data AppsOutageStatus = StatusUnspecified -- ^ @STATUS_UNSPECIFIED@ -- Status is unspecified. | New -- ^ @NEW@ -- The incident has just been reported. | Ongoing -- ^ @ONGOING@ -- The incidnet is ongoing. | Resolved -- ^ @RESOLVED@ -- The incident has been resolved. 
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable AppsOutageStatus instance FromHttpApiData AppsOutageStatus where parseQueryParam = \case "STATUS_UNSPECIFIED" -> Right StatusUnspecified "NEW" -> Right New "ONGOING" -> Right Ongoing "RESOLVED" -> Right Resolved x -> Left ("Unable to parse AppsOutageStatus from: " <> x) instance ToHttpApiData AppsOutageStatus where toQueryParam = \case StatusUnspecified -> "STATUS_UNSPECIFIED" New -> "NEW" Ongoing -> "ONGOING" Resolved -> "RESOLVED" instance FromJSON AppsOutageStatus where parseJSON = parseJSONText "AppsOutageStatus" instance ToJSON AppsOutageStatus where toJSON = toJSONText -- | Optional. The format of the payload that would be sent. If not specified -- the format will be JSON. data CloudPubsubTopicPayloadFormat = PayloadFormatUnspecified -- ^ @PAYLOAD_FORMAT_UNSPECIFIED@ -- Payload format is not specified (will use JSON as default). | JSON -- ^ @JSON@ -- Use JSON. deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable CloudPubsubTopicPayloadFormat instance FromHttpApiData CloudPubsubTopicPayloadFormat where parseQueryParam = \case "PAYLOAD_FORMAT_UNSPECIFIED" -> Right PayloadFormatUnspecified "JSON" -> Right JSON x -> Left ("Unable to parse CloudPubsubTopicPayloadFormat from: " <> x) instance ToHttpApiData CloudPubsubTopicPayloadFormat where toQueryParam = \case PayloadFormatUnspecified -> "PAYLOAD_FORMAT_UNSPECIFIED" JSON -> "JSON" instance FromJSON CloudPubsubTopicPayloadFormat where parseJSON = parseJSONText "CloudPubsubTopicPayloadFormat" instance ToJSON CloudPubsubTopicPayloadFormat where toJSON = toJSONText -- | Required. The type of the feedback. data AlertFeedbackType = AlertFeedbackTypeUnspecified -- ^ @ALERT_FEEDBACK_TYPE_UNSPECIFIED@ -- The feedback type is not specified. | NotUseful -- ^ @NOT_USEFUL@ -- The alert report is not useful. | SomewhatUseful -- ^ @SOMEWHAT_USEFUL@ -- The alert report is somewhat useful. 
| VeryUseful -- ^ @VERY_USEFUL@ -- The alert report is very useful. deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable AlertFeedbackType instance FromHttpApiData AlertFeedbackType where parseQueryParam = \case "ALERT_FEEDBACK_TYPE_UNSPECIFIED" -> Right AlertFeedbackTypeUnspecified "NOT_USEFUL" -> Right NotUseful "SOMEWHAT_USEFUL" -> Right SomewhatUseful "VERY_USEFUL" -> Right VeryUseful x -> Left ("Unable to parse AlertFeedbackType from: " <> x) instance ToHttpApiData AlertFeedbackType where toQueryParam = \case AlertFeedbackTypeUnspecified -> "ALERT_FEEDBACK_TYPE_UNSPECIFIED" NotUseful -> "NOT_USEFUL" SomewhatUseful -> "SOMEWHAT_USEFUL" VeryUseful -> "VERY_USEFUL" instance FromJSON AlertFeedbackType where parseJSON = parseJSONText "AlertFeedbackType" instance ToJSON AlertFeedbackType where toJSON = toJSONText -- | V1 error format. data Xgafv = X1 -- ^ @1@ -- v1 error format | X2 -- ^ @2@ -- v2 error format deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable Xgafv instance FromHttpApiData Xgafv where parseQueryParam = \case "1" -> Right X1 "2" -> Right X2 x -> Left ("Unable to parse Xgafv from: " <> x) instance ToHttpApiData Xgafv where toQueryParam = \case X1 -> "1" X2 -> "2" instance FromJSON Xgafv where parseJSON = parseJSONText "Xgafv" instance ToJSON Xgafv where toJSON = toJSONText data RuleViolationInfoSuppressedActionTypesItem = RVISATIActionTypeUnspecified -- ^ @ACTION_TYPE_UNSPECIFIED@ -- Action type is unspecified. | RVISATIDriveBlockExternalSharing -- ^ @DRIVE_BLOCK_EXTERNAL_SHARING@ -- Block sharing a file externally. | RVISATIDriveWarnOnExternalSharing -- ^ @DRIVE_WARN_ON_EXTERNAL_SHARING@ -- Show a warning message when sharing a file externally. | RVISATIAlert -- ^ @ALERT@ -- Send alert. 
| RVISATIRuleActivate -- ^ @RULE_ACTIVATE@ -- Activate Rule Action | RVISATIRuleDeactivate -- ^ @RULE_DEACTIVATE@ -- Deactivate Rule Action deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable RuleViolationInfoSuppressedActionTypesItem instance FromHttpApiData RuleViolationInfoSuppressedActionTypesItem where parseQueryParam = \case "ACTION_TYPE_UNSPECIFIED" -> Right RVISATIActionTypeUnspecified "DRIVE_BLOCK_EXTERNAL_SHARING" -> Right RVISATIDriveBlockExternalSharing "DRIVE_WARN_ON_EXTERNAL_SHARING" -> Right RVISATIDriveWarnOnExternalSharing "ALERT" -> Right RVISATIAlert "RULE_ACTIVATE" -> Right RVISATIRuleActivate "RULE_DEACTIVATE" -> Right RVISATIRuleDeactivate x -> Left ("Unable to parse RuleViolationInfoSuppressedActionTypesItem from: " <> x) instance ToHttpApiData RuleViolationInfoSuppressedActionTypesItem where toQueryParam = \case RVISATIActionTypeUnspecified -> "ACTION_TYPE_UNSPECIFIED" RVISATIDriveBlockExternalSharing -> "DRIVE_BLOCK_EXTERNAL_SHARING" RVISATIDriveWarnOnExternalSharing -> "DRIVE_WARN_ON_EXTERNAL_SHARING" RVISATIAlert -> "ALERT" RVISATIRuleActivate -> "RULE_ACTIVATE" RVISATIRuleDeactivate -> "RULE_DEACTIVATE" instance FromJSON RuleViolationInfoSuppressedActionTypesItem where parseJSON = parseJSONText "RuleViolationInfoSuppressedActionTypesItem" instance ToJSON RuleViolationInfoSuppressedActionTypesItem where toJSON = toJSONText -- | Trigger of the rule. data RuleViolationInfoTrigger = TriggerUnspecified -- ^ @TRIGGER_UNSPECIFIED@ -- Trigger is unspecified. | DriveShare -- ^ @DRIVE_SHARE@ -- A Drive file is shared. 
deriving (Eq, Ord, Enum, Read, Show, Data, Typeable, Generic) instance Hashable RuleViolationInfoTrigger instance FromHttpApiData RuleViolationInfoTrigger where parseQueryParam = \case "TRIGGER_UNSPECIFIED" -> Right TriggerUnspecified "DRIVE_SHARE" -> Right DriveShare x -> Left ("Unable to parse RuleViolationInfoTrigger from: " <> x) instance ToHttpApiData RuleViolationInfoTrigger where toQueryParam = \case TriggerUnspecified -> "TRIGGER_UNSPECIFIED" DriveShare -> "DRIVE_SHARE" instance FromJSON RuleViolationInfoTrigger where parseJSON = parseJSONText "RuleViolationInfoTrigger" instance ToJSON RuleViolationInfoTrigger where toJSON = toJSONText
brendanhay/gogol
gogol-alertcenter/gen/Network/Google/AlertCenter/Types/Sum.hs
mpl-2.0
12,003
0
11
2,503
1,752
936
816
209
0
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.CloudResourceManager.Projects.Delete -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Marks the project identified by the specified \`name\` (for example, -- \`projects\/415104041262\`) for deletion. This method will only affect -- the project if it has a lifecycle state of ACTIVE. This method changes -- the Project\'s lifecycle state from ACTIVE to DELETE_REQUESTED. The -- deletion starts at an unspecified time, at which point the Project is no -- longer accessible. Until the deletion completes, you can check the -- lifecycle state checked by retrieving the project with GetProject, and -- the project remains visible to ListProjects. However, you cannot update -- the project. After the deletion completes, the project is not -- retrievable by the GetProject, ListProjects, and SearchProjects methods. -- This method behaves idempotently, such that deleting a -- \`DELETE_REQUESTED\` project will not cause an error, but also won\'t do -- anything. The caller must have \`resourcemanager.projects.delete\` -- permissions for this project. -- -- /See:/ <https://cloud.google.com/resource-manager Cloud Resource Manager API Reference> for @cloudresourcemanager.projects.delete@. 
module Network.Google.Resource.CloudResourceManager.Projects.Delete ( -- * REST Resource ProjectsDeleteResource -- * Creating a Request , projectsDelete , ProjectsDelete -- * Request Lenses , pdXgafv , pdUploadProtocol , pdAccessToken , pdUploadType , pdName , pdCallback ) where import Network.Google.Prelude import Network.Google.ResourceManager.Types -- | A resource alias for @cloudresourcemanager.projects.delete@ method which the -- 'ProjectsDelete' request conforms to. type ProjectsDeleteResource = "v3" :> Capture "name" Text :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> Delete '[JSON] Operation -- | Marks the project identified by the specified \`name\` (for example, -- \`projects\/415104041262\`) for deletion. This method will only affect -- the project if it has a lifecycle state of ACTIVE. This method changes -- the Project\'s lifecycle state from ACTIVE to DELETE_REQUESTED. The -- deletion starts at an unspecified time, at which point the Project is no -- longer accessible. Until the deletion completes, you can check the -- lifecycle state checked by retrieving the project with GetProject, and -- the project remains visible to ListProjects. However, you cannot update -- the project. After the deletion completes, the project is not -- retrievable by the GetProject, ListProjects, and SearchProjects methods. -- This method behaves idempotently, such that deleting a -- \`DELETE_REQUESTED\` project will not cause an error, but also won\'t do -- anything. The caller must have \`resourcemanager.projects.delete\` -- permissions for this project. -- -- /See:/ 'projectsDelete' smart constructor. 
data ProjectsDelete = ProjectsDelete' { _pdXgafv :: !(Maybe Xgafv) , _pdUploadProtocol :: !(Maybe Text) , _pdAccessToken :: !(Maybe Text) , _pdUploadType :: !(Maybe Text) , _pdName :: !Text , _pdCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'ProjectsDelete' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'pdXgafv' -- -- * 'pdUploadProtocol' -- -- * 'pdAccessToken' -- -- * 'pdUploadType' -- -- * 'pdName' -- -- * 'pdCallback' projectsDelete :: Text -- ^ 'pdName' -> ProjectsDelete projectsDelete pPdName_ = ProjectsDelete' { _pdXgafv = Nothing , _pdUploadProtocol = Nothing , _pdAccessToken = Nothing , _pdUploadType = Nothing , _pdName = pPdName_ , _pdCallback = Nothing } -- | V1 error format. pdXgafv :: Lens' ProjectsDelete (Maybe Xgafv) pdXgafv = lens _pdXgafv (\ s a -> s{_pdXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). pdUploadProtocol :: Lens' ProjectsDelete (Maybe Text) pdUploadProtocol = lens _pdUploadProtocol (\ s a -> s{_pdUploadProtocol = a}) -- | OAuth access token. pdAccessToken :: Lens' ProjectsDelete (Maybe Text) pdAccessToken = lens _pdAccessToken (\ s a -> s{_pdAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). pdUploadType :: Lens' ProjectsDelete (Maybe Text) pdUploadType = lens _pdUploadType (\ s a -> s{_pdUploadType = a}) -- | Required. The name of the Project (for example, -- \`projects\/415104041262\`). 
pdName :: Lens' ProjectsDelete Text pdName = lens _pdName (\ s a -> s{_pdName = a}) -- | JSONP pdCallback :: Lens' ProjectsDelete (Maybe Text) pdCallback = lens _pdCallback (\ s a -> s{_pdCallback = a}) instance GoogleRequest ProjectsDelete where type Rs ProjectsDelete = Operation type Scopes ProjectsDelete = '["https://www.googleapis.com/auth/cloud-platform"] requestClient ProjectsDelete'{..} = go _pdName _pdXgafv _pdUploadProtocol _pdAccessToken _pdUploadType _pdCallback (Just AltJSON) resourceManagerService where go = buildClient (Proxy :: Proxy ProjectsDeleteResource) mempty
brendanhay/gogol
gogol-resourcemanager/gen/Network/Google/Resource/CloudResourceManager/Projects/Delete.hs
mpl-2.0
6,207
0
15
1,275
721
432
289
98
1
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-matches #-} -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | -- Module : Network.AWS.IAM.ListGroupPolicies -- Copyright : (c) 2013-2015 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Lists the names of the inline policies that are embedded in the -- specified group. -- -- A group can also have managed policies attached to it. To list the -- managed policies that are attached to a group, use -- ListAttachedGroupPolicies. For more information about policies, refer to -- <http://docs.aws.amazon.com/IAM/latest/UserGuide/policies-managed-vs-inline.html Managed Policies and Inline Policies> -- in the /IAM User Guide/. -- -- You can paginate the results using the 'MaxItems' and 'Marker' -- parameters. If there are no inline policies embedded with the specified -- group, the action returns an empty list. -- -- /See:/ <http://docs.aws.amazon.com/IAM/latest/APIReference/API_ListGroupPolicies.html AWS API Reference> for ListGroupPolicies. -- -- This operation returns paginated results. 
module Network.AWS.IAM.ListGroupPolicies ( -- * Creating a Request listGroupPolicies , ListGroupPolicies -- * Request Lenses , lgpMarker , lgpMaxItems , lgpGroupName -- * Destructuring the Response , listGroupPoliciesResponse , ListGroupPoliciesResponse -- * Response Lenses , lgprsMarker , lgprsIsTruncated , lgprsResponseStatus , lgprsPolicyNames ) where import Network.AWS.IAM.Types import Network.AWS.IAM.Types.Product import Network.AWS.Pager import Network.AWS.Prelude import Network.AWS.Request import Network.AWS.Response -- | /See:/ 'listGroupPolicies' smart constructor. data ListGroupPolicies = ListGroupPolicies' { _lgpMarker :: !(Maybe Text) , _lgpMaxItems :: !(Maybe Nat) , _lgpGroupName :: !Text } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'ListGroupPolicies' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'lgpMarker' -- -- * 'lgpMaxItems' -- -- * 'lgpGroupName' listGroupPolicies :: Text -- ^ 'lgpGroupName' -> ListGroupPolicies listGroupPolicies pGroupName_ = ListGroupPolicies' { _lgpMarker = Nothing , _lgpMaxItems = Nothing , _lgpGroupName = pGroupName_ } -- | Use this parameter only when paginating results and only after you -- receive a response indicating that the results are truncated. Set it to -- the value of the 'Marker' element in the response you received to inform -- the next call about where to start. lgpMarker :: Lens' ListGroupPolicies (Maybe Text) lgpMarker = lens _lgpMarker (\ s a -> s{_lgpMarker = a}); -- | Use this only when paginating results to indicate the maximum number of -- items you want in the response. If there are additional items beyond the -- maximum you specify, the 'IsTruncated' response element is 'true'. -- -- This parameter is optional. If you do not include it, it defaults to -- 100. Note that IAM might return fewer results, even when there are more -- results available. 
If this is the case, the 'IsTruncated' response -- element returns 'true' and 'Marker' contains a value to include in the -- subsequent call that tells the service where to continue from. lgpMaxItems :: Lens' ListGroupPolicies (Maybe Natural) lgpMaxItems = lens _lgpMaxItems (\ s a -> s{_lgpMaxItems = a}) . mapping _Nat; -- | The name of the group to list policies for. lgpGroupName :: Lens' ListGroupPolicies Text lgpGroupName = lens _lgpGroupName (\ s a -> s{_lgpGroupName = a}); instance AWSPager ListGroupPolicies where page rq rs | stop (rs ^. lgprsIsTruncated) = Nothing | isNothing (rs ^. lgprsMarker) = Nothing | otherwise = Just $ rq & lgpMarker .~ rs ^. lgprsMarker instance AWSRequest ListGroupPolicies where type Rs ListGroupPolicies = ListGroupPoliciesResponse request = postQuery iAM response = receiveXMLWrapper "ListGroupPoliciesResult" (\ s h x -> ListGroupPoliciesResponse' <$> (x .@? "Marker") <*> (x .@? "IsTruncated") <*> (pure (fromEnum s)) <*> (x .@? "PolicyNames" .!@ mempty >>= parseXMLList "member")) instance ToHeaders ListGroupPolicies where toHeaders = const mempty instance ToPath ListGroupPolicies where toPath = const "/" instance ToQuery ListGroupPolicies where toQuery ListGroupPolicies'{..} = mconcat ["Action" =: ("ListGroupPolicies" :: ByteString), "Version" =: ("2010-05-08" :: ByteString), "Marker" =: _lgpMarker, "MaxItems" =: _lgpMaxItems, "GroupName" =: _lgpGroupName] -- | Contains the response to a successful ListGroupPolicies request. -- -- /See:/ 'listGroupPoliciesResponse' smart constructor. data ListGroupPoliciesResponse = ListGroupPoliciesResponse' { _lgprsMarker :: !(Maybe Text) , _lgprsIsTruncated :: !(Maybe Bool) , _lgprsResponseStatus :: !Int , _lgprsPolicyNames :: ![Text] } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'ListGroupPoliciesResponse' with the minimum fields required to make a request. 
-- -- Use one of the following lenses to modify other fields as desired: -- -- * 'lgprsMarker' -- -- * 'lgprsIsTruncated' -- -- * 'lgprsResponseStatus' -- -- * 'lgprsPolicyNames' listGroupPoliciesResponse :: Int -- ^ 'lgprsResponseStatus' -> ListGroupPoliciesResponse listGroupPoliciesResponse pResponseStatus_ = ListGroupPoliciesResponse' { _lgprsMarker = Nothing , _lgprsIsTruncated = Nothing , _lgprsResponseStatus = pResponseStatus_ , _lgprsPolicyNames = mempty } -- | When 'IsTruncated' is 'true', this element is present and contains the -- value to use for the 'Marker' parameter in a subsequent pagination -- request. lgprsMarker :: Lens' ListGroupPoliciesResponse (Maybe Text) lgprsMarker = lens _lgprsMarker (\ s a -> s{_lgprsMarker = a}); -- | A flag that indicates whether there are more items to return. If your -- results were truncated, you can make a subsequent pagination request -- using the 'Marker' request parameter to retrieve more items. Note that -- IAM might return fewer than the 'MaxItems' number of results even when -- there are more results available. We recommend that you check -- 'IsTruncated' after every call to ensure that you receive all of your -- results. lgprsIsTruncated :: Lens' ListGroupPoliciesResponse (Maybe Bool) lgprsIsTruncated = lens _lgprsIsTruncated (\ s a -> s{_lgprsIsTruncated = a}); -- | The response status code. lgprsResponseStatus :: Lens' ListGroupPoliciesResponse Int lgprsResponseStatus = lens _lgprsResponseStatus (\ s a -> s{_lgprsResponseStatus = a}); -- | A list of policy names. lgprsPolicyNames :: Lens' ListGroupPoliciesResponse [Text] lgprsPolicyNames = lens _lgprsPolicyNames (\ s a -> s{_lgprsPolicyNames = a}) . _Coerce;
olorin/amazonka
amazonka-iam/gen/Network/AWS/IAM/ListGroupPolicies.hs
mpl-2.0
7,593
0
14
1,586
999
595
404
112
1
{-| Module : Reflex.WX.Layout Description : This module re-exports appropriate functions from Graphics.UI.WXCore.Layout and provides the necessary instance declaration to make the functions work. License : wxWindows Library License Maintainer : [email protected] Stability : Experimental -} module Reflex.WX.Layout ( W.Layout , W.widget , W.rule , W.hrule , W.vrule , W.row , W.column , W.grid , W.boxed , W.glue , W.hglue , W.vglue , W.space , W.hspace , W.vspace , W.empty , W.dynamic , W.static , W.stretch , W.hstretch , W.vstretch , W.minsize , W.rigid , W.shaped , W.expand , W.fill , W.hfill , W.vfill , W.margin , W.marginWidth , W.marginNone , W.marginLeft , W.marginTop , W.marginRight , W.marginBottom , W.floatTopLeft , W.floatTop , W.floatTopRight , W.floatLeft , W.floatCentre , W.floatCenter , W.floatRight , W.floatBottomLeft , W.floatBottom , W.floatBottomRight , W.hfloatLeft , W.hfloatCentre , W.hfloatCenter , W.hfloatRight , W.vfloatTop , W.vfloatCentre , W.vfloatCenter , W.vfloatBottom , W.alignTopLeft , W.alignTop , W.alignTopRight , W.alignLeft , W.alignCentre , W.alignCenter , W.alignRight , W.alignBottomLeft , W.alignBottom , W.alignBottomRight , W.halignLeft , W.halignCentre , W.halignCenter , W.halignRight , W.valignTop , W.valignCentre , W.valignCenter , W.valignBottom ) where import qualified Graphics.UI.WXCore.Layout as W import Reflex.WX.Class instance W.Widget w => W.Widget (Widget t w) where widget w = W.widget (wxwidget w)
Pamelloes/reflex-wx
src/main/Reflex/WX/Layout.hs
lgpl-2.1
3,261
0
8
1,994
427
254
173
75
0
module Print where

import General
import Dictionary
import Data.List (intersperse)

-- Rendering of morphological objects as strings.

-- | Join all alternative forms with @/@.
prStr :: Str -> String
prStr = concat . intersperse "/" . unStr

-- | Like 'prStr', but an empty alternative list renders as @*@ and the
-- slashes are surrounded by spaces.
prAlts :: Str -> String
prAlts ss = case unStr ss of
  [] -> "*"
  ys -> unwords $ intersperse "/" ys

-- | A one-row table for invariant words.
consTable :: Str -> Table String
consTable s = [("INVAR", s)]

-- | As 'consTable', but carrying the default composition attribute.
consTableW :: Str -> [(String,(Attr,Str))]
consTableW s = [("INVAR", (noComp,s))]

-- | Print an inflection function's table, one 'show'-n row per line.
putFun0 :: Param a => (a -> Str) -> IO ()
putFun0 = putStr . unlines . map show . prTable . table

-- | Print an inflection function's table as @param : form1 / form2@ rows.
putFun :: Param a => (a -> Str) -> IO ()
putFun = putStr . unlines . map renderRow . prTable . table
 where
  renderRow (a,ss) = a ++ " : " ++ prAlts ss

-- | Print a parameter value without hierarchy (i.e. drop all parentheses).
prFlat :: String -> String
prFlat = filter (`notElem` "()")

-- | Show all values for the first parameter.
prFirstForm :: Param a => Table a -> String
prFirstForm = prStr . firstForm

-- | Show one value for the first parameter (used in the dictionary).
prDictForm :: Param a => Table a -> String
prDictForm = prDictStr . firstForm

-- | First alternative of a 'Str', or @NONE@ when there is none.
prDictStr :: Str -> String
prDictStr t = case unStr t of
  s:_ -> s
  []  -> "NONE" -- should not happen...

-- | Human-readable dump of a whole dictionary, one paragraph per entry.
prDictionary :: Dictionary -> String
prDictionary = unlines . map (unlines . prEntry) . removeAttr
 where
  prEntry (stem, typ, inhs, infl) =
    stem : typ : unwords inhs : [a ++ ": " ++ prStr s | (a,s) <- infl]

-- | Render a full-form lexicon, one @word:analysis@ line per analysis.
prFullFormLex :: FullFormLex -> String
prFullFormLex = concatMap prEntry
 where
  prEntry (s,ps) = unlines [s ++ ":" ++ a | a <- map prAnalysis ps]
  prAnalysis (a,ss) = ss ++ prCompAttr a

-- | Render a composition attribute as @ (attr) @.
prCompAttr :: Attr -> String
prCompAttr a = " (" ++ show a ++ ") "
-- a parser

{-
-- parse full-form lexicon from the format we print; ignore unparsable lines
pFullFormLex :: String -> FullFormLex
pFullFormLex s = [r | l <- s', Just r <- [pOne (words l)]] where
  s' = filter nocomment (lines s) where
    nocomment l = case l of
      '-':'-':_ -> False -- use -- for comment lines
      _ -> True
  pOne (s:":":ps) = Just (s, (pPs ps))
  pPs ws = pCompAttr p : ps where
    (p,ps0) = span (/="/") ws
    ps = if null ps0 then [] else pPs (tail ps0)
  pCompAttr p = case p of
    "(P)"  : p' -> (atP, unwords p')
    "(WP)" : p' -> (atWP, unwords p')
    _ -> (atW, unwords p) -- ignoring values >2
-}

-- generate GF source code
-- N.B. syntax errors result from GF-reserved-word identifiers!

-- | Render a dictionary as a sequence of GF resource opers.
prGFRes :: Dictionary -> String
prGFRes dict = unlines (map prGFOper (zip [0..] (removeAttr dict)))

-- | One GF oper: the longest common prefix of all forms is factored out as
-- the stem, and each form is rebuilt as @stem + \"suffix\"@.
prGFOper :: (Int,(String, Ident, [Ident], Table Ident)) -> String
prGFOper (i,(oper, ty, inhs, tab0)) =
    begin ++ " : Str -> " ++ ty ++ " = " ++ bind ++ rec ++ end
  where
    begin = "oper " ++ (oper ++ "_" ++ show i) -- Reduce the number of name clashes!
    bind  = "\\" ++ oper ++ " -> " ++
            "\n let " ++ stemv ++ " = Predef.tk " ++ show lg1 ++ " " ++ oper ++ " in"
    stem  = longestPrefix (unStr (formsInTable tab0))
    stemv = if lg == 0 then "x_" else stem ++ "_" -- to avoid clash with res words
    lg1   = length oper - lg
    lg    = length stem
    tab   = mapInTable (\w -> stemv ++ " + \"" ++ drop lg w ++ "\"") tab0
    rec   = "\n {s = " ++ tbl ++
            (if null inhs then "" else " ;\n ") ++
            concat (intersperse " ;\n "
                     ["h" ++ show k ++ " = " ++ p | (k,p) <- zip [1..] inhs]) ++
            "\n }"
    tbl   = case tab of
              [("INVAR",ss)] -> altsGF ss --- a hack to avoid one-branch tables; name-sensit.
              _ -> "table {\n" ++
                   concat (intersperse " ;\n"
                            ["    " ++ a ++ " => " ++ altsGFRes b | (a,b) <- tab]) ++
                   "\n }"
    end   = " ;\n"

-- | Render a dictionary as GF concrete syntax: category declarations
-- followed by one fun/lin pair per entry.
prGF :: Dictionary -> String
prGF dict = cats ++ unlines (map prGFRule (zip [0..] (removeAttr dict)))
  where
    cs   = unlines ["cat " ++ c ++ ";" | c <- map fst $ classifyDict dict]
    cats = "\n" ++ cs ++ "\n\n"

-- | One GF fun/lin pair for a dictionary entry.
prGFRule :: (Int,(String, Ident, [Ident], Table Ident)) -> String
prGFRule (i,(ident,cat,inhs,tab)) =
    let name = ident ++ "_" ++ show i in
    "fun " ++ name ++ " : " ++ cat ++ " ;\n\n" ++
    "lin " ++ name ++ " = {s = table {\n" ++
    concat (intersperse " ;\n"
             ["    " ++ a ++ " => " ++ altsGF b | (a,b) <- tab]) ++
    (if null inhs then "}" else " };\n ") ++
    concat (intersperse " ;\n "
             ["h" ++ show k ++ " = " ++ p | (k,p) <- zip [1..] inhs]) ++
    "\n} ;\n"

-- two GF modes for free variation; old for GF<0.98

-- | Free variation for GF linearizations (quoted forms).
-- BUG FIX: the original had a doubled @where where@ keyword here, which is
-- a syntax error; a single @where@ introduces the local 'prQ'.
altsGF :: Str -> String
altsGF xs = case unStr xs of
    [x] -> prQ x
    ys  -> "variants" ++ " {" ++ unwords (intersperse ";" (map prQ ys)) ++ "}"
  where
    prQ s = '"' : s ++ "\""

altsGFOld :: Str -> String
altsGFOld = show . prAlts

-- | Free variation for GF resource opers (unquoted expressions).
altsGFRes :: Str -> String
altsGFRes xs = case unStr xs of
    [x] -> x
    ys  -> "variants" ++ " {" ++ unwords (intersperse ";" ys) ++ "}"

-- code for XML

type TagId  = String
type XML    = String
type Struct = Bool

string :: String -> [XML]
string = (:[])

render :: [XML] -> String
render xs = unlines xs

-- | Wrap lines in @\<t\>...\</t\>@, indenting the contents by one space.
tag :: TagId -> [XML] -> [XML]
tag t xs = (("<" ++ t ++ ">") : (map (' ':) xs)) ++ ["</" ++ t ++ ">"]

-- | Like 'tag', with a single attribute on the opening tag.
tagA :: TagId -> (String,String) -> [XML] -> [XML]
tagA t (a,b) xs =
  (("<" ++ t ++ " " ++ a ++ "=\"" ++ b ++ "\"" ++ ">") : (map (' ':) xs)) ++
  ["</" ++ t ++ ">"]

-- | A self-closing tag with a single attribute.
tagA1 :: TagId -> (String,String) -> XML
tagA1 t (a,b) = "<" ++ t ++ " " ++ a ++ "=\"" ++ b ++ "\"" ++ " />"

-- | Render a dictionary as an XML lexicon, grouped by category.
prXML :: Dictionary -> String
prXML d =
    "<?xml version=\"1.0\"?>\n" ++
    render (tag "lexicon" (concat (map (uncurry pr) (classifyDict d))))
  where
    pr cat entries =
      tagA "class" ("category",cat) (concat (map (prEntry . noAttr) entries))
    prEntry (stem,_,inhs,tbl) =
      tag "lexicon_entry" $
        tagA1 "dictionary_form" ("value",stem) : (prInh inhs ++ prTabl tbl)
    prInh inhs = map (\s -> tagA1 "inherent" ("value",s)) inhs
    prTabl tbl =
      tag "inflection_table" $
        concat [tagA "inflection_form" ("pos",a)
                 (map (\s -> tagA1 "variant" ("word",s)) (unStr b))
               | (a,b) <- existingForms tbl]

-- code for Xerox LexC

prLEXC :: Dictionary -> String
prLEXC =
  ("LEXICON Root\n" ++) . (++ "END") . unlines .
  map (uncurry prLEXCRules) . classifyDict

prLEXCRules :: Ident -> [Entry] -> String
prLEXCRules cat entries =
    unlines $
      ("\n! category " ++ cat ++ "\n") : (map (prEntry . noAttr) entries)
  where
    prEntry (stem,_,inhs,tbl) =
      concat (map (prForm stem inhs) [(a,unStr b) | (a,b) <- existingForms tbl])
    prForm stem inhs (a,b) =
      concat [x ++ ":" ++ stem ++ prTags (a:inhs) ++ " # ;\n" | x <- b]
    prTags ts = concat ["+" ++ w | t <- ts, w <- words (prFlat t)]

altsLEXC :: [String] -> String
altsLEXC cs = unwords $ intersperse " # ;" cs

-- code for Xerox Finite State Tool

prXFST :: Dictionary -> String
prXFST = unlines . map (uncurry prXFSTRules) . classifyDict

prXFSTRules :: Ident -> [Entry] -> String
prXFSTRules cat entries =
    unlines $
      ("define " ++ cat ++ " [") :
      intersperse " |" (map (prEntry . noAttr) entries) ++
      [" ] ;"]
  where
    prEntry (stem,_,inhs,tbl) =
      concat (intersperse " |\n"
               (map (prForm stem inhs) [(a,unStr b) | (a,b) <- existingForms tbl]))
    prForm stem inhs (a,b) =
      " [ {" ++ stem ++ "}" ++ prTags (a:inhs) ++ " .x. " ++ altsXFST b ++ "]"
    prTags ts = unwords [" %+" ++ w | t <- ts, w <- words (prFlat t)]

altsXFST :: [String] -> String
altsXFST cs = unwords $ intersperse "|" ["{" ++ s ++ "}" | s <- cs]

-- a "book" with LaTeX tables

prLatex :: Dictionary -> String
prLatex d = unlines (beginLatex ++ map prLatexTable (removeAttr d) ++ endLatex)
  where
    beginLatex = ["\\documentclass{report}",
                  "\\usepackage{isolatin1}",
                  "\\begin{document}"]
    endLatex   = ["\\end{document}"]

prLatexTable :: EntryN -> String
prLatexTable (ident,cat,inhs,tab) =
  unwords ((ident ++ ",") : cat : inhs) ++ "\n" ++
  "\\begin{center}\n\\begin{tabular}{|l|l|}\\hline\n" ++
  unlines [a ++ " & {\\em " ++ prAlts b ++ "} \\\\" | (a,b) <- tab] ++
  "\\hline\n\\end{tabular}\n\\end{center}\n\\newpage\n\n"
-- use prValue instead of this!
{-
 where
  prTag = unpar . unwords . twords -- remove the outermost constructor
  twords s = case words s of
    (_:w:ws) -> w:ws -- but only if something is left
    ws -> ws
  unpar s = case s of -- remove the outer parentheses
    '(':cs | last cs == ')' -> init cs
    _ -> s
-}

-- SQL ---------------------------------------------------------

wordLength = 50 :: Int
attrLength = 30 :: Int

type Schema       = String -- The database structure
type Element      = String -- the database content
type TableS       = String -- a table
type Column       = String -- a column (attribute)
type Value        = String -- a value of a column (attribute)
type DatabaseName = String

-- | SQL script creating the morphology database and its LEXICON table.
-- (The Dictionary argument is unused but kept for interface compatibility.)
prSqlSchema :: Dictionary -> DatabaseName -> String
prSqlSchema dict dname =
  "\n-- The Morphology Schema.\n\n" ++
  "DROP DATABASE IF EXISTS " ++ dname ++ ";\n" ++
  "CREATE DATABASE " ++ dname ++ ";\n" ++
  "USE " ++ dname ++ ";\n\n" ++
  lexicon ++
  "GRANT ALL PRIVILEGES ON " ++ dname ++ ".* TO PUBLIC ; \n\n"

-- A instance needs to:
-- * Be put in the lexicon with a unique identifier
-- * Be put in the class schema
-- * Be put in the inherent schema

-- | SQL script populating the LEXICON table, one INSERT per word form.
prSQL :: Dictionary -> String
prSQL = (lexicon ++) . unlines . map prSql . zip [1..] . removeAttr
  where
    prSql (i,(stem, cat, inh, table)) = lexic i stem cat (expand table inh)
    lexic i stem cat t =
      unlines [insert "LEXICON" [show i,stem,cat,b,a] | (a,b) <- t]
    expand table inh =
      [(a ++ " - " ++ (unwords inh), s) | (a,b) <- table, s <- unStr b]

{-
prWordsCl :: [(String,[((Int,String),[String])])] -> [String]
prWordsCl [] = []
prWordsCl ((c,((n1,w1),as1):xs):xss) =
  (insert c ([show n1,w1,show n1] ++ as1) :
   [insert c ([show n,w,show n1] ++ as) | ((n,w),as) <- xs]) ++ prWordsCl xss

innerNumber :: [(a,[(b,[c])])] -> Int -> [(a,[((Int,b),[c])])]
innerNumber [] _ = []
innerNumber ((a,xs):xss) n = (a,number xs n) : innerNumber xss (n+(length xs))
 where
  number xs n = zipWith f [n..] xs
  f n (s,zs) = ((n,s),zs)
-}

-----------------------------------------------------

emptyE :: Value
emptyE = "NULL"

-- | One SQL INSERT statement for the given table and values.
insert :: TableS -> [Value] -> Element
insert t vs =
  "INSERT INTO " ++ t ++ " VALUES ('" ++
  concat (intersperse "','" vs) ++ "');"

type Name           = String
type Type           = String
type TypeConstraint = String
type Constraint     = String

primaryKey :: Name -> Constraint
primaryKey n = "PRIMARY KEY (" ++ n ++ ")"

-- | BUG FIX: the original emitted @FOREIGN (...)@, which is not valid SQL;
-- the keyword is @FOREIGN KEY@.
foreignKey :: Name -> (Name,Name) -> Constraint
foreignKey n (n1,n2) =
  "FOREIGN KEY (" ++ n ++ ") REFERENCES " ++ n1 ++ "(" ++ n2 ++ ")"

varchar :: Int -> Type
varchar n = "VARCHAR(" ++ show n ++ ")"

intType :: Type
intType = "INTEGER"

notNull :: TypeConstraint
notNull = "NOT NULL"

-- | CREATE TABLE statement from column specs and table constraints.
-- BUG FIX: the original concatenated the column list and the constraint
-- list without a comma between them, producing invalid SQL whenever both
-- were non-empty; joining them in a single 'intersperse' fixes that
-- (behavior is unchanged for the empty-constraint calls in this module).
createTable :: Name -> [(Name,Type,TypeConstraint)] -> [Constraint] -> TableS
createTable n xs cs =
    "CREATE TABLE " ++ n ++ "\n(\n" ++
    concat (intersperse ",\n" (columns ++ cs)) ++
    ");\n\n"
  where
    columns = [cn ++ " " ++ t ++ " " ++ tc | (cn,t,tc) <- xs]

lexicon :: TableS
lexicon = createTable "LEXICON"
  [ ("ID",         intType,            notNull)
  , ("DICTIONARY", varchar wordLength, notNull)
  , ("CLASS",      varchar wordLength, notNull)
  , ("WORD",       varchar wordLength, notNull)
  , ("POS",        varchar wordLength, notNull)
  ] []
johnjcamilleri/maltese-functional-morphology
lib-1.1/Print.hs
lgpl-3.0
11,448
30
18
2,998
3,810
2,048
1,762
210
4
-- The sum of the squares of the first ten natural numbers is,
-- 1^2 + 2^2 + ... + 10^2 = 385
-- The square of the sum of the first ten natural numbers is,
-- (1 + 2 + ... + 10)^2 = 55^2 = 3025
-- Hence the difference between the sum of the squares of the first ten
-- natural numbers and the square of the sum is 3025 - 385 = 2640.
-- Find the difference between the sum of the squares of the first one
-- hundred natural numbers and the square of the sum.
-- http://projecteuler.net/index.php?section=problems&id=6

-- | Sum of the squares 1^2 + 2^2 + ... + x^2.
sumOfSq :: (Num a, Enum a) => a -> a
sumOfSq x = sum $ map (^2) [1 .. x]

-- | Square of the sum (1 + 2 + ... + x)^2.
sqOfsum :: (Num a, Enum a) => a -> a
sqOfsum x = ( sum [1 .. x] ^ 2)

-- | The requested difference: square of the sum minus sum of the squares.
-- (Added the missing top-level type signature; the type is the one GHC
-- already inferred, so no caller is affected.)
difference :: (Num a, Enum a) => a -> a
difference a = (sqOfsum a) - (sumOfSq a)
nmarshall23/Programming-Challenges
project-euler/006/06.hs
unlicense
694
0
8
154
131
73
58
5
1
{-# LANGUAGE OverloadedStrings, DataKinds #-}

import Database.Cassandra.CQL
import Control.Monad
import Control.Monad.CatchIO
import Control.Monad.Trans (liftIO)
import Data.Int
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as C
import Data.Set (Set)
import qualified Data.Set as S
import Data.Text (Text)
import qualified Data.Text as T
import Data.UUID
import System.Random

-- Schema queries ------------------------------------------------------

dropLists :: Query Schema () ()
dropLists = "drop table sets"

createLists :: Query Schema () ()
createLists = "create table sets (id uuid PRIMARY KEY, items set<text>)"

-- Data queries --------------------------------------------------------

insert :: Query Write (UUID, Set Text) ()
insert = "insert into sets (id, items) values (?, ?)"

select :: Query Rows () (Set Text)
select = "select items from sets"

-- | Run an action, ignoring the 'ConfigError' raised when the dropped
-- table does not exist; any other exception is rethrown.
ignoreDropFailure :: Cas () -> Cas ()
ignoreDropFailure action = action `catch` handler
  where
    handler exc = case exc of
      ConfigError _ _ -> return () -- Ignore the error if the table doesn't exist
      _               -> throw exc

-- Exercise set<text> columns: recreate the table, insert three rows,
-- then read everything back.
main = do
  pool <- newPool [("localhost", "9042")] "test" -- servers, keyspace
  runCas pool $ do
    ignoreDropFailure $ liftIO . print =<< executeSchema QUORUM dropLists ()
    liftIO . print =<< executeSchema QUORUM createLists ()

    keyA <- liftIO randomIO
    keyB <- liftIO randomIO
    keyC <- liftIO randomIO
    executeWrite QUORUM insert (keyA, S.fromList ["one", "two"])
    executeWrite QUORUM insert (keyB, S.fromList ["hundred", "two hundred"])
    executeWrite QUORUM insert (keyC, S.fromList ["dozen"])

    liftIO . print =<< executeRows QUORUM select ()
ducis/cassandra-minimalist
cassandra-cql-0.3.0.1/tests/test-set.hs
apache-2.0
1,566
0
13
327
473
250
223
38
2
import qualified Data.IntMap as IM

-- Count occurrences: digit-frequency map over a list of digits.
-- (Replaced the member/update/insert dance with the idiomatic
-- 'insertWith'; the qualified import also removes the need to write
-- Prelude.map everywhere.)
pack :: IM.IntMap Int -> [Int] -> IM.IntMap Int
pack t []     = t
pack t (n:ns) = pack (IM.insertWith (+) n 1 t) ns

-- | Read the counts for digits n..9 out of the map, 0 when absent.
depack :: Int -> IM.IntMap Int -> [Int]
depack 10 _ = []
depack n  t = IM.findWithDefault 0 n t : depack (n+1) t

-- | Decimal digits of n, least significant first; dec 0 == [].
dec :: Int -> [Int]
dec 0 = []
dec n = (n `mod` 10) : dec (n `div` 10)

-- | Digit-frequency histogram (digits 0..9) over all pairwise products
-- of the two lists.
ans' :: [Int] -> [Int] -> [Int]
ans' x y = depack 0 (pack IM.empty digits)
  where
    digits = concat (map dec [xx * yy | xx <- x, yy <- y])

-- | Process the input dataset: pairs of lines until the [0,0] sentinel.
ans :: [[Int]] -> [[Int]]
ans ([0,0]:_)  = []
ans (_:x:y:r)  = ans' x y : ans r

main :: IO ()
main = do
  c <- getContents
  let i = map (map read . words) (lines c) :: [[Int]]
  mapM_ (putStrLn . unwords . map show) (ans i)
a143753/AOJ
3201.hs
apache-2.0
935
0
15
401
496
250
246
33
2
{-# LANGUAGE OverloadedStrings #-}

module Data.Geometry.PolygonSpec where

import           Test.Hspec                          (Spec, describe, it,
                                                      shouldBe)

import qualified Data.Geometry.Clip.Internal.Polygon as InternalPolygon
import qualified Data.SpecHelper                     as SpecHelper

-- | An open square: the first vertex is not repeated at the end.
simplePolygonPts :: [(Double, Double)]
simplePolygonPts = [(0, 0), (4, 0), (4, 4), (0, 4)]

-- | The same square after closing: the first vertex is appended.
expectedSimplePolygonPts :: [(Double, Double)]
expectedSimplePolygonPts = [(0, 0), (4, 0), (4, 4), (0, 4), (0, 0)]

spec :: Spec
spec = testCloseIfNot

testCloseIfNot :: Spec
testCloseIfNot =
  describe "close" $
    it "Small set of points" $
      InternalPolygon.closeIfNot (SpecHelper.listToSequenceGeo simplePolygonPts)
        `shouldBe` Just (SpecHelper.listToSequenceGeo expectedSimplePolygonPts)
sitewisely/zellige
test/Data/Geometry/PolygonSpec.hs
apache-2.0
846
0
10
210
229
144
85
18
1
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleContexts #-}
-- | Typeclass-driven field selection: 'toField' picks the matching
-- required ('areq') or optional ('aopt') form field for a type.
module Yesod.Form.Class
    ( {- FIXME ToForm (..)
    , -} ToField (..)
    ) where

import Text.Hamlet
import Yesod.Form.Fields
import Yesod.Form.Types
import Yesod.Form.Functions (areq, aopt)
import Data.Int (Int64)
import Data.Time (Day, TimeOfDay)
import Data.Text (Text)
import Yesod.Message (RenderMessage)

{-
class ToForm a where
    toForm :: AForm sub master a
-}

class ToField a master where
    toField :: (RenderMessage master msg, RenderMessage master FormMessage)
            => FieldSettings msg -> Maybe a -> AForm sub master a

{- FIXME
instance ToFormField String y where
    toFormField = stringField
instance ToFormField (Maybe String) y where
    toFormField = maybeStringField
-}

-- Each type gets a required instance and a Maybe (optional) instance.

instance ToField Text master               where toField = areq textField
instance ToField (Maybe Text) master       where toField = aopt textField

instance ToField Int master                where toField = areq intField
instance ToField (Maybe Int) master        where toField = aopt intField

instance ToField Int64 master              where toField = areq intField
instance ToField (Maybe Int64) master      where toField = aopt intField

instance ToField Double master             where toField = areq doubleField
instance ToField (Maybe Double) master     where toField = aopt doubleField

instance ToField Day master                where toField = areq dayField
instance ToField (Maybe Day) master        where toField = aopt dayField

instance ToField TimeOfDay master          where toField = areq timeField
instance ToField (Maybe TimeOfDay) master  where toField = aopt timeField

instance ToField Html master               where toField = areq htmlField
instance ToField (Maybe Html) master       where toField = aopt htmlField

instance ToField Textarea master           where toField = areq textareaField
instance ToField (Maybe Textarea) master   where toField = aopt textareaField

{- FIXME
instance ToFormField Bool y where
    toFormField = boolField
-}
chreekat/yesod
yesod-form/Yesod/Form/Class.hs
bsd-2-clause
2,048
0
10
392
490
260
230
50
0
module CodeGeneratorTest where

import Test.Hspec
import qualified CodeGenerator as CG

-- Fixtures ------------------------------------------------------------

htmlTokens1 = [("sTag", "startDocument"), ("eTag", "endDocument")]

latex1 :: String
latex1 = "\\documentclass[11pt]{article}\n\n\\begin{document}\n\n\\end{document}"

-- Specs for the HTML-token -> LaTeX generator.
testCodeGenerator = hspec $
  describe "Code Generator" $ do
    it "returns LaTeX 'red' when given 'red' as input" $
      CG.generateLaTeX [("Word", "red")] `shouldBe` "red"
    it "returns latex1 when given htmlTokens1 as input" $
      CG.generateLaTeX htmlTokens1 `shouldBe` latex1
    it "returns LaTeX '\\textit{red}' when given tokenised version of '<i>red</i>' as input" $
      CG.generateLaTeX [("sTag", "italics"), ("Word", "red"), ("eTag", "italics")]
        `shouldBe` "\\textit{red}"
chris-bacon/HTML-LaTeX-Compiler
tests/CodeGeneratorTest.hs
bsd-3-clause
794
0
17
155
175
98
77
14
1
{-# LANGUAGE ConstrainedClassMethods #-}
{-# LANGUAGE ConstraintKinds         #-}
{-# LANGUAGE DefaultSignatures       #-}
{-# LANGUAGE FlexibleContexts        #-}
{-# LANGUAGE FlexibleInstances       #-}
{-# LANGUAGE FunctionalDependencies  #-}
{-# LANGUAGE MultiParamTypeClasses   #-}
{-# LANGUAGE TypeFamilies            #-}
{-# LANGUAGE ViewPatterns            #-}

module Diagrams.Coordinates.Isomorphic
  ( -- * Type constraints
    HasIndexedBasis, Euclidean

    -- * Vector like
  , VectorLike (..)
  , V2Like, V3Like

    -- * Point like
  , PointLike (..)
  , P2Like, P3Like
  )
  where

import Control.Lens
import Data.Complex
import Data.Typeable

import Diagrams.Prelude

-- | A basis that can also be traversed with its index.
type HasIndexedBasis v = (HasBasis v, TraversableWithIndex (E v) v)

-- | Umbrella constraint giving everything needed for working in the space.
-- This is basically a @V*@ from "linear".
type Euclidean v = (HasLinearMap v, HasIndexedBasis v, Metric v)

-- vector like ---------------------------------------------------------

-- | Provides an 'Iso'' between @a@ and @v n@, used to convert between the
-- data type you are already using, @a@, and diagrams' native form, @v n@.
class (Euclidean v, Typeable v) => VectorLike v n a | a -> v n where
  -- | Isomorphism from @v n@ to something 'VectorLike' @a@.
  --
  -- >>> V2 3 5 ^. vectorLike :: (Int, Int)
  -- (3,5)
  vectorLike :: Iso' (v n) a

  -- | Isomorphism from something 'VectorLike' @a@ to @v n@.
  --
  -- >>> ((3, 5) :: (Int, Int)) ^. unvectorLike
  -- V2 3 5
  unvectorLike :: Iso' a (v n)
  unvectorLike = from vectorLike
  {-# INLINE unvectorLike #-}

instance VectorLike V2 n (V2 n) where
  vectorLike = id
  {-# INLINE vectorLike #-}

type V2Like = VectorLike V2

instance VectorLike V2 n (n, n) where
  vectorLike = iso unr2 r2
  {-# INLINE vectorLike #-}

instance VectorLike V2 n (Complex n) where
  vectorLike = iso (\(V2 a b) -> a :+ b) (\(re :+ im) -> V2 re im)
  {-# INLINE vectorLike #-}

type V3Like = VectorLike V3

instance VectorLike V3 n (V3 n) where
  vectorLike = id
  {-# INLINE vectorLike #-}

instance VectorLike V3 n (n, n, n) where
  vectorLike = iso unr3 r3
  {-# INLINE vectorLike #-}

-- point like ----------------------------------------------------------

-- | Provides an 'Iso'' between @a@ and @Point v n@, used to convert
-- between the data type you are already using, @a@, and diagrams' native
-- form, @Point v n@.
class (Euclidean v, Typeable v) => PointLike v n a | a -> v n where
  -- | Isomorphism from @Point v n@ to something 'PointLike' @a@.
  --
  -- >>> mkP2 3 5 ^. pointLike :: (Int, Int)
  -- (3,5)
  pointLike :: Iso' (Point v n) a

  -- | Isomorphism from something 'PointLike' @a@ to @Point v n@.
  --
  -- >>> ((3, 5) :: (Int, Int)) ^. unpointLike
  -- P (V2 3 5)
  unpointLike :: Iso' a (Point v n)
  unpointLike = from pointLike
  {-# INLINE unpointLike #-}

-- | Things that are isomorphic to points in R2.
type P2Like = PointLike V2

instance PointLike V2 n (P2 n) where
  pointLike = id

instance PointLike V2 n (V2 n) where
  pointLike = iso (\(unp2 -> (a,b)) -> V2 a b) (\(V2 a b) -> a ^& b)
  {-# INLINE pointLike #-}

instance PointLike V2 n (n, n) where
  pointLike = iso unp2 p2
  {-# INLINE pointLike #-}

instance PointLike V2 n (Complex n) where
  pointLike = iso (\(unp2 -> (a,b)) -> a :+ b) (\(re :+ im) -> p2 (re,im))
  {-# INLINE pointLike #-}

type P3Like = PointLike V3

instance PointLike V3 n (P3 n) where
  pointLike = id
  {-# INLINE pointLike #-}

instance PointLike V3 n (n, n, n) where
  pointLike = iso unp3 p3
  {-# INLINE pointLike #-}
AjayRamanathan/plots
src/Diagrams/Coordinates/Isomorphic.hs
bsd-3-clause
3,720
0
11
907
783
451
332
70
0
{-# LANGUAGE OverloadedStrings, RecordWildCards, ScopedTypeVariables #-}
module Main where

import Web.Scotty
import Network.Wai.Handler.Warp
import qualified Data.Text.Lazy as TL
import Control.Monad.IO.Class
import Data.Aeson

-- Serve a single POST endpoint on port 3011 that echoes the request
-- parameters back as JSON.
main = do
  application <- scottyApp $
    post "/" $ do
      ps <- params
      -- liftIO $ print ps
      _ <- body -- consume (and discard) the raw request body
      raw (encode ps)
  run 3011 application
spacewaffle/ether
exp/posttest/Main.hs
bsd-3-clause
432
0
17
117
108
58
50
15
1
module Parse where

import Benchmark
import Data.Csv
import Data.List
import System.Directory
import Data.Either
import qualified Data.Vector as V
import qualified Data.ByteString.Lazy.Char8 as BS
import Data.Time.Clock.POSIX
import Data.Time.LocalTime
import System.FilePath
import Text.Read (readMaybe)

-- | Read and parse every non-hidden @.csv@ log in the given directory,
-- dropping the ones that fail to parse.
gulpLogs :: FilePath -> IO [V.Vector Benchmark]
gulpLogs f = do
  conts <- getDirectoryContents f
  let justCsv  = filter (".csv" `isSuffixOf`) conts
      noHidden = filter (not . ("." `isPrefixOf`)) justCsv
      toGulp   = map (f </>) noHidden
  logs <- mapM parseLog toGulp
  return (rights logs)

-- | Parse one log file: a free-form header (containing an epoch
-- timestamp), a delimiter line of 80 dashes, then CSV benchmark rows.
-- Every row is stamped with the header timestamp in local time.
-- BUG FIX: the original 'case' on the 'decode' result had no 'Left'
-- branch and crashed on malformed CSV; the error is now returned.
parseLog :: FilePath -> IO (Either String (V.Vector Benchmark))
parseLog p = do
  file <- BS.readFile p
  let (hdr, csv) = splitHeader file delimiter
  timezone <- getCurrentTimeZone
  case getEpochTime hdr of
    Nothing -> return $ Left "missing timestamp!"
    Just ts ->
      case decode HasHeader csv of
        Left err -> return (Left err)
        Right bm ->
          let stamp = utcToLocalTime timezone
                    $ posixSecondsToUTCTime
                    $ realToFrac ts
          in return $ Right $ fmap (\a -> a {benchTimestamp = stamp}) bm

-- | The header/CSV delimiter: a line of 80 dashes.
delimiter :: String
delimiter = replicate 80 '-'

-- | Extract the value of the @Epoch Timestamp:@ header line, if present
-- and numeric.
-- BUG FIX: the original used the partial 'read', which crashed on a
-- malformed number; 'readMaybe' returns 'Nothing' instead.
getEpochTime :: [String] -> Maybe Int
getEpochTime s = do
  elm  <- find ("Epoch Timestamp:" `isPrefixOf`) s
  elm' <- stripPrefix "Epoch Timestamp:" elm
  readMaybe elm'

-- | Split a log into the lines before the delimiter and the remaining
-- text after it (the CSV part, delimiter line dropped).
splitHeader :: BS.ByteString -> String -> ([String], BS.ByteString)
splitHeader msg delim = (hdr, BS.pack (unlines csv))
  where
    (hdr, csv) =
      let ((hdrr, csvr), _) = foldl' step initAcc lns
      in (reverse hdrr, reverse csvr)
    lns     = lines (BS.unpack msg)
    initAcc = (([], []), False)
    -- once the delimiter has been seen, everything goes to the CSV side
    step ((ls, rs), True)  e = ((ls, e:rs), True)
    step ((ls, rs), False) e
      | e == delim = ((ls, rs), True)
      | otherwise  = ((e:ls, rs), False)
ssaavedra/liquidhaskell
scripts/plot-benchmarks/src/Parse.hs
bsd-3-clause
2,084
0
21
699
712
375
337
51
3
{-# LANGUAGE EmptyCase #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE PolyKinds #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE LambdaCase #-} module Data.Derivation.Simple where import Data.Type.Nat import Data.Type.Nat.Quote import Data.Type.Product -- import Type.Class.Witness (Dec(..)) import Type.Family.List import Type.Family.Nat import Type.Family.Tuple import Data.Void data Snt a = At a | Not (Snt a) | Conj (Snt a) (Snt a) | Disj (Snt a) (Snt a) | Cond (Snt a) (Snt a) deriving (Eq,Ord,Show) type (&&) = Conj type (||) = Disj type (~>) = Cond infixr 3 && infixr 2 || infixr 1 ~> data Sent (at :: k -> *) :: Snt k -> * where Atom :: !(at a) -> Sent at (At a) Neg :: !(Sent at p) -> Sent at (Not p) (:&) :: !(Sent at p) -> !(Sent at q) -> Sent at (Conj p q) (:|) :: !(Sent at p) -> !(Sent at q) -> Sent at (Disj p q) (:->) :: !(Sent at p) -> !(Sent at q) -> Sent at (Cond p q) infixr 3 :& infixr 2 :| infixr 1 :-> data VProd (f :: k -> *) :: N -> [k] -> * where Nil :: VProd f Z Ø (:%) :: !(f a) -> !(VProd f n as) -> VProd f (S n) (a :< as) infixr 5 :% toProd :: VProd f n as -> Prod f as toProd = \case Nil -> Ø a :% as -> a :< toProd as data Atomic (p :: pred -> N -> *) (t :: k -> *) :: (pred,[k]) -> * where A :: !(p r n) -> !(VProd t n as) -> Atomic p t (r#as) data Form (p :: pred -> N -> *) (t :: k -> *) :: Snt (pred,[k]) -> * where Form :: !(Sent (Atomic p t) a) -> Form p t a type family DecideA (t :: k -> *) (p :: pred) (as :: [k]) :: * type family DecideS (t :: k -> *) (s :: Snt (pred,[k])) :: * where DecideS t (At a) = DecideA t (Fst a) (Snd a) DecideS t (Not p) = DecideS t p -> Void DecideS t (Conj p q) = 
(DecideS t p,DecideS t q) DecideS t (Disj p q) = Either (DecideS t p) (DecideS t q) -- DecideS t (Cond p q) = DecideS t p -> DecideS t q DecideS t (Cond p q) = DecideS t (Disj (Not p) q) class DecideAtom (t :: k -> *) (p :: pred -> N -> *) where decideAtom :: p r n -> VProd t n as -> Dec (DecideA t r as) decideSent :: DecideAtom t p => Sent (Atomic p t) a -> Dec (DecideS t a) decideSent = \case Atom (A p as) -> decideAtom p as Neg p -> case decideSent p of Proven a -> Refuted "negation" $ \k -> k a Refuted _ j -> Proven j p :& q -> case (decideSent p, decideSent q) of (Proven a ,Proven b ) -> Proven (a,b) (Refuted e j,_ ) -> Refuted e $ \(a,_) -> j a (_ ,Refuted e k) -> Refuted e $ \(_,b) -> k b p :| q -> case (decideSent p, decideSent q) of (Refuted e1 j,Refuted e2 k) -> Refuted (e1 ++ "\n" ++ e2) $ \case Left a -> j a Right b -> k b (Proven a,_ ) -> Proven $ Left a (_ ,Proven b) -> Proven $ Right b p :-> q -> case (decideSent p, decideSent q) of (Proven a,Refuted e k) -> Refuted ("conditional: " ++ e) $ \case Left j -> j a Right b -> k b (_ ,Proven b) -> Proven $ Right b (Refuted _ j,_ ) -> Proven $ Left j -- p :-> q -> case (decideSent p, decideSent q) of -- (Proven a,Refuted k) -> Refuted $ \l -> k $ l a -- (_ ,Proven b) -> Proven $ \_ -> b -- (Refuted j,_ ) -> Proven $ absurd . 
j type instance DecideA Nat (o :: Ordering) '[x,y] = NatIneq o x y decideLess :: Nat x -> Nat y -> Dec (NatIneq LT x y) decideLess = \case Z_ -> \case Z_ -> Refuted "!(Z < Z)" $ \case S_ y -> Proven $ LTZero $ S_ y S_ x -> \case Z_ -> Refuted "!(S x < Z)" $ \case S_ y -> case decideLess x y of Proven l -> Proven $ LTSucc l Refuted e k -> Refuted e $ \(LTSucc l) -> k l decideEqual :: Nat x -> Nat y -> Dec (NatIneq EQ x y) decideEqual = \case Z_ -> \case Z_ -> Proven EQZero S_ _ -> Refuted "!(Z = S y)" $ \case S_ x -> \case Z_ -> Refuted "!(S x = Z)" $ \case S_ y -> case decideEqual x y of Proven l -> Proven $ EQSucc l Refuted e k -> Refuted e $ \(EQSucc l) -> k l decideMore :: Nat x -> Nat y -> Dec (NatIneq GT x y) decideMore = \case Z_ -> \_ -> Refuted "!(Z > y)" $ \case S_ x -> \case Z_ -> Proven $ GTZero $ S_ x S_ y -> case decideMore x y of Proven l -> Proven $ GTSucc l Refuted e k -> Refuted e $ \(GTSucc l) -> k l data family NatIneq (o :: Ordering) :: N -> N -> * data instance NatIneq LT x y where LTZero :: !(Nat (S y)) -> NatIneq LT Z (S y) LTSucc :: !(NatIneq LT x y) -> NatIneq LT (S x) (S y) data instance NatIneq EQ x y where EQZero :: NatIneq EQ Z Z EQSucc :: !(NatIneq EQ x y) -> NatIneq EQ (S x) (S y) data instance NatIneq GT x y where GTZero :: !(Nat (S x)) -> NatIneq GT (S x) Z GTSucc :: !(NatIneq GT x y) -> NatIneq GT (S x) (S y) deriving instance Show (NatIneq LT x y) deriving instance Show (NatIneq EQ x y) deriving instance Show (NatIneq GT x y) instance DecideAtom Nat Inequality where decideAtom = \case Less -> \(x :% y :% Nil) -> decideLess x y Equal -> \(x :% y :% Nil) -> decideEqual x y More -> \(x :% y :% Nil) -> decideMore x y data Inequality :: Ordering -> N -> * where Less :: Inequality LT [qN|2|] Equal :: Inequality EQ [qN|2|] More :: Inequality GT [qN|2|] {- -- Arith {{{ data Ar = Val N | Add Ar Ar | Mul Ar Ar deriving (Eq,Ord,Show) data Arith :: Ar -> * where Nat :: !(Nat n) -> Arith (Val n) Plus :: !(Arith x) -> !(Arith y) -> Arith (Add x 
y) Times :: !(Arith x) -> !(Arith y) -> Arith (Mul x y) type family Eval (e :: Ar) :: N where Eval (Val x) = x Eval (Add x y) = Eval x + Eval y Eval (Mul x y) = Eval x * Eval y eval :: Arith e -> Nat (Eval e) eval = \case Nat x -> x Plus x y -> eval x .+ eval y Times x y -> eval x .* eval y -- }}} -} type x .<. y = At (LT # '[x,y]) type x .=. y = At (EQ # '[x,y]) type x .>. y = At (GT # '[x,y]) (.<.) :: n x -> n y -> Form Inequality n (x .<. y) x .<. y = atom Less $ x :% y :% Nil (.=.) :: n x -> n y -> Form Inequality n (x .=. y) x .=. y = atom Equal $ x :% y :% Nil (.>.) :: n x -> n y -> Form Inequality n (x .>. y) x .>. y = atom More $ x :% y :% Nil infix 4 .<., .=., .>. atom :: p r n -> VProd t n as -> Form p t (At (r # as)) atom p as = Form $ Atom $ A p as neg :: Form p t a -> Form p t (Not a) neg (Form a) = Form $ Neg a (.&.) :: Form p t a -> Form p t b -> Form p t (Conj a b) Form a .&. Form b = Form $ a :& b (.|.) :: Form p t a -> Form p t b -> Form p t (Disj a b) Form a .|. Form b = Form $ a :| b (.->) :: Form p t a -> Form p t b -> Form p t (Cond a b) Form a .-> Form b = Form $ a :-> b infixr 3 .&. infixr 2 .|. infixr 1 .-> type Exp = Form Inequality Nat -- e0 :: Nat x -> Nat y -> Exp (x .<. y || y .<. x) exp0 x y = x .<. y .|. y .<. x -- e1 :: Nat x -> Nat y -> Nat z -> Exp (x .<. y ~> y .<. z ~> x .<. z) exp1 x y z = x .<. y .-> y .<. z .-> x .<. z data Dec a = Proven a | Refuted String (a -> Void) display :: Show a => Dec a -> IO () display = \case Proven a -> putStrLn $ "success: " ++ show a Refuted e _ -> putStrLn $ "failure: " ++ e decide :: DecideAtom t p => Form p t a -> Dec (DecideS t a) decide (Form s) = decideSent s decide_ :: (DecideAtom t p, Show (DecideS t a)) => Form p t a -> IO () decide_ = display . decide
kylcarte/derivation
src/Data/Derivation/Inequality.hs
bsd-3-clause
7,739
0
17
2,357
3,474
1,794
1,680
-1
-1
{-# LANGUAGE GADTs #-}
module Main where

import Data.Word
import Architecture.ARM.Common
import Architecture.ARM.Instructions.UAL
import Architecture.ARM.Decoder.ARM
import Architecture.ARM.Decoder.Thumb
import Architecture.ARM.Pretty

import Test.Framework (defaultMain, testGroup)
import Test.Framework.Providers.HUnit
import Test.HUnit

import Objdump

import System.Random
import Data.Char
import Data.List
import Data.Maybe
import Text.Printf

-- FIXME: ICK, make this whole test setup less ugly

-- NOTE(review): the objdump path below is a hard-coded absolute path on the
-- original author's machine; it should become a configuration option or be
-- looked up on $PATH.

-- | Assert that our decoded instruction pretty-prints to the same UAL string
-- that objdump produced for the same machine word.
--
-- @off@ is the instruction's byte offset in the disassembled buffer; it is
-- used to relocate PC-relative branch targets before comparing, since objdump
-- prints absolute addresses.
ualMatches :: Word32 -> GeneralInstruction UAL -> String -> Assertion
-- If objdump calls the encoding invalid/undefined but we decoded it to a real
-- instruction, that is a failure on our side.
ualMatches off i@(Conditional _ _) s
  | "invalid" `isInfixOf` s || "undefined" `isInfixOf` s || null s =
      assertFailure (printf "invalid instruction '%s' decoded to '%s'" s (showGeneralInstruction i))
ualMatches off i@(Unconditional _) s
  | "invalid" `isInfixOf` s || "undefined" `isInfixOf` s || null s =
      assertFailure (printf "invalid instruction '%s' decoded to '%s'" s (showGeneralInstruction i))
-- We decoded Undefined: accept whatever objdump printed.
ualMatches off Undefined s = assertBool "" True
-- Normalize objdump spelling variants before comparing: the ".n" narrow-branch
-- suffix, and the "stmia"/"ldmia" aliases for "stm"/"ldm".
ualMatches off x ('b':c0:c1:'.':'n':r)   = ualMatches off x $ 'b':c0:c1:r
ualMatches off x ('s':'t':'m':'i':'a':r) = ualMatches off x $ 's':'t':'m':r
ualMatches off x ('l':'d':'m':'i':'a':r) = ualMatches off x $ 'l':'d':'m':r
-- Branch targets are PC-relative in our representation; add the instruction
-- offset so they match objdump's absolute rendering.
ualMatches off (Conditional cond (B op)) s =
  map toLower (showGeneralInstruction (Conditional cond (B $ op + (fromIntegral off)))) @?= s
ualMatches off (Conditional cond (BL op)) s =
  map toLower (showGeneralInstruction (Conditional cond (BL $ op + (fromIntegral off)))) @?= s
ualMatches off (Unconditional (BLX (Imm op))) s =
  map toLower (showGeneralInstruction (Unconditional (BLX (Imm $ op + (fromIntegral off))))) @?= s
-- Everything else: straight case-insensitive textual comparison.
ualMatches off x s = map toLower (showGeneralInstruction x) @?= s

-- | Disassemble 1000 random half-words with objdump in Thumb mode and check
-- that our Thumb decoder agrees with it instruction by instruction.
testDecoderThumb = do
  -- (review) removed a dead binding here: @let f = thumbDecode@ was never used.
  rs <- fmap (take 1000 . randoms) getStdGen
  let ws = map fromIntegral (rs :: [Int])
  insns <- disassemble16 "/Users/pumpkin/summon-arm-toolchain/sources/src/binutils/objdump"
                         ["-b", "binary", "-m", "arm", "-M", "force-thumb"]
                         ws
  let ours = map (\(_, x) -> either i16 i32 x) insns
  let tests = zipWith4 (\off w o u -> testCase (printf "0x%04x: %s %s" off w u) (ualMatches off o u))
                       [0,2..]
                       (map (either (printf "%04x\t\t" :: Word16 -> String)
                                    (uncurry (printf "%04x %04x\t")) . snd) insns)
                       ours
                       (map fst insns)
  return $ testGroup ("Thumb decoder") tests
  where
    -- Decode a 16-bit Thumb instruction; asking for another half-word is a bug.
    i16 :: Word16 -> GeneralInstruction UAL
    i16 i = case thumbDecode i of
              Done x   -> x
              Word16 _ -> error "decoded 16-bit instruction to larger instruction"
    -- Decode a 32-bit Thumb instruction from its two half-words; the decoder
    -- must consume exactly both of them.
    i32 :: (Word16, Word16) -> GeneralInstruction UAL
    i32 (i0, i1) = case thumbDecode i0 of
                     Done x   -> error $ "decoded 32-bit instruction to smaller instruction (" ++ show x ++ ")"
                     Word16 f -> case f i1 of
                                   Done x   -> x
                                   Word16 _ -> error "decoded 32-bit instruction to larger instruction"

-- | Disassemble 100 random words with objdump in ARM mode and check that our
-- ARM decoder agrees with it.
--
-- NOTE(review): the four parameters @d f s a@ are never used (the original
-- code even shadowed @f@ with a local binding); they are kept only so the
-- signature stays compatible with the (currently commented-out) call site.
testDecoderARM d f s a = do
  let decode = armDecode  -- was a shadowing @let f = armDecode@
  rs <- fmap (take 100 . randoms) getStdGen
  let ws = map fromIntegral (rs :: [Int])
  insns <- disassemble "/Users/pumpkin/summon-arm-toolchain/sources/src/binutils/objdump"
                       ["-b", "binary", "-m", "arm"]
                       ws
  let ours = map decode ws
  let tests = zipWith4 (\off w o u -> testCase (printf "0x%08x: 0x%08x %s" off w u) (ualMatches off o u))
                       [0,4..]
                       ws
                       ours
                       insns
  return $ testGroup ("ARM decoder") tests

main = defaultMain =<< sequence [{-testDecoderARM-} testDecoderThumb]

{- Thumb issues
===============

0x06d0: 0x4495  add sp, r2: [Failed]
Failed: expected: "add sp, r2"
 but got: "add r5, r2"
-}

{- ARM issues
================

0x000002f8: 0x4141fb0a mrsmi pc, SPSR: [Failed]
Failed: expected: "mrsmi pc, SPSR"
 but got: "mrsmi pc, cpsr"

0x000001f8: 0x346f8e23 strbtcc r8, [pc], #-3619: [Failed]
Failed: expected: "strbtcc r8, [pc], #-3619"
 but got: "strbtcc r8, [pc], #3619"

0x000003b4: 0x542fdb6f strtpl sp, [pc], #-2927: [Failed]
Failed: expected: "strtpl sp, [pc], #-2927"
 but got: "strtpl sp, [pc], #2927"

0x000002cc: 0x7366f7f2 msrvc SPSR_sx, #63438848: [Failed]
Failed: expected: "msrvc SPSR_sx, #63438848"
 but got: "msrvc"

0x00009664: 0xe1b0fb1a lsls pc, sl, fp: [Failed]
Failed: expected: "lsls pc, sl, fp"
 but got: "lsls pc, sl, lsl fp"

0x00009578: 0xa1ff98b6 ldrhge r9, [pc, #134]: [Failed]
Failed: expected: "ldrhge r9, [pc, #134]"
 but got: "ldrhge r9, [pc, #134]!"

-- Do I care about this?
0x00000c24: 0x017c80f0 ldrsheq r8, [ip, #0]!: [Failed]
Failed: expected: "ldrsheq r8, [ip, #0]!"
 but got: "ldrsheq r8, [ip]!"
-}
copumpkin/charm
tests/Main.hs
bsd-3-clause
4,632
0
19
950
1,294
662
632
55
4
{-# LANGUAGE TupleSections #-}

module ConsPD.Residualization where

import           ConsPD.Unfold
import           Control.Applicative ((<|>))
import qualified CPD.Residualization as CpdR
import           Data.List           (find, intercalate, nub)
import           Data.List.NonEmpty  (NonEmpty (..), toList)
import           Data.Maybe          (catMaybes, fromJust, fromMaybe)
import           Descend
import           Embed               (isVariant)
import qualified Eval as E
import qualified Residualization as Res
import qualified Subst
import           Syntax
import           Text.Printf         (printf)
import           Util.Miscellaneous  (fst3, show')

import Debug.Trace

-- NOTE(review): several large blocks of commented-out prototype code (a
-- list-based 'go' in 'generateGoalFromTree', the disabled body of
-- 'renameAmbigousVars' together with its renameTree/renameVars helpers, and
-- local disj/conj definitions) were removed during cleanup; consult version
-- control history if they are needed again.  The 'trace' calls are left in
-- place deliberately: they are the module's current debugging facility.

-- | Run the ConsPD driver on a program and residualize the resulting
-- process tree back into a program.
topLevel :: Program -> Program
topLevel input = residualize $ ConsPD.Unfold.topLevel (-1) input

-- | Turn a process tree (plus the original goal and its free variables)
-- back into a program.  A failed tree residualizes to a program with no
-- definitions.
residualize :: (ConsPDTree, G S, [S]) -> Program
residualize (Fail, goal, names) = Program [] (generateGoal goal names)
residualize (tree, goal, names) =
    -- let restricted = restrictSubsts tree in
    let restricted = tree in
    let (defs, newGoal) = generateDefs restricted in
    Program defs newGoal

-- | Generate relation definitions for every distinct leaf/generalization
-- configuration in the tree, plus the top-level goal invocation.
generateDefs :: ConsPDTree -> ([Def], G X)
generateDefs tree =
    let toplevel   = fromJust $ nodeContent tree in
    let leaves     = collectLeaves tree in
    let gens       = collectGens tree in
    let distinct   = nub $ map snd leaves in
    let simplified = restrictSubsts $ simplify $ renameAmbigousVars tree in
    -- One node per definition: the root itself, then the target of every
    -- generalization, then the target of every leaf.
    let nodes = (toplevel, simplified)
              : map (\(_, x) -> findNode x tree) gens
             ++ map (`findNode` tree) distinct in
    let definitions = foldl (\defs gs -> fst3 (CpdR.renameGoals gs defs)) [] $ map fst nodes in
    let defWithTree = zip (reverse definitions) (map snd nodes) in
    let invocations = map (generateInvocation definitions) leaves in
    trace (printf "\nDefinitions\n%s\n\n" (intercalate "\n" (map show definitions))) $
    let defs = map (generateDef definitions invocations) defWithTree in
    let (_, newGoal) = generateInvocation definitions (toplevel, toplevel) in
    (defs, Res.vident <$> newGoal)

-- | Debug helper: render a set of definitions, one per paragraph.
showDefinitions :: CpdR.Definitions -> String
showDefinitions = intercalate "\n\n" . map go
  where
    go (gs, n, args) = printf "%s %s: %s" n (show args) (show gs)

-- | Build the invocation of the definition registered for @v@, with the
-- arguments instantiated by unifying @v@ against the concrete goals @gs@.
-- Errors out if @v@ has no definition or the unification fails (both would
-- be internal invariant violations).
generateInvocation :: CpdR.Definitions -> ([G S], [G S]) -> ([G S], G S)
generateInvocation defs (gs, v) =
    let Just (_, n, as) = find ((v ==) . fst3) defs in
    -- (review) previously computed @res = call n args@ and then discarded it,
    -- rebuilding the identical expression; now built once.
    (gs, call n (generateArgs as))
  where
    generateArgs xs =
      case CpdR.unifyInvocationLists v gs (Just Subst.empty) of
        Just subst -> map (\a -> fromMaybe (V a) (Subst.lookup a subst)) xs
        Nothing    -> error (printf "Failed to generate invocation for %s" (show v))

-- | Same as 'generateInvocation' but returns only the call goal.
generateInvocation' :: CpdR.Definitions -> [G S] -> [G S] -> G S
generateInvocation' defs gs v =
    let Just (_, n, as) = find ((v ==) . fst3) defs in
    call n (generateArgs as)
  where
    generateArgs xs =
      case CpdR.unifyInvocationLists v gs (Just Subst.empty) of
        Just subst -> map (\a -> fromMaybe (V a) (Subst.lookup a subst)) xs
        Nothing    -> error (printf "Failed to generate invocation for %s" (show v))

-- | Find the (non-leaf) node of the tree whose goal is @v@ (or a variant of
-- it), simplified and with substitutions restricted.  Exact matches are
-- preferred over variants because 'go' lists them first.
findNode :: [G S] -> ConsPDTree -> ([G S], ConsPDTree)
findNode v tree =
    let nodes = go tree in
    case find nontrivial nodes of
      Just n  -> (v, restrictSubsts $ simplify $ renameAmbigousVars n)
      Nothing -> error $ printf "Residualization error: no node for\n%s" (show v)
  where
    go node@(Or _ (Descend goal _) _) | goal == v = return node
    go node@(Conj _ goal _)           | goal == v = return node
    go node@(Split _ goal _)          | goal == v = return node
    go node@(Gen ch _ goal _ _)       | goal == v = [node, ch]
    go node@(Or _ (Descend goal _) _) | goal `isVariant` v = return node
    go node@(Conj _ goal _)           | goal `isVariant` v = return node
    go node@(Split _ goal _)          | goal `isVariant` v = return node
    go node@(Gen ch _ goal _ _)       | goal `isVariant` v = [node, ch]
    go (Or ch _ _)      = concatMap go ch
    go (Conj ch _ _)    = concatMap go ch
    go (Split ch _ _)   = concatMap go ch
    go (Gen ch _ _ _ _) = go ch
    go _                = []

-- | A node is trivial iff it is a leaf.
nontrivial :: ConsPDTree -> Bool
nontrivial (Leaf _ _ _ _) = False
nontrivial _              = True

-- | The goal stored in a node, when it has one.
nodeContent (Or _ (Descend goal _) _) = Just goal
nodeContent (Conj _ goal _)           = Just goal
nodeContent (Split _ goal _)          = Just goal
nodeContent x                         = Nothing

-- | Generate one relation definition from its (renamed) signature and the
-- tree node that defines its body.
generateDef :: CpdR.Definitions -> [([G S], G S)] -> (([G S], Name, [S]), ConsPDTree) -> Def
generateDef defs invocations ((gs, n, args), tree) =
    let body  = generateGoalFromTree defs invocations tree args in
    let argsX = map Res.vident args in
    Def n argsX (E.postEval argsX body)

-- | Residualize a tree node into the body of a relation.
--
-- The boolean flag threaded through 'go' marks the root of the node: at the
-- root we must expand children rather than emit a self-invocation, otherwise
-- every relation would just call itself.
generateGoalFromTree :: CpdR.Definitions -> [([G S], G S)] -> ConsPDTree -> [S] -> G X
generateGoalFromTree definitions invocations tree args =
    trace (printf "GenerateGoalFromTree\n%s\n\n" (show $ nodeContent tree)) $
    case go args True tree of
      Just goal -> Res.vident <$> goal
      Nothing   -> error $ printf "Failed to generate relation body for %s" (show $ nodeContent tree)
  where
    -- Turn a substitution into a conjunction of unifications (or 'success'
    -- when it is empty).
    residualizeEnv :: Subst.Subst -> Maybe (G S)
    residualizeEnv xs =
      (conj $ map (\(s, ts) -> (V s) === ts) $ reverse (Subst.toList xs)) <|> return success

    go :: [S] -> Bool -> ConsPDTree -> Maybe (G S)
    go seen r t =
      let res = go' seen r t in
      trace (printf "Go:\n%s\nTree\n%s\nR: %s\nSeen\n%s\nResult\n%s\n\n"
                    (show (nodeContent t)) (show t) (show r) (show seen) (show res)) $
      res
      where
        go' seen r Fail           = Just failure
        go' seen r (Success ss _) = residualizeEnv ss
        -- Or: either an invocation of the registered definition (non-root),
        -- or the disjunction of the residualized children.
        go' seen r (Or ch (Descend gs _) s) = do
          let unifs = residualizeEnv s
          let rest  = getInvocation r gs <|> (disj $ catMaybes $ map (go seen False) ch)
          mkGoal unifs rest
        go' seen r (Split ch gs s) = do
          let unifs = residualizeEnv s
          let rest  = getInvocation r gs <|> (conj $ catMaybes $ map (go seen False) ch)
          mkGoal unifs rest
        go' seen r (Leaf gs s _ vs) = do
          let unifs = residualizeEnv s
          let rest  = getInvocation' gs vs
          mkGoal unifs rest
        go' seen r (Conj ch gs s) = do
          let unifs = residualizeEnv s
          let rest  = getInvocation r gs <|> (conj $ catMaybes $ map (go seen False) ch)
          mkGoal unifs rest
        go' seen r (Gen ch gs gs' gen s) = do
          let unifs       = residualizeEnv s
          let generalizer = residualizeEnv gen
          let rest        = getInvocation r gs <|> (conj $ catMaybes [go seen False ch])
          mkGoal (mkGoal unifs generalizer) rest
        go' seen r (Prune _ _) = error "Failed to residualize: Prune node in tree"

    -- Conjoin two optional goals, flattening nested 'Conjunction's.
    mkGoal (Just u) (Just r) = Just (f u r)
      where
        f (Conjunction x x' xs) (Conjunction y y' ys) = Conjunction x x' (xs ++ y : y' : ys)
        f g (Conjunction y y' ys)                     = Conjunction g y (y' : ys)
        f (Conjunction x x' xs) g                     = Conjunction x x' (xs ++ [g])
        f g h                                         = Conjunction g h []
    mkGoal (Just u) Nothing  = Just u
    mkGoal Nothing  (Just r) = Just r
    mkGoal _        _        = Nothing

    -- No invocation at the root of the relation's own node.
    getInvocation True gs = Nothing
    getInvocation _    gs = snd <$> (find ((gs ==) . fst) invocations)

    getInvocation' gs v = return $ generateInvocation' definitions gs v

-- | Currently the identity: the ambiguous-variable renaming pass is disabled
-- (its prototype implementation was removed during cleanup; see the NOTE at
-- the top of the module).
renameAmbigousVars :: ConsPDTree -> ConsPDTree
renameAmbigousVars tree = tree

-- | Residualize a bare goal.  The name list is currently unused but kept for
-- interface compatibility with callers.
generateGoal :: G S -> [S] -> G X
generateGoal g ns = (Res.vident <$> g)

-- | Collect every leaf configuration @(goals, variant target)@ in the tree.
-- Children of a multi-way Split also contribute their own goals.
collectLeaves :: ConsPDTree -> [([G S], [G S])]
collectLeaves (Leaf gs _ _ v)      = [(gs, v)]
collectLeaves (Or ch _ _)          = concatMap collectLeaves ch
collectLeaves (Conj ch _ _)        = concatMap collectLeaves ch
collectLeaves (Gen ch gs gs' _ _)  = collectLeaves ch
collectLeaves (Split [x] _ _)      = collectLeaves x
collectLeaves (Split ch _ _)       =
    let children = catMaybes $ map (\x -> do y <- nodeContent x
                                             return (y, y)) ch in
    let deeperLeaves = concatMap collectLeaves ch in
    children ++ deeperLeaves
collectLeaves (Prune _ _)          = error "Cannot residualize a tree with Prune nodes"
collectLeaves g                    = []

-- | Collect every generalization @(source goals, generalized goals)@ pair.
collectGens :: ConsPDTree -> [([G S], [G S])]
collectGens (Leaf gs _ _ v)     = []
collectGens (Or ch _ _)         = concatMap collectGens ch
collectGens (Conj ch _ _)       = concatMap collectGens ch
collectGens (Split ch _ _)      = concatMap collectGens ch
collectGens (Gen ch gs gs' _ _) = (gs, gs') : collectGens ch
collectGens (Prune _ _)         = error "Cannot residualize a tree with Prune nodes"
collectGens g                   = []
kajigor/uKanren_transformations
src/ConsPD/Residualization.hs
bsd-3-clause
13,151
0
31
3,845
3,766
1,926
1,840
184
16
--------------------------------------------------------------------------------
-- |
-- Module      :  Graphics.Rendering.OpenGL.Raw.MESA.ResizeBuffers
-- Copyright   :  (c) Sven Panne 2015
-- License     :  BSD3
--
-- Maintainer  :  Sven Panne <[email protected]>
-- Stability   :  stable
-- Portability :  portable
--
-- The <https://www.opengl.org/registry/specs/MESA/resize_buffers.txt MESA_resize_buffers> extension.
--
--------------------------------------------------------------------------------

module Graphics.Rendering.OpenGL.Raw.MESA.ResizeBuffers (
  -- * Functions
  glResizeBuffersMESA
) where

-- 'glResizeBuffersMESA' is defined in the shared Functions module; this
-- module merely re-exports it under the extension's own name.
import Graphics.Rendering.OpenGL.Raw.Functions
phaazon/OpenGLRaw
src/Graphics/Rendering/OpenGL/Raw/MESA/ResizeBuffers.hs
bsd-3-clause
663
0
4
78
37
31
6
3
0
module Main ( main ) where

import qualified Spec.Async
import qualified Spec.WaitGroup
import qualified Spec.RWLock
import qualified Spec.Aeson

import Test.Hspec

-- | Test-suite entry point: run every component spec under a single
-- hspec runner, in the order listed below.
main :: IO ()
main = hspec allSpecs
  where
    allSpecs :: Spec
    allSpecs =
         Spec.Async.spec
      >> Spec.WaitGroup.spec
      >> Spec.RWLock.spec
      >> Spec.Aeson.spec
Yuras/qase
spec/spec.hs
bsd-3-clause
275
0
8
45
81
47
34
13
1
module Data.Singletons.TH.Single.Fixity where

import Prelude hiding ( exp )
import Language.Haskell.TH hiding ( cxt )
import Language.Haskell.TH.Syntax (NameSpace(..), Quasi(..))
import Data.Singletons.TH.Options
import Data.Singletons.TH.Util
import Language.Haskell.TH.Desugar

-- | Single a fixity declaration: produce a fixity declaration (with the same
-- fixity) for the singled counterpart of the given 'Name'.  Returns 'Nothing'
-- when no singled fixity declaration should be generated for this kind of
-- name (see Note [singletons-th and fixity declarations] below).
singInfixDecl :: forall q. OptionsMonad q => Name -> Fixity -> q (Maybe DLetDec)
singInfixDecl name fixity = do
  opts <- getOptions
  -- The singled name depends on which namespace the original lives in.
  mb_ns <- reifyNameSpace name
  case mb_ns of
    -- If we can't find the Name for some odd reason,
    -- fall back to singValName
    Nothing       -> finish $ singledValueName opts name
    Just VarName  -> finish $ singledValueName opts name
    Just DataName -> finish $ singledDataConName opts name
    Just TcClsName -> do
      mb_info <- dsReify name
      case mb_info of
        Just (DTyConI DClassD{} _) -> finish $ singledClassName opts name
        _ -> pure Nothing
        -- Don't produce anything for other type constructors (type synonyms,
        -- type families, data types, etc.).
        -- See [singletons-th and fixity declarations], wrinkle 1.
  where
    -- Wrap the singled name into an infix declaration with the original fixity.
    finish :: Name -> q (Maybe DLetDec)
    finish = pure . Just . DInfixD fixity

-- | Try producing singled fixity declarations for Names by reifying them
-- /without/ consulting quoted declarations. If reification fails, recover and
-- return the empty list.
-- See [singletons-th and fixity declarations], wrinkle 2.
singReifiedInfixDecls :: forall q. OptionsMonad q => [Name] -> q [DDec]
singReifiedInfixDecls = mapMaybeM trySingFixityDeclaration
  where
    -- Names without a declared fixity (or that fail to reify) contribute
    -- nothing; 'qRecover' turns reification failure into 'Nothing'.
    trySingFixityDeclaration :: Name -> q (Maybe DDec)
    trySingFixityDeclaration name =
      qRecover (return Nothing) $ do
        mFixity <- qReifyFixity name
        case mFixity of
          Nothing     -> pure Nothing
          Just fixity -> fmap (fmap DLetDec) $ singInfixDecl name fixity

{- Note [singletons-th and fixity declarations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Promoting and singling fixity declarations is surprisingly tricky to get right.
This Note serves as a place to document the insights learned after getting this wrong at various points. As a general rule, when promoting something with a fixity declaration like this one: infixl 5 `foo` singletons-th will produce promoted and singled versions of them: infixl 5 `Foo` infixl 5 `sFoo` singletons-th will also produce fixity declarations for its defunctionalization symbols (see Note [Fixity declarations for defunctionalization symbols] in D.S.TH.Promote.Defun): infixl 5 `FooSym0` infixl 5 `FooSym1` ... ----- -- Wrinkle 1: When not to promote/single fixity declarations ----- Rules are meant to be broken, and the general rule above is no exception. There are certain cases where singletons-th does *not* produce promoted or singled versions of fixity declarations: * During promotion, fixity declarations for the following sorts of names will not receive promoted counterparts: - Data types - Type synonyms - Type families - Data constructors - Infix values We exclude the first four because the promoted versions of these names are the same as the originals, so generating an extra fixity declaration for them would run the risk of having duplicates, which GHC would reject with an error. We exclude infix value because while their promoted versions are different, they share the same name base. In concrete terms, this: $(promote [d| infixl 4 ### (###) :: a -> a -> a |]) Is promoted to the following: type family (###) (x :: a) (y :: a) :: a where ... So giving the type-level (###) a fixity declaration would clash with the existing one for the value-level (###). There *is* a scenario where we should generate a fixity declaration for the type-level (###), however. Imagine the above example used the `promoteOnly` function instead of `promote`. Then the type-level (###) would lack a fixity declaration altogether because the original fixity declaration was discarded by `promoteOnly`! 
The same problem would arise if one had to choose between the `singletons` and `singletonsOnly` functions. The difference between `promote` and `promoteOnly` (as well as `singletons` and `singletonsOnly`) is whether the `genQuotedDecs` option is set to `True` or `False`, respectively. Therefore, if `genQuotedDecs` is set to `False` when promoting the fixity declaration for an infix value, we opt to generate a fixity declaration (with the same name base) so that the type-level version of that value gets one. * During singling, the following things will not have their fixity declarations singled: - Type synonyms or type families. This is because singletons-th does not generate singled versions of them in the first place (they only receive defunctionalization symbols). - Data types. This is because the singled version of a data type T is always of the form: data ST :: forall a_1 ... a_n. T a_1 ... a_n -> Type where ... Regardless of how many arguments T has, ST will have exactly one argument. This makes is rather pointless to generate a fixity declaration for it. ----- -- Wrinkle 2: Making sure fixity declarations are promoted/singled properly ----- There are two situations where singletons-th must promote/single fixity declarations: 1. When quoting code, i.e., with `promote` or `singletons`. 2. When reifying code, i.e., with `genPromotions` or `genSingletons`. In the case of (1), singletons-th stores the quoted fixity declarations in the lde_infix field of LetDecEnv. Therefore, it suffices to call promoteInfixDecl/singleInfixDecl when processing LetDecEnvs. In the case of (2), there is no LetDecEnv to use, so we must instead reify the fixity declarations and promote/single those. See D.S.TH.Single.Data.singDataD (which singles data constructors) for a place that does this—we will use singDataD as a running example for the rest of this section. One complication is that code paths like singDataD are invoked in both (1) and (2). 
This runs the risk that singletons-th will generate duplicate infix declarations for data constructors in situation (1), as it will try to single their fixity declarations once when processing them in LetDecEnvs and again when reifying them in singDataD. To avoid this pitfall, when reifying declarations in singDataD we take care *not* to consult any quoted declarations when reifying (i.e., we do not use reifyWithLocals for functions like it). Therefore, it we are in situation (1), then the reification in singDataD will fail (and recover gracefully), so it will not produce any singled fixity declarations. Therefore, the only singled fixity declarations will be produced by processing LetDecEnvs. -}
goldfirere/singletons
singletons-th/src/Data/Singletons/TH/Single/Fixity.hs
bsd-3-clause
6,813
0
18
1,318
437
228
209
-1
-1
{-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE KindSignatures #-} module EFA.Application.Optimisation.Base where import qualified EFA.Application.Optimisation.Optimisation as Optimisation import qualified EFA.Application.Optimisation.DoubleSweep as DoubleSweep import qualified EFA.Application.Optimisation.ReqsAndDofs as ReqsAndDofs import qualified EFA.Application.Type as Type import qualified EFA.Application.Optimisation.Balance as Balance import qualified EFA.Application.Optimisation.Params as Params import qualified EFA.Application.Optimisation.Sweep as Sweep import qualified EFA.Application.Optimisation as AppOpt import qualified EFA.Application.Utility as ModUt import EFA.Application.Type (EnvResult) import qualified EFA.Flow.Topology.Record as TopoRecord import qualified EFA.Flow.Topology.Quantity as TopoQty import qualified EFA.Flow.Topology.Index as TopoIdx import qualified EFA.Flow.State.Index as StateIdx import qualified EFA.Flow.State.Quantity as StateQty import qualified EFA.Flow.State as State import qualified EFA.Flow.Sequence as Sequence import qualified EFA.Flow.Sequence.Quantity as SeqQty import qualified EFA.Flow.Part.Map as PartMap import qualified EFA.Flow.Storage as Storage import qualified EFA.Flow.SequenceState.Index as Idx import qualified EFA.Graph as Graph import qualified EFA.Graph.Topology.Node as Node import qualified EFA.Signal.Signal as Sig import qualified EFA.Signal.Record as Record import qualified EFA.Signal.Sequence as Sequ import qualified EFA.Signal.Vector as Vec import EFA.Signal.Data (Data(Data), Nil, (:>)) import EFA.Signal.Typ (Typ) import qualified EFA.Equation.Arithmetic as Arith import EFA.Equation.Arithmetic ((~+), (~-), (~*), (~/)) import EFA.Equation.Result (Result(Determined,Undetermined)) import qualified Data.Map as Map; import Data.Map (Map) import qualified Data.List as List import qualified Data.Set as Set import qualified Data.Vector.Unboxed as UV 
import Data.Vector (Vector) import Data.Monoid (Monoid) import Data.Maybe (fromMaybe) import Control.Monad (join) import Control.Applicative (liftA2) -- import Debug.Trace (trace) perStateSweep :: (Node.C node, Show node,RealFloat a, Ord a, Show a, UV.Unbox a, Arith.ZeroTestable (sweep vec a), Arith.Product (sweep vec a), Arith.Constant a, Sweep.SweepVector vec a, Sweep.SweepMap sweep vec a a, Sweep.SweepClass sweep vec a, Monoid (sweep vec Bool), Sweep.SweepMap sweep vec a Bool, Sweep.SweepClass sweep vec Bool) => Params.System node a -> Params.Optimisation node list sweep vec a -> StateQty.Graph node (Result (sweep vec a)) (Result (sweep vec a)) -> Map Idx.State (Map (list a) (Type.SweepPerReq node sweep vec a)) perStateSweep sysParams optParams stateFlowGraph = Map.mapWithKey f states where states = StateQty.states stateFlowGraph reqsAndDofs = map TopoIdx.Power $ ReqsAndDofs.unReqs (Params.reqsPos optParams) ++ ReqsAndDofs.unDofs (Params.dofsPos optParams) f state _ = DoubleSweep.doubleSweep solveFunc (Params.points optParams) where solveFunc = Optimisation.solve optParams reqsAndDofs (AppOpt.eraseXAndEtaFromState state stateFlowGraph) (Params.etaAssignMap sysParams) (Params.etaMap sysParams) state balForcing :: (Ord node, Show node, Sweep.SweepClass sweep vec a, Arith.Sum (sweep vec a), Sweep.SweepMap sweep vec a a, Arith.Constant a) => Balance.Forcing node a -> Params.Optimisation node list sweep vec a -> Type.StoragePowerMap node sweep vec a -> Result (sweep vec a) balForcing balanceForcing params powerMap = Map.foldWithKey f zero (Balance.unForcingMap balanceForcing) where zero = Determined $ Sweep.fromRational (Params.sweepLength params) Arith.zero f stoNode forcingFactor acc = g acc force where g (Determined ac) (Determined fo) = Determined $ ac ~+ fo g _ _ = Undetermined force = fmap (Sweep.map (Balance.getSocDrive forcingFactor ~*)) stoPower stoPower = fromMaybe (error $ "forcing failed, because node not found: " ++ show stoNode) (join $ Map.lookup 
stoNode powerMap) optStackPerState :: (UV.Unbox a, Arith.Sum a, Sweep.SweepClass sweep UV.Vector a, Ord node, Show node, Arith.Sum (sweep UV.Vector a), Arith.Constant a, Sweep.SweepClass sweep UV.Vector (a, a)) => Params.Optimisation node list sweep UV.Vector a -> Balance.Forcing node a -> Map Idx.State (Map [a] (Type.SweepPerReq node sweep UV.Vector a)) -> Type.OptStackPerState sweep UV.Vector a optStackPerState params balanceForcing = Map.map $ Map.map $ DoubleSweep.objectiveValue (balForcing balanceForcing params) optimalObjectivePerState :: (Ord a, Arith.Constant a, Arith.Sum a, UV.Unbox a,RealFloat a, Show node, Node.C node, Monoid (sweep UV.Vector Bool), Ord (sweep UV.Vector a), Arith.Product (sweep UV.Vector a), Sweep.SweepVector UV.Vector Bool, Sweep.SweepClass sweep UV.Vector Bool, Sweep.SweepMap sweep UV.Vector a Bool, Sweep.SweepVector UV.Vector a, Sweep.SweepClass sweep UV.Vector a, Sweep.SweepMap sweep UV.Vector a a) => Params.Optimisation node list sweep UV.Vector a -> Balance.Forcing node a -> Map Idx.State (Map [a] (Type.SweepPerReq node sweep UV.Vector a)) -> Type.OptimalSolutionPerState node a optimalObjectivePerState params balanceForcing = Map.map $ Map.map $ DoubleSweep.optimalSolutionState2 (balForcing balanceForcing params) expectedValuePerState :: (UV.Unbox a, Arith.Constant a, Sweep.SweepClass sweep UV.Vector a, Sweep.SweepClass sweep UV.Vector Bool) => Map Idx.State (Map (list a) (Type.SweepPerReq node sweep UV.Vector a)) -> Map Idx.State (Map (list a) (Maybe a)) expectedValuePerState = Map.map (Map.map DoubleSweep.expectedValue) {- -- TODO: is this code is still neeed for Display purposes ? -- needs to work with new StateForcing -- does it make sense ? 
selectOptimalState :: (Ord a,Arith.Sum a,Show (Params.StateForcing a), Show a,RealFloat a) => Params.Optimisation node list sweep vec a -> Map Idx.AbsoluteState (Params.StateForcing a) -> Type.OptimalSolutionPerState node a -> Params.IndexConversionMap -> Type.OptimalSolution node a selectOptimalState _params stateForcing stateMap indexConversionMap = let g _ Nothing y = y g _ x Nothing = x g f (Just x) (Just y) = Just (f x y) in List.foldl1' (Map.unionWith (g $ ModUt.maxByWithNaN ModUt.fst5)) $ map (\(st, m) -> Map.map (fmap (\(objVal, eta, idx ,env) -> (objVal Arith.~+ maybe (error "Base.selectOptimalState") Params.unStateForcing (ModUt.state2absolute st indexConversionMap >>= flip Map.lookup stateForcing), eta, st, idx, env))) m) $ Map.toList stateMap -} supportPoints :: (Ord a,Show (vec a),Vec.Len (vec a),Node.C node, Vec.Unique vec [a], Vec.Storage vec ([[a]], [Sig.SignalIdx]), Vec.Storage vec Sig.SignalIdx, Vec.Storage vec Int, Vec.Storage vec ([a]), Vec.FromList vec, Vec.Find vec, Vec.Filter vec, Vec.Zipper vec, Vec.Walker vec, Vec.Storage vec a, Vec.Singleton vec, Arith.Constant a, Show a, Vec.Storage vec Bool, Vec.Lookup vec) => [ TopoIdx.Position node] -> Record.PowerRecord node vec a -> [(a -> [a])] -> Sig.UTDistr vec ([[a]], [Sig.SignalIdx]) supportPoints idList rec functList = Sig.getActiveSupportPointsND $ zip functList $ map (Sig.untype . Record.getSig rec) idList envToPowerRecord :: (Ord node) => TopoQty.Section node (Result (Data (v :> Nil) a)) -> Record.PowerRecord node v a envToPowerRecord = TopoRecord.sectionToPowerRecord . 
TopoQty.mapSection (ModUt.checkDetermined "envToPowerRecord") convertRecord :: (Vec.Storage v d2, Vec.Storage t d2, Vec.Storage v d1, Vec.Storage t d1, Vec.Convert t v) => Record.Record s1 s2 t1 t2 id t d1 d2 -> Record.Record s1 s2 t1 t2 id v d1 d2 convertRecord (Record.Record time sigMap) = Record.Record (Sig.convert time) (Map.map Sig.convert sigMap) consistentRecord :: (Ord t5, Show t5, Arith.Constant t5) => Record.Record t t3 t1 t4 k [] t2 t5 -> Bool consistentRecord (Record.Record _ m) = case Map.elems m of [xs, ys] -> consistentIndices xs ys zs -> error $ "consistentRecord: more or less than exactly two signals: " ++ show zs where consistentIndices (Sig.TC (Data xs)) (Sig.TC (Data ys)) = let zs = xs ++ ys in all (<= Arith.zero) zs || all (Arith.zero <=) zs consistentSection :: (Ord t5, Show t5, Node.C node, Arith.Constant t5) => Params.System node a -> Sequ.Section (Record.Record t t3 t1 t4 (TopoIdx.Position node) [] t2 t5) -> Bool consistentSection sysParams (Sequ.Section _ _ rec) = let recs = map f $ Graph.edges $ Params.systemTopology sysParams f (Graph.DirEdge fr to) = Record.extract [TopoIdx.ppos fr to, TopoIdx.ppos to fr] rec in all consistentRecord recs filterPowerRecordList :: (Ord a, Show a, Arith.Constant a, Node.C node) => Params.System node a -> Sequ.List (Record.PowerRecord node [] a) -> ( Sequ.List (Record.PowerRecord node [] a), Sequ.List (Record.PowerRecord node [] a) ) filterPowerRecordList sysParams (Sequ.List recs) = let (ok, bad) = List.partition (consistentSection sysParams) recs in (Sequ.List ok, Sequ.List bad) -- HH: hier sollen tatsächlich params und ppos getrennt hineingefuehrt werden, -- damit man die Funktion auch für andere Positionen verwenden kann. 
signCorrectedOptimalPowerMatrices :: (Ord a, Arith.Sum a, Arith.Constant a, Show node, Ord node, Vec.Storage varVec (Maybe (Result a)), Vec.FromList varVec) => Params.System node a -> ReqsAndDofs.Dofs (TopoIdx.Position node) -> Map [a] (Maybe (a, a, Idx.State, Int, EnvResult node a)) -> Map (TopoIdx.Position node) (Sig.PSignal2 Vector varVec (Maybe (Result a))) signCorrectedOptimalPowerMatrices systemParams (ReqsAndDofs.Dofs ppos) m = Map.fromList $ map g ppos where g pos = (pos, ModUt.to2DMatrix $ Map.map f m) where f Nothing = Nothing f (Just (_, _, st, _, graph)) = case StateQty.lookup (StateIdx.powerFromPosition st pos) graph of Just sig -> Just $ if isFlowDirectionPositive systemParams st pos graph then sig else fmap Arith.negate sig _ -> fmap (const (Determined Arith.zero)) (getEdgeFromPosition st pos graph) isFlowDirectionPositive :: (Ord node, Show node) => Params.System node a -> Idx.State -> TopoIdx.Position node -> EnvResult node a -> Bool isFlowDirectionPositive sysParams state (TopoIdx.Position f t) graph = case Set.toList es of [Graph.DirEdge fe te] -> case flowTopoEs of Just set -> case ( Set.member (Graph.EDirEdge $ Graph.DirEdge fe te) set, Set.member (Graph.EDirEdge $ Graph.DirEdge te fe) set ) of (True, False) -> True (False, True) -> False tf -> error $ "isFlowDirectionPositive: " ++ "inconsisten flow graph " ++ show tf _ -> error $ "State (" ++ show state ++ ") not found" _ -> error $ "More or less than exactly one edge between nodes " ++ show f ++ " and " ++ show t ++ " in " ++ show es where flowTopoEs = fmap Graph.edgeSet $ ModUt.getFlowTopology state graph topo = Params.systemTopology sysParams es = Graph.adjacentEdges topo f `Set.intersection` Graph.adjacentEdges topo t getEdgeFromPosition :: (Ord (e a), Ord a, Show (e a), Show a, Graph.Edge e) => Idx.State -> TopoIdx.Position a -> State.Graph a e sectionLabel nl storageLabel el carryLabel -> Maybe (e a) getEdgeFromPosition state (TopoIdx.Position f t) = let g flowTopo = case Set.toList es 
of [e] -> e _ -> error $ "More or less than exactly one edge between nodes " ++ show f ++ " and " ++ show t ++ " in " ++ show es where es = Graph.adjacentEdges flowTopo f `Set.intersection` Graph.adjacentEdges flowTopo t in fmap g . ModUt.getFlowTopology state extractOptimalPowerMatricesPerState :: (Ord b, Ord node, Vec.Storage vec (vec (Maybe (Result a))), Vec.Storage vec (Maybe (Result a)), Vec.FromList vec) => Map Idx.State (Map [b] (Maybe (a1, EnvResult node a))) -> [TopoIdx.Position node] -> Map (TopoIdx.Position node) (Map Idx.State (Sig.PSignal2 Vector vec (Maybe (Result a)))) extractOptimalPowerMatricesPerState m ppos = Map.map (Map.map ModUt.to2DMatrix) $ Map.fromList $ map (\p -> (p, Map.mapWithKey (f p) m)) ppos where f p st matrixMap = Map.map g matrixMap where pos = StateIdx.powerFromPosition st p g = join . fmap (StateQty.lookup pos . snd) seqFlowBalance :: (Arith.Sum a, UV.Unbox a, Arith.Sum (sweep vec a)) => Sequence.Graph node structEdge sectionLabel nodeLabel (Result (sweep vec a)) boundaryLabel structLabel edgeLabel -> Map node (Result (sweep vec a)) seqFlowBalance = fmap (f . Storage.nodes . fst) . SeqQty.storages where f pm = liftA2 (~-) (PartMap.exit pm) (PartMap.init pm) stateFlowBalance :: (Arith.Sum a, UV.Unbox a, Arith.Sum (sweep vec a)) => EnvResult node (sweep vec a) -> Map node (Result (sweep vec a)) stateFlowBalance = fmap (f . Storage.nodes) . StateQty.storages where f pm = liftA2 (~-) (PartMap.exit pm) (PartMap.init pm) getOptimalControlMatricesOfOneState :: (Vec.Walker varVec, Vec.Storage varVec a, Arith.Constant a, Ord node, Ord a, Show node, Vec.Storage varVec (Maybe (Result a)), Vec.FromList varVec, Arith.Sum a) => Params.System node a -> Params.Optimisation node list sweep vec a -> Idx.State -> Type.OptimalSolutionOfOneState node a -> Map (TopoIdx.Position node) (Sig.PSignal2 Vector varVec a) getOptimalControlMatricesOfOneState sysParams optParams state = Map.map (Sig.map ModUt.nothing2Nan) . 
signCorrectedOptimalPowerMatrices sysParams (Params.dofsPos optParams) . Map.map (fmap (\(o, e, i, v) -> (o, e, state, i, v))) {- optimalMatrixOfOneState:: (Vec.Walker varVec, Vec.Storage varVec a, Arith.Constant a, Ord a, Vec.Storage varVec (Maybe (Result a)), Vec.FromList varVec) => Type.OptimalSolutionOfOneState node a -> Sig.PSignal2 Vector varVec a -} optimalMatrixOfOneState :: (Ord b, Arith.Constant d2, Vec.Walker v1, Vec.Walker v2, Vec.Storage v1 d2, Vec.Storage v2 (v1 d2), Vec.Storage v1 (Maybe (Result d2)), Vec.Storage v2 (v1 (Maybe (Result d2))), Vec.FromList v1, Vec.FromList v2) => (a -> d2) -> Map [b] (Maybe a) -> Sig.TC s (Typ x y z) (Data (v2 :> (v1 :> Nil)) d2) optimalMatrixOfOneState f = Sig.map ModUt.nothing2Nan . ModUt.to2DMatrix . Map.map (fmap (Determined . f)) optimalObjectiveMatrixOfOneState, optimalEtaMatrixOfOneState :: (Vec.Walker varVec, Vec.Storage varVec a, Arith.Constant a, Ord a, Vec.Storage varVec (Maybe (Result a)), Vec.FromList varVec) => Type.OptimalSolutionOfOneState node a -> Sig.PSignal2 Vector varVec a optimalObjectiveMatrixOfOneState = optimalMatrixOfOneState ModUt.fst4 optimalEtaMatrixOfOneState = optimalMatrixOfOneState ModUt.snd4 optimalIndexMatrixOfOneState:: (Vec.Storage varVec Int, Arith.Constant Int, Vec.Storage varVec (Maybe (Result Int))) => (Vec.Walker varVec, Vec.Storage varVec a, Arith.Constant a, Ord a, Vec.Storage varVec (Maybe (Result a)), Vec.FromList varVec) => Type.OptimalSolutionOfOneState node a -> Sig.PSignal2 Vector varVec Int optimalIndexMatrixOfOneState = optimalMatrixOfOneState ModUt.thd4 genOptimalObjectiveSignal :: (Vec.Zipper vec,Ord a,Show (vec Bool),Show (vec a),RealFloat a, Vec.Walker vec, Vec.Storage vec a) => Type.InterpolationOfAllStates node vec a -> Sig.UTSignal vec a genOptimalObjectiveSignal interpolation = Map.foldl' (Sig.zipWith (ModUt.maxByWithNaN id)) h t where objectiveSigPerState = Map.map Type.optObjectiveSignalOfState interpolation (h, t) = case Map.minView objectiveSigPerState of 
Just x -> x Nothing -> error "genOptimalObjectiveSignal: empty interpolation map" myTrace :: Show a => String -> a -> a myTrace _str x = x -- trace (str ++ ": " ++ show x) x findOptimalObjectiveStates :: (Vec.Zipper vec,Ord a,Vec.Storage vec Bool,Show (vec Bool), Vec.Singleton vec,Show (vec a),RealFloat a,Show a, Arith.Sum a, Vec.Walker vec, Vec.Storage vec a) => Balance.StateForcing -> Type.InterpolationOfAllStates node vec a -> Map Idx.State (Sig.UTSignal vec Bool) findOptimalObjectiveStates statForcing interpolation = Map.map (g . f . Type.optObjectiveSignalOfState) interpolation where opt = genOptimalObjectiveSignal interpolation f = forceOptimalStateSignal statForcing opt g = Sig.zipWith (==) opt forceOptimalStateSignal :: (Vec.Walker vec, Arith.Sum a,Vec.Zipper vec, Show a,RealFloat a, Ord a, Vec.Storage vec a, Vec.Singleton vec) => Balance.StateForcing -> Sig.UTSignal vec a -> Sig.UTSignal vec a -> Sig.UTSignal vec a forceOptimalStateSignal stateForcing overallOptimalSignal optimalSignalOfState = case stateForcing of Balance.StateForcingOn -> Sig.offset minimalDifference optimalSignalOfState Balance.StateForcingOff -> optimalSignalOfState where differenceSignal = overallOptimalSignal Sig..- optimalSignalOfState minimalDifference = Sig.fromScalar $ Sig.minimumWithNaN differenceSignal genOptimalStatesSignal :: (Ord a,Vec.Storage vec [Idx.State],Show (vec [Idx.State]),Show (vec Bool), Vec.Singleton vec,Show (vec a),Show a, Arith.Sum a, Vec.Zipper vec, Vec.Walker vec, Vec.Storage vec a,RealFloat a, Vec.Storage vec Bool) => Balance.StateForcing -> Type.InterpolationOfAllStates node vec a -> Sig.UTSignal vec [Idx.State] genOptimalStatesSignal statForcing interpolation = Map.foldlWithKey' (flip (Sig.zipWith . 
f)) emptyIndexSignal optStates where optStates = findOptimalObjectiveStates statForcing interpolation f st a b = a ++ (if b then [st] else []) time = Record.getTime $ Type.reqsAndDofsSignalsOfState $ ModUt.findMinElem interpolation emptyIndexSignal = Sig.untype $ Sig.map (const []) time -- TODO test bauen:: -- TC (Data [0.0,0.3333333333333333,0.6666666666666666,1.0,1.25,1.5,1.75,2.0]) -- genOptimalTime (Sig.fromList [[Idx.State 0],[Idx.State 1, Idx.State 1, Idx.State 1, Idx.State 0],[Idx.State 1]]) (Sig.fromList [0,1,2]) :: Sig.TSignal [] Double genOptimalSteppedTime :: (Vec.Zipper vec,Eq a,Show a,Show (vec a), Vec.Walker vec, Vec.Storage vec (a, a), Arith.Constant a, Vec.FromList vec, Vec.Storage vec a, Vec.Singleton vec, Vec.Storage vec [Idx.State]) => Sig.UTSignal vec [Idx.State] -> Sig.TSignal vec a -> Sig.TSignal vec a genOptimalSteppedTime indexSignal time = Sig.fromList $ concat $ zipWith f is ts where f states (t1, t2) = if t1 == t2 then [] else concat $ zipWith (\x y -> [x, y]) leftTimes rightTimes where leftTimes = map (g . convert) [0 .. len] rightTimes = case leftTimes of (_:xs) -> xs _ -> error "genOptimalSteppedTime: empty time list" len = length states convert = Arith.fromRational . 
fromIntegral g cnt = t1 ~+ (cnt ~* (t2 ~- t1) ~/ convert len) is = Sig.toList indexSignal ts = Sig.toList $ Sig.deltaMap (,) time genOptimalSteppedSignal :: (Vec.Storage vec [Idx.State], Eq a,Vec.Storage vec (a, a), Show (vec a), Vec.Singleton vec,Show a, Vec.Zipper vec, Vec.Walker vec, Vec.Storage vec a, Vec.FromList vec, Vec.Storage vec (Map Idx.State a)) => Sig.UTSignal vec [Idx.State] -> Sig.TSignal vec a -> Map Idx.State (Sig.PSignal vec a) -> Sig.PSignal vec a genOptimalSteppedSignal indexSignal time signalMap = Sig.fromList $ concat $ zipWith3 g is ts signalOfMaps where g states (t1, t2) m = if t1 == t2 then [] else concatMap (h m) states h m st = let x = fromMaybe (error $ err st m) (Map.lookup st m) in [x, x] err x m = "genOptimalSteppedSignal: Element " ++ show x ++ " not found in " ++ show m {- wofür ist das denn da? xlast = case (signalOfMaps, is) of (m:_, xs@(x:_):_) -> let x = last xs in fromMaybe (error $ err x m) (Map.lookup x m) _ -> error "genOptimalSteppedSignal: empty list" -} signalOfMaps = Sig.toList $ Map.foldrWithKey' (Sig.zipWith . Map.insert) emptySig signalMap is = Sig.toList indexSignal ts = Sig.toList $ Sig.deltaMap (,) time emptySig = Sig.map (const Map.empty) $ snd $ Map.findMin signalMap
energyflowanalysis/efa-2.1
src/EFA/Application/Optimisation/Base.hs
bsd-3-clause
21,490
0
17
4,912
6,906
3,587
3,319
-1
-1
{-# LANGUAGE TemplateHaskell #-}

-- | Shared wire types for the animated-dangerzone client\/server protocol.
module AnimatedDangerzone.Types where

import Data.Binary
import Data.Map (Map)
import Control.Lens
import NetworkedGame.Handles

-- | A position on the game grid (pair of 'Int's).
type Coord = (Int,Int)

-- | Messages sent from a client to the server.
data ClientMsg
  = ClientMove Coord    -- ^ request to move to the given coordinate
  | ClientHello String  -- ^ handshake; the 'String' is presumably the
                        --   player name (see 'UsernameConflict') — confirm
  deriving (Read, Show)

-- | Messages sent from the server to clients.
data ServerMsg
  = SetWorld World
  | Hello ConnectionId
  | UsernameConflict
  | QuitPlayer ConnectionId
  | NewPlayer ConnectionId Player
  | MovePlayer ConnectionId Coord
  deriving (Read, Show)

-- | Complete shared game state: terrain blocks plus connected players.
data World = World
  { _worldBlocks  :: Map Coord Block
  , _worldPlayers :: Map ConnectionId Player
  } deriving (Read, Show)

-- | Terrain block kinds.
data Block = Rock | Rubble | Ice | Lava | Stones | Air
  deriving (Read, Show, Ord, Eq)

-- | Per-player state.
data Player = Player
  { _playerName  :: String
  , _playerCoord :: Coord
  } deriving (Read, Show)

-- Both message types are serialised through their 'Show'/'Read'
-- instances rather than a structural encoding.
instance Binary ClientMsg where
  put = put . show
  get = fmap read get

instance Binary ServerMsg where
  put = put . show
  get = fmap read get

makeLenses ''Player
makeLenses ''World
glguy/animated-dangerzone
src/AnimatedDangerzone/Types.hs
bsd-3-clause
993
0
9
210
303
173
130
43
0
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}

-- | Binding for the material-ui @TableHeader@ component.
module React.Flux.Mui.Table.TableHeader where

import Protolude

import Data.Aeson
import Data.Aeson.Casing
import Data.String (String)
import React.Flux

import React.Flux.Mui.Util

-- | Props accepted by @TableHeader@.  A 'Nothing' field is omitted from
-- the serialised props (see 'toProps').
data TableHeader = TableHeader
  { tableHeaderAdjustForCheckbox :: !(Maybe Bool)
  , tableHeaderClassName :: !(Maybe Text)
  , tableHeaderDisplaySelectAll :: !(Maybe Bool)
  , tableHeaderEnableSelectAll :: !(Maybe Bool)
  , tableHeaderSelectAllSelected :: !(Maybe Bool)
  } deriving (Generic, Show)

instance ToJSON TableHeader where
  -- Strip the record prefix ("tableHeader") and camelCase the rest, so
  -- the JSON keys match the component's prop names.
  toJSON = genericToJSON $ aesonDrop prefixLength camelCase
    where
      prefixLength = length ("TableHeader" :: String)

-- | Baseline props used when callers only want to override a few fields.
defTableHeader :: TableHeader
defTableHeader = TableHeader
  { tableHeaderAdjustForCheckbox = Just True
  , tableHeaderClassName = Nothing
  , tableHeaderDisplaySelectAll = Just True
  , tableHeaderEnableSelectAll = Just True
  , tableHeaderSelectAllSelected = Just False
  }

-- | Render the foreign @TableHeader@ element from its typed props, any
-- extra properties\/handlers, and child elements.
tableHeader_ ::
     TableHeader
  -> [PropertyOrHandler handler]
  -> ReactElementM handler ()
  -> ReactElementM handler ()
tableHeader_ args props =
  foreign_ "TableHeader" (maybe props (++ props) (toProps args))
pbogdan/react-flux-mui
react-flux-mui/src/React/Flux/Mui/Table/TableHeader.hs
bsd-3-clause
1,165
0
11
180
292
162
130
44
1
module Lexicon where

import LG.Base
import LG.Term
import Data.Maybe

-- | A lexical entry pairs a term with its type formula.
data Entry = LexEntry { term :: NodeTerm, formula :: Formula } deriving (Show)

-- | Look up every word in the lexicon; the whole lookup fails as soon as
-- one word is unknown.  We assume for simplicity that one word maps to at
-- most one lexical entry.
entries :: [String] -> Maybe [Entry]
entries = mapM Lexicon.lookup

-- Some often used formulae.
npP = P (AtomP "np")
nP  = P (AtomP "n")
sP  = P (AtomP "s")
sN  = N (AtomN "s")

det        = N (npP :/: nP)          -- np/n
simpleVerb = N (npP :\: sN)          -- np\s-
tv         = N (simpleVerb :/: npP)  -- (np\s-) / np
tvs        = N (simpleVerb :/: sN)   -- (np\s-) / s-
sub        = P (N (npP :/: nP) :<×>: nP)

-- Utility functions for creating atomic (co)variable terms.
va name = Va (Variable name)
ev name = Ev (Covariable name)

-- Example lexicon.
lookup "Mary"   = Just $ LexEntry (va "m") npP
lookup "likes"  = Just $ LexEntry (va "likes") tv
lookup "thinks" = Just $ LexEntry (va "think") tvs
lookup "Sambam" = Just $ LexEntry (va "think") (P (P (sN :</>: sN) :<\>: npP))
lookup "left"   = Just $ LexEntry (va "left") simpleVerb
lookup "John"   = Just $ LexEntry (va "j") npP
lookup "the"    = Just $ LexEntry (va "the") det
lookup "horse"  = Just $ LexEntry (va "horse") nP
lookup "s"      = Just $ LexEntry (ev "s") sN
-- Fig 15
lookup "figure15" = Just $ LexEntry (va "f") f
  where f = P (N (P (AtomP "a") :/: P (AtomP "b")) :<×>: P (AtomP "b"))
-- Fig 18
lookup "sub"  = Just $ LexEntry (va "sub") sub
lookup "tv"   = Just $ LexEntry (va "tv") tv
lookup "det"  = Just $ LexEntry (va "det") det
lookup "noun" = Just $ LexEntry (va "noun") nP
lookup "every"   = Just $ LexEntry (va "every") (N $ (N $ sN :/: (N $ npP :\: sN)) :/: nP)
lookup "barber"  = Just $ LexEntry (va "barber") nP
lookup "shaves"  = Just $ LexEntry (va "shaves") (N $ npP :\: (N $ sN :/: npP))
lookup "himself" = Just $ LexEntry (va "himself") (N $ (N $ (N $ npP :\: sN) :/: npP) :\: (N $ npP :\: sN))
lookup _ = Nothing
jgonggrijp/net-prove
src/Lexicon.hs
bsd-3-clause
2,389
0
15
761
927
473
454
44
2
module Week2 where

import Data.Maybe (mapMaybe)

import Log

-- | Parse one line of the log file.  Lines starting with @I@, @W@ or @E@
-- become 'Info', 'Warning' and 'Error' messages respectively; anything
-- else is wrapped in 'Unknown'.
--
-- NOTE(review): 'read' is partial — a malformed numeric field crashes the
-- parse instead of producing 'Unknown'.
parseMessage :: String -> LogMessage
parseMessage raw =
  case words raw of
    ("I":ts:msg)     -> LogMessage Info (read ts) (unwords msg)
    ("W":ts:msg)     -> LogMessage Warning (read ts) (unwords msg)
    -- For "E" the first number is the severity, the second the timestamp.
    ("E":sev:ts:msg) -> LogMessage (Error (read sev)) (read ts) (unwords msg)
    ws               -> Unknown (unwords ws)

-- | Parse a whole log file, one message per line.
parse :: String -> [LogMessage]
parse = map parseMessage . lines

-- | Insert a message into a binary search tree ordered by timestamp.
-- 'Unknown' messages — and insertion into a node that somehow holds an
-- 'Unknown' — leave the tree unchanged.
insert :: LogMessage -> MessageTree -> MessageTree
insert msg@(LogMessage _ _ _) Leaf = Node Leaf msg Leaf
insert msg1@(LogMessage _ t1 _) (Node left msg2@(LogMessage _ t2 _) right)
  | t1 < t2   = Node (insert msg1 left) msg2 right
  | otherwise = Node left msg2 (insert msg1 right)
insert _ tree = tree

-- | Build a search tree from an unsorted list of messages.
build :: [LogMessage] -> MessageTree
build = foldr insert Leaf

-- | In-order traversal, i.e. messages sorted by timestamp.
inOrder :: MessageTree -> [LogMessage]
inOrder Leaf = []
inOrder (Node leftTree msg rightTree) =
  inOrder leftTree ++ [msg] ++ inOrder rightTree

-- | The text of all errors of severity at least 50, sorted by timestamp.
whatWentWrong :: [LogMessage] -> [String]
whatWentWrong = extractMessage . inOrder . build . filter (filterMessage 50)

-- | True exactly for error messages whose severity reaches the minimum.
filterMessage :: Int -> LogMessage -> Bool
filterMessage minError (LogMessage (Error lvl) _ _) = minError <= lvl
filterMessage _ _ = False

-- | The message text of every 'LogMessage' in the list.
-- Fixed: the old version stopped at the first non-'LogMessage' element
-- (truncating the rest of the list) instead of skipping it.
extractMessage :: [LogMessage] -> [String]
extractMessage = mapMaybe messageText
  where
    messageText (LogMessage _ _ info) = Just info
    messageText _                     = Nothing
zach007/cis194
src/Week2.hs
bsd-3-clause
1,737
0
14
375
599
305
294
32
4
{-# LANGUAGE RankNTypes, GADTs #-}

-- | An existential wrapper hiding a type constructor's argument.
module Andromeda.Common.Exists where

-- | @Exists f@ packs an @f a@ while hiding the concrete @a@.
data Exists f where
  Exists :: f a -> Exists f

-- | Pack a value.  The GADT constructor performs exactly this
-- conversion, so the previous 'unsafeCoerce' is unnecessary.
mkExists :: forall f a. f a -> Exists f
mkExists = Exists

-- | Eliminate the existential with a function polymorphic in the hidden
-- type.  Implemented by safe pattern matching instead of 'unsafeCoerce';
-- this also makes values built with 'mkExists' safe to pattern match on
-- directly, which the coercion-based encoding was not.
runExists :: forall f r. (forall a. f a -> r) -> (Exists f -> r)
runExists f (Exists x) = f x
graninas/Andromeda
src/Andromeda/Common/Exists.hs
bsd-3-clause
315
0
10
61
109
62
47
9
1
{-# LANGUAGE NoMonomorphismRestriction #-}

import Diagrams.Prelude
import Diagrams.Coordinates
import Diagrams.Backend.Cairo.CmdLine

import Rayout

-- | A wireframe page mock-up: horizontal bands stacked with (===),
-- cells inside a band joined with (|||).
main = defaultMain page
  where
    page =
      navRow === headerRow === bannerRow === contentRow
        === linksRow === searchRow === footerRow

    navRow = tb "Navigation" 1600 32

    headerRow =
      (tb "Logo" 300 64 ||| tb "Location" 200 32 ||| strutX 100 ||| tb "Search" 600 48)
        # centerX

    bannerRow = tb "Banner" 1200 256

    -- Two top-aligned columns, centred as a whole.
    contentRow = (mainCol # alignT ||| sideCol # alignT) # centerX

    mainCol =
      tb "Search panel" 800 256
        === shoeRow
        === tb "Video" 800 192
        === shopRow

    shoeRow =
      ((tb "Boots" 600 128 === tb "Dress shoes" 600 128) # centerY ||| tb "Brands" 200 256)
        # centerX

    shopRow =
      (tb "Shop list" 160 384 # alignT
        ||| (tb "Shop attributes" 640 128 === tb "Shop inventory" 640 256) # alignT)
        # centerX

    sideCol =
      tb "Gadget panel" 400 128
        === tb "Shipping cost calc" 400 192
        === tb "Top shoes" 400 256
        === tb "Viewed items" 400 256
        === tb "Top shops" 400 256

    linksRow = (tb "Links" 600 128 ||| tb "Tips" 600 128) # centerX

    searchRow = tb "Search" 600 48

    footerRow = tb "Footer" 1600 144
ducis/rayout
1.hs
bsd-3-clause
1,024
60
26
304
403
198
205
53
1
{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE RecordWildCards #-} {-# OPTIONS_HADDOCK -ignore-exports #-} -- | A simple OAuth2 Haskell binding. (This is supposed to be -- independent of the http client used.) module Network.OAuth.OAuth2.Internal where import Control.Applicative import Control.Arrow (second) import Control.Monad.Catch import Data.Aeson import Data.Aeson.Types (Parser, explicitParseFieldMaybe) import Data.Binary (Binary) import qualified Data.ByteString as BS import qualified Data.ByteString.Lazy as BSL import Data.Default import Data.Maybe import Data.Text (Text, pack, unpack) import Data.Text.Encoding import GHC.Generics import Lens.Micro import Lens.Micro.Extras import Network.HTTP.Conduit as C import qualified Network.HTTP.Types as H import qualified Network.HTTP.Types as HT import URI.ByteString import URI.ByteString.Aeson () import URI.ByteString.QQ -------------------------------------------------- -- * Data Types -------------------------------------------------- -- | Query Parameter Representation data OAuth2 = OAuth2 { oauth2ClientId :: Text, oauth2ClientSecret :: Text, oauth2AuthorizeEndpoint :: URIRef Absolute, oauth2TokenEndpoint :: URIRef Absolute, oauth2RedirectUri :: URIRef Absolute } deriving (Show, Eq) instance Default OAuth2 where def = OAuth2 { oauth2ClientId = "", oauth2ClientSecret = "", oauth2AuthorizeEndpoint = [uri|https://www.example.com/|], oauth2TokenEndpoint = [uri|https://www.example.com/|], oauth2RedirectUri = [uri|https://www.example.com/|] } newtype AccessToken = AccessToken {atoken :: Text} deriving (Binary, Eq, Show, FromJSON, ToJSON) newtype RefreshToken = RefreshToken {rtoken :: Text} deriving (Binary, Eq, Show, FromJSON, ToJSON) newtype IdToken = IdToken {idtoken :: Text} deriving (Binary, Eq, Show, FromJSON, ToJSON) newtype ExchangeToken = ExchangeToken {extoken :: Text} deriving (Show, FromJSON, ToJSON) -- 
| The gained Access Token. Use @Data.Aeson.decode@ to -- decode string to @AccessToken@. The @refreshToken@ is -- special in some cases, -- e.g. <https://developers.google.com/accounts/docs/OAuth2> data OAuth2Token = OAuth2Token { accessToken :: AccessToken, refreshToken :: Maybe RefreshToken, expiresIn :: Maybe Int, tokenType :: Maybe Text, idToken :: Maybe IdToken } deriving (Eq, Show, Generic) instance Binary OAuth2Token parseIntFlexible :: Value -> Parser Int parseIntFlexible (String s) = pure . read $ unpack s parseIntFlexible v = parseJSON v -- | Parse JSON data into 'OAuth2Token' instance FromJSON OAuth2Token where parseJSON = withObject "OAuth2Token" $ \v -> OAuth2Token <$> v .: "access_token" <*> v .:? "refresh_token" <*> explicitParseFieldMaybe parseIntFlexible v "expires_in" <*> v .:? "token_type" <*> v .:? "id_token" instance ToJSON OAuth2Token where toJSON = genericToJSON defaultOptions {fieldLabelModifier = camelTo2 '_'} toEncoding = genericToEncoding defaultOptions {fieldLabelModifier = camelTo2 '_'} data OAuth2Error a = OAuth2Error { error :: Either Text a, errorDescription :: Maybe Text, errorUri :: Maybe (URIRef Absolute) } deriving (Show, Eq, Generic) instance FromJSON err => FromJSON (OAuth2Error err) where parseJSON (Object a) = do err <- (a .: "error") >>= (\str -> Right <$> parseJSON str <|> Left <$> parseJSON str) desc <- a .:? "error_description" errorUri <- a .:? 
"error_uri" return $ OAuth2Error err desc errorUri parseJSON _ = fail "Expected an object" instance ToJSON err => ToJSON (OAuth2Error err) where toJSON = genericToJSON defaultOptions {constructorTagModifier = camelTo2 '_', allNullaryToStringTag = True} toEncoding = genericToEncoding defaultOptions {constructorTagModifier = camelTo2 '_', allNullaryToStringTag = True} parseOAuth2Error :: FromJSON err => BSL.ByteString -> OAuth2Error err parseOAuth2Error string = either (mkDecodeOAuth2Error string) id (eitherDecode string) mkDecodeOAuth2Error :: BSL.ByteString -> String -> OAuth2Error err mkDecodeOAuth2Error response err = OAuth2Error (Left "Decode error") (Just $ pack $ "Error: " <> err <> "\n Original Response:\n" <> show (decodeUtf8 $ BSL.toStrict response)) Nothing data APIAuthenticationMethod = -- | Provides in Authorization header AuthInRequestHeader | -- | Provides in request body AuthInRequestBody | -- | Provides in request query parameter AuthInRequestQuery deriving (Eq, Ord) data ClientAuthenticationMethod = ClientSecretBasic | ClientSecretPost deriving (Eq, Ord) -------------------------------------------------- -- * Types Synonym -------------------------------------------------- -- | type synonym of post body content type PostBody = [(BS.ByteString, BS.ByteString)] type QueryParams = [(BS.ByteString, BS.ByteString)] -------------------------------------------------- -- * Utilies -------------------------------------------------- defaultRequestHeaders :: [(HT.HeaderName, BS.ByteString)] defaultRequestHeaders = [ (HT.hUserAgent, "hoauth2"), (HT.hAccept, "application/json") ] appendQueryParams :: [(BS.ByteString, BS.ByteString)] -> URIRef a -> URIRef a appendQueryParams params = over (queryL . queryPairsL) (params ++) uriToRequest :: MonadThrow m => URI -> m Request uriToRequest auri = do ssl <- case view (uriSchemeL . 
schemeBSL) auri of "http" -> return False "https" -> return True s -> throwM $ InvalidUrlException (show auri) ("Invalid scheme: " ++ show s) let query = fmap (second Just) (view (queryL . queryPairsL) auri) hostL = authorityL . _Just . authorityHostL . hostBSL portL = authorityL . _Just . authorityPortL . _Just . portNumberL defaultPort = (if ssl then 443 else 80) :: Int req = setQueryString query $ defaultRequest { secure = ssl, path = view pathL auri } req2 = (over hostLens . maybe id const . preview hostL) auri req req3 = (over portLens . (const . fromMaybe defaultPort) . preview portL) auri req2 return req3 requestToUri :: Request -> URI requestToUri req = URI ( Scheme ( if secure req then "https" else "http" ) ) (Just (Authority Nothing (Host $ host req) (Just $ Port $ port req))) (path req) (Query $ H.parseSimpleQuery $ queryString req) Nothing hostLens :: Lens' Request BS.ByteString hostLens f req = f (C.host req) <&> \h' -> req {C.host = h'} {-# INLINE hostLens #-} portLens :: Lens' Request Int portLens f req = f (C.port req) <&> \p' -> req {C.port = p'} {-# INLINE portLens #-}
freizl/hoauth2
hoauth2/src/Network/OAuth/OAuth2/Internal.hs
bsd-3-clause
6,825
0
16
1,290
1,762
975
787
147
4
{-- snippet testscript --}
import QC
import Prettify2
import Test.QuickCheck.Batch

-- | Shared QuickCheck driver settings: 200 cases per property.
options :: TestOptions
options = TestOptions
    { no_of_tests     = 200
    , length_of_tests = 1
    , debug_tests     = False
    }

-- | Run both property groups in sequence.
main :: IO ()
main = do
    runTests "simple" options simpleProps
    runTests "complex" options complexProps
  where
    simpleProps =
      [ run prop_empty_id
      , run prop_char
      , run prop_text
      , run prop_line
      , run prop_double
      ]
    complexProps =
      [ run prop_hcat
      , run prop_puncutate'
      , run prop_mempty_id
      ]
{-- /snippet testscript --}
binesiyu/ifl
examples/ch11/Run2.hs
mit
540
0
9
194
113
61
52
18
1
{-# OPTIONS_GHC -fno-warn-orphans #-} module Application ( getApplicationDev , appMain , develMain , taskMain , makeFoundation , withEnv -- * for DevelMain , getApplicationRepl , shutdownApp -- * for GHCI , handler , db ) where import Control.Monad.Logger (liftLoc, runLoggingT) import Database.Persist.Postgresql (createPostgresqlPool, pgConnStr, pgPoolSize, runSqlPool) import Import import Language.Haskell.TH.Syntax (qLocation) import Network.Wai.Handler.Warp (Settings, defaultSettings, defaultShouldDisplayException, runSettings, setHost, setOnException, setPort, getPort) import Network.Wai.Middleware.RequestLogger (Destination (Logger), IPAddrSource (..), OutputFormat (..), destination, mkRequestLogger, outputFormat) import System.Log.FastLogger (defaultBufSize, newStdoutLoggerSet, toLogStr) import Tasks (parseTask) import LoadEnv import System.Environment (getEnv) import Web.Heroku.Persist.Postgresql (postgresConf) -- Import all relevant handler modules here. -- Don't forget to add new modules to your cabal file! import Handler.Root import Handler.Common import Handler.Comments import Handler.Init import Handler.Embed import Handler.User import Handler.Feed import Handler.Unsubscribe import Handler.Sites import Handler.Docs import Handler.Purchase import Handler.Plan import Handler.Cancel -- This line actually creates our YesodDispatch instance. It is the second half -- of the call to mkYesodData which occurs in Foundation.hs. Please see the -- comments there for more details. mkYesodDispatch "App" resourcesApp -- | This function allocates resources (such as a database connection pool), -- performs initialization and return a foundation datatype value. This is also -- the place to put your migrate statements to have automatic database -- migrations handled by Yesod. 
-- | Allocate resources (HTTP manager, logger, static subsite, database
-- pool), read OAuth\/Stripe credentials from the environment, run the
-- database migrations, and return the application foundation.
--
-- NOTE: the record-wildcard construction @App {..}@ below captures the
-- bindings made in this function by name, so those names must match the
-- fields of 'App'.
makeFoundation :: AppSettings -> IO App
makeFoundation appSettings = do
    -- On Heroku the database configuration comes from DATABASE_URL;
    -- otherwise use the pool settings from the YAML config.
    dbconf <-
        if appDatabaseUrl appSettings
            then postgresConf $ pgPoolSize $ appDatabaseConf appSettings
            else return $ appDatabaseConf appSettings

    -- Basic initializations: HTTP connection manager, logger, static subsite.
    appHttpManager <- newManager
    appLogger <- newStdoutLoggerSet defaultBufSize >>= makeYesodLogger
    appStatic <-
        (if appMutableStatic appSettings then staticDevel else static)
            (appStaticDir appSettings)

    -- Third-party credentials, read from the process environment.
    appGithubOAuthKeys <- getOAuthKeys "GITHUB"
    appGoogleOAuthKeys <- getOAuthKeys "GOOGLE"
    appStripeKeys <- getStripeKeys

    -- We need a log function to create a connection pool. We need a
    -- connection pool to create our foundation. And we need our foundation
    -- to get a logging function. To break the cycle, first build a
    -- temporary foundation whose pool is an error thunk (never forced by
    -- the logger), take its log function, then build the real foundation.
    let mkFoundation appConnPool = App {..}
        tempFoundation = mkFoundation $ error "connPool forced in tempFoundation"
        logFunc = messageLoggerSource tempFoundation appLogger

    -- Create the database connection pool.
    pool <-
        flip runLoggingT logFunc $
            createPostgresqlPool (pgConnStr dbconf) (pgPoolSize dbconf)

    -- Perform database migration using our application's logging settings.
    runLoggingT (runSqlPool (runMigration migrateAll) pool) logFunc

    -- Return the foundation with the real pool in place.
    return $ mkFoundation pool
  where
    -- Read a provider's client id/secret pair, e.g. GITHUB_OAUTH_CLIENT_ID.
    getOAuthKeys :: String -> IO OAuthKeys
    getOAuthKeys plugin =
        OAuthKeys
            <$> getEnvText (plugin ++ "_OAUTH_CLIENT_ID")
            <*> getEnvText (plugin ++ "_OAUTH_CLIENT_SECRET")

    -- Read the Stripe secret/publishable key pair from the environment.
    getStripeKeys :: IO StripeKeys
    getStripeKeys =
        StripeKeys
            <$> getEnvText "STRIPE_SECRET_KEY"
            <*> getEnvText "STRIPE_PUBLISHABLE_KEY"

    -- getEnv throws if the variable is unset, which is the desired
    -- fail-fast behavior at startup.
    getEnvText :: String -> IO Text
    getEnvText = fmap pack . getEnv
-- | Convert our foundation to a WAI 'Application' by calling
-- 'toWaiAppPlain' and applying the request-logging middleware plus the
-- default middleware stack.
makeApplication :: App -> IO Application
makeApplication foundation = do
    logWare <- mkRequestLogger def
        { outputFormat =
            if appDetailedRequestLogging $ appSettings foundation
                then Detailed True
                else Apache
                        (if appIpFromHeader $ appSettings foundation
                            then FromFallback
                            else FromSocket)
        , destination = Logger $ loggerSet $ appLogger foundation
        }

    -- Create the WAI application and apply middlewares.
    appPlain <- toWaiAppPlain foundation
    return $ logWare $ defaultMiddlewaresNoLogging appPlain

-- | Warp settings for the given foundation value: port, host, and an
-- exception hook that routes Warp errors through the app's logger.
warpSettings :: App -> Settings
warpSettings foundation =
      setPort (appPort $ appSettings foundation)
    $ setHost (appHost $ appSettings foundation)
    $ setOnException
        (\_req e ->
            when (defaultShouldDisplayException e) $
                messageLoggerSource
                    foundation
                    (appLogger foundation)
                    $(qLocation >>= liftLoc)
                    "yesod"
                    LevelError
                    (toLogStr $ "Exception from Warp: " ++ show e))
      defaultSettings

-- | For yesod devel, return the Warp settings and WAI Application.
getApplicationDev :: IO (Settings, Application)
getApplicationDev = do
    settings <- getAppSettings
    foundation <- makeFoundation settings
    wsettings <- getDevSettings $ warpSettings foundation
    app <- makeApplication foundation
    return (wsettings, app)

-- | Load settings from config/settings.yml, allowing environment
-- variables to override, after loading the .env file.
getAppSettings :: IO AppSettings
getAppSettings = withEnv $ loadAppSettings [configSettingsYml] [] useEnv

-- | main function for use by yesod devel.
develMain :: IO ()
develMain = develMainHelper getApplicationDev
-- | The @main@ function for an executable running this site: read the
-- settings, build the foundation and application, and run it under Warp.
appMain :: IO ()
appMain = do
    -- Get the settings from all relevant sources.
    settings <- withEnv $ loadAppSettingsArgs
        -- fall back to compile-time values, set to [] to require values
        -- at runtime
        [configSettingsYmlValue]

        -- allow environment variables to override
        useEnv

    -- Generate the foundation from the settings.
    foundation <- makeFoundation settings

    -- Generate a WAI Application from the foundation.
    app <- makeApplication foundation

    -- Run the application with Warp.
    runSettings (warpSettings foundation) app

-- | Entry point for the task runner executable: parse the task from the
-- command line and run it as a handler action.
taskMain :: IO ()
taskMain = parseTask >>= handler

-- | Load the .env file before running the given action.
withEnv :: IO a -> IO a
withEnv = (loadEnv >>)

--------------------------------------------------------------
-- Functions for DevelMain.hs (a way to run the app from GHCi)
--------------------------------------------------------------
getApplicationRepl :: IO (Int, App, Application)
getApplicationRepl = do
    settings <- getAppSettings
    foundation <- makeFoundation settings
    wsettings <- getDevSettings $ warpSettings foundation
    app1 <- makeApplication foundation
    return (getPort wsettings, foundation, app1)

-- | No resources need explicit teardown.
shutdownApp :: App -> IO ()
shutdownApp _ = return ()

---------------------------------------------
-- Functions for use in development with GHCi
---------------------------------------------

-- | Run a handler against a freshly built foundation.
handler :: Handler a -> IO a
handler h = getAppSettings >>= makeFoundation >>= flip unsafeHandler h

-- | Run DB queries from GHCi.
db :: ReaderT SqlBackend (HandlerT App IO) a -> IO a
db = handler . runDB
vaporware/carnival
Application.hs
mit
7,838
0
16
1,980
1,310
698
612
-1
-1
--{-# LANGUAGE NamedFieldPuns, Safe #-}
{-# LANGUAGE NamedFieldPuns #-}

import Control.Concurrent.MVar (takeMVar)
import Control.Monad (forM, forM_, replicateM)
import Data.List (group, intercalate)
import RP ( RP, RPE, RPR, RPW, ThreadState(..), tid, runRP, forkRP, joinRP, threadDelayRP, readRP, writeRP
          , SRef, readSRef, writeSRef, newSRef )

-- | A singly linked list whose spine lives in relativistic references.
data RPList s a = Nil | Cons a (SRef s (RPList s a))

-- | Sum every element reachable from the given node onto @acc@.
snapshot :: Int -> RPList s Int -> RPR s Int
snapshot acc Nil         = return acc
snapshot acc (Cons x rn) = snapshot (x + acc) =<< readSRef rn

-- | Take @n@ successive snapshots of the list, accumulating their sums.
reader :: Int -> Int -> SRef s (RPList s Int) -> RPR s Int
reader 0 acc _    = return acc
reader n acc head = do
  acc' <- snapshot acc =<< readSRef head
  reader (n - 1) acc' head

-- | Unlink the second cell: rewrite the head to point past it.
-- Assumes the list has at least two cells after the given reference.
deleteMiddle :: SRef s (RPList s a) -> RPW s ()
deleteMiddle rl = do
  (Cons a rn) <- readSRef rl
  (Cons _ rm) <- readSRef rn
  writeSRef rl $ Cons a rm

-- | Build the three-cell list 1 -> 1 -> (-1); any snapshot therefore
-- sums to 1 (with the middle cell) or 0 (after deletion... the writer
-- removes a 1, leaving 1 + (-1) = 0).
testList :: RP s (SRef s (RPList s Int))
testList = do
  tail <- newSRef Nil
  c1   <- newSRef $ Cons (- 1) tail
  c2   <- newSRef $ Cons 1 c1
  newSRef $ Cons 1 c2

main :: IO ()
main = do
  outs <- runRP $ do
    -- initialize list
    head <- testList
    -- spawn 8 readers, each records 1000000 snapshots of the list
    rts  <- replicateM 8 $ forkRP $ readRP $ reader 1000000 0 head
    -- spawn a writer to delete the middle node
    wt   <- forkRP $ writeRP $ deleteMiddle head
    --wt <- forkRP $ writeRP $ return ()
    -- wait for the readers to finish and print snapshots
    outs <- forM rts $ \rt@(ThreadState {tid}) -> do
      v <- joinRP rt
      return $ show tid ++ ": " ++ show v
    -- wait for the writer to finish
    joinRP wt
    return outs
  forM_ outs putStrLn
ekmett/MonadicRP
src/RPListIntTest.hs
gpl-3.0
1,707
0
19
451
645
323
322
39
1
module Response.Draw (drawResponse) where

import Text.Blaze ((!))
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Control.Monad (forM_)
import Happstack.Server
import MasterTemplate
import Scripts

-- | Serve the interactive graph-drawing page: header, canvas heading,
-- and the editing-mode side panel, plus the drawing scripts.
drawResponse :: ServerPart Response
drawResponse =
    ok $ toResponse $
        masterTemplate
            "Courseography - Draw!"
            []
            (do header "draw"
                drawContent
                modePanel)
            drawScripts

-- | Page heading for the drawing canvas.
drawContent :: H.Html
drawContent = H.div ! A.id "about-div" $ "Draw a Graph"

-- | Side panel with one control per editing mode; the strings in
-- parentheses are the keyboard shortcuts handled client-side.
modePanel :: H.Html
modePanel = H.div ! A.id "side-panel-wrap" $ do
    H.div ! A.id "node-mode" ! A.class_ "mode clicked" $ "NODE (n)"
    H.input ! A.id "course-code"
            ! A.class_ "course-code"
            ! A.name "course-code"
            ! A.placeholder "Course Code"
            ! A.autocomplete "off"
            ! A.type_ "text"
            ! A.size "10"
    H.div ! A.id "add-text" ! A.class_ "button" $ "ADD"
    H.div ! A.id "path-mode" ! A.class_ "mode" $ "PATH (p)"
    H.div ! A.id "region-mode" ! A.class_ "mode" $ "REGION (r)"
    H.div ! A.id "finish-region" ! A.class_ "button" $ "finish (f)"
    H.div ! A.id "change-mode" ! A.class_ "mode" $ "SELECT/MOVE (m)"
    H.div ! A.id "erase-mode" ! A.class_ "mode" $ "ERASE (e)"
    H.input ! A.id "select-colour"
            ! A.class_ "jscolor"
            ! A.value "ff7878" -- pastelRed color as defined in Css.Constants
            ! A.size "15"
    -- 2x5 grid of empty swatch cells, filled in by the client.
    H.table ! A.id "colour-table" $
        forM_ (replicate 2 $ replicate 5 "" :: [[H.Html]])
              (H.tr . mapM_ (H.td . H.toHtml))
    H.div ! A.id "save-graph" ! A.class_ "button" $ "SAVE"
    H.input ! A.id "area-of-study"
            ! A.class_ "course-code"
            ! A.name "course-code"
            ! A.placeholder "Enter area of study."
            ! A.autocomplete "off"
            ! A.type_ "text"
            ! A.size "30"
    H.div ! A.id "submit-graph-name" ! A.class_ "button" $ "Search for department"
    H.div ! A.id "json-data" ! A.class_ "json-data" $ ""
christinem/courseography
app/Response/Draw.hs
gpl-3.0
2,168
0
16
684
684
328
356
53
1
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric      #-}
{-# LANGUAGE OverloadedStrings  #-}
{-# LANGUAGE RecordWildCards    #-}
{-# LANGUAGE TypeFamilies       #-}

{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds   #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}

-- Derived from AWS service descriptions, licensed under Apache 2.0.

-- |
-- Module      : Network.AWS.ElasticTranscoder.DeletePreset
-- Copyright   : (c) 2013-2015 Brendan Hay
-- License     : Mozilla Public License, v. 2.0.
-- Maintainer  : Brendan Hay <[email protected]>
-- Stability   : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- The DeletePreset operation removes a preset that you\'ve added in an AWS
-- region.
--
-- You can\'t delete the default presets that are included with Elastic
-- Transcoder.
--
-- /See:/ <http://docs.aws.amazon.com/elastictranscoder/latest/developerguide/DeletePreset.html AWS API Reference> for DeletePreset.
module Network.AWS.ElasticTranscoder.DeletePreset
    (
    -- * Creating a Request
      deletePreset
    , DeletePreset
    -- * Request Lenses
    , dpId

    -- * Destructuring the Response
    , deletePresetResponse
    , DeletePresetResponse
    -- * Response Lenses
    , dprsResponseStatus
    ) where

import Network.AWS.ElasticTranscoder.Types
import Network.AWS.ElasticTranscoder.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response

-- | The 'DeletePresetRequest' structure.
--
-- /See:/ 'deletePreset' smart constructor.
newtype DeletePreset = DeletePreset'
    { _dpId :: Text
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'DeletePreset' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dpId'
deletePreset
    :: Text -- ^ 'dpId'
    -> DeletePreset
deletePreset pId_ =
    DeletePreset'
    { _dpId = pId_
    }
-- | The identifier of the preset for which you want to get detailed
-- information.
dpId :: Lens' DeletePreset Text
dpId = lens _dpId (\ s a -> s{_dpId = a});

instance AWSRequest DeletePreset where
        type Rs DeletePreset = DeletePresetResponse
        -- HTTP DELETE against the Elastic Transcoder service.
        request = delete elasticTranscoder
        -- The service returns no body; only the HTTP status is kept.
        response
          = receiveEmpty
              (\ s h x -> DeletePresetResponse' <$> (pure (fromEnum s)))

instance ToHeaders DeletePreset where
        toHeaders = const mempty

instance ToPath DeletePreset where
        toPath DeletePreset'{..}
          = mconcat ["/2012-09-25/presets/", toBS _dpId]

instance ToQuery DeletePreset where
        toQuery = const mempty

-- | The 'DeletePresetResponse' structure.
--
-- /See:/ 'deletePresetResponse' smart constructor.
newtype DeletePresetResponse = DeletePresetResponse'
    { _dprsResponseStatus :: Int
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'DeletePresetResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dprsResponseStatus'
deletePresetResponse
    :: Int -- ^ 'dprsResponseStatus'
    -> DeletePresetResponse
deletePresetResponse pResponseStatus_ =
    DeletePresetResponse'
    { _dprsResponseStatus = pResponseStatus_
    }

-- | The response status code.
dprsResponseStatus :: Lens' DeletePresetResponse Int
dprsResponseStatus = lens _dprsResponseStatus (\ s a -> s{_dprsResponseStatus = a});
fmapfmapfmap/amazonka
amazonka-elastictranscoder/gen/Network/AWS/ElasticTranscoder/DeletePreset.hs
mpl-2.0
3,536
0
13
717
445
274
171
57
1
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-matches #-} -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | -- Module : Network.AWS.DynamoDB.UpdateItem -- Copyright : (c) 2013-2015 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Edits an existing item\'s attributes, or adds a new item to the table if -- it does not already exist. You can put, delete, or add attribute values. -- You can also perform a conditional update on an existing item (insert a -- new attribute name-value pair if it doesn\'t exist, or replace an -- existing name-value pair if it has certain expected attribute values). -- If conditions are specified and the item does not exist, then the -- operation fails and a new item is not created. -- -- You can also return the item\'s attribute values in the same -- /UpdateItem/ operation using the /ReturnValues/ parameter. -- -- /See:/ <http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateItem.html AWS API Reference> for UpdateItem. 
module Network.AWS.DynamoDB.UpdateItem ( -- * Creating a Request updateItem , UpdateItem -- * Request Lenses , uiExpressionAttributeNames , uiReturnValues , uiUpdateExpression , uiExpressionAttributeValues , uiAttributeUpdates , uiReturnConsumedCapacity , uiReturnItemCollectionMetrics , uiConditionExpression , uiConditionalOperator , uiExpected , uiTableName , uiKey -- * Destructuring the Response , updateItemResponse , UpdateItemResponse -- * Response Lenses , uirsItemCollectionMetrics , uirsConsumedCapacity , uirsAttributes , uirsResponseStatus ) where import Network.AWS.DynamoDB.Types import Network.AWS.DynamoDB.Types.Product import Network.AWS.Prelude import Network.AWS.Request import Network.AWS.Response -- | Represents the input of an /UpdateItem/ operation. -- -- /See:/ 'updateItem' smart constructor. data UpdateItem = UpdateItem' { _uiExpressionAttributeNames :: !(Maybe (Map Text Text)) , _uiReturnValues :: !(Maybe ReturnValue) , _uiUpdateExpression :: !(Maybe Text) , _uiExpressionAttributeValues :: !(Maybe (Map Text AttributeValue)) , _uiAttributeUpdates :: !(Maybe (Map Text AttributeValueUpdate)) , _uiReturnConsumedCapacity :: !(Maybe ReturnConsumedCapacity) , _uiReturnItemCollectionMetrics :: !(Maybe ReturnItemCollectionMetrics) , _uiConditionExpression :: !(Maybe Text) , _uiConditionalOperator :: !(Maybe ConditionalOperator) , _uiExpected :: !(Maybe (Map Text ExpectedAttributeValue)) , _uiTableName :: !Text , _uiKey :: !(Map Text AttributeValue) } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'UpdateItem' with the minimum fields required to make a request. 
-- -- Use one of the following lenses to modify other fields as desired: -- -- * 'uiExpressionAttributeNames' -- -- * 'uiReturnValues' -- -- * 'uiUpdateExpression' -- -- * 'uiExpressionAttributeValues' -- -- * 'uiAttributeUpdates' -- -- * 'uiReturnConsumedCapacity' -- -- * 'uiReturnItemCollectionMetrics' -- -- * 'uiConditionExpression' -- -- * 'uiConditionalOperator' -- -- * 'uiExpected' -- -- * 'uiTableName' -- -- * 'uiKey' updateItem :: Text -- ^ 'uiTableName' -> UpdateItem updateItem pTableName_ = UpdateItem' { _uiExpressionAttributeNames = Nothing , _uiReturnValues = Nothing , _uiUpdateExpression = Nothing , _uiExpressionAttributeValues = Nothing , _uiAttributeUpdates = Nothing , _uiReturnConsumedCapacity = Nothing , _uiReturnItemCollectionMetrics = Nothing , _uiConditionExpression = Nothing , _uiConditionalOperator = Nothing , _uiExpected = Nothing , _uiTableName = pTableName_ , _uiKey = mempty } -- | One or more substitution tokens for attribute names in an expression. -- The following are some use cases for using /ExpressionAttributeNames/: -- -- - To access an attribute whose name conflicts with a DynamoDB reserved -- word. -- -- - To create a placeholder for repeating occurrences of an attribute -- name in an expression. -- -- - To prevent special characters in an attribute name from being -- misinterpreted in an expression. -- -- Use the __#__ character in an expression to dereference an attribute -- name. For example, consider the following attribute name: -- -- - 'Percentile' -- -- The name of this attribute conflicts with a reserved word, so it cannot -- be used directly in an expression. (For the complete list of reserved -- words, see -- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ReservedWords.html Reserved Words> -- in the /Amazon DynamoDB Developer Guide/). 
To work around this, you -- could specify the following for /ExpressionAttributeNames/: -- -- - '{\"#P\":\"Percentile\"}' -- -- You could then use this substitution in an expression, as in this -- example: -- -- - '#P = :val' -- -- Tokens that begin with the __:__ character are /expression attribute -- values/, which are placeholders for the actual value at runtime. -- -- For more information on expression attribute names, see -- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.AccessingItemAttributes.html Accessing Item Attributes> -- in the /Amazon DynamoDB Developer Guide/. uiExpressionAttributeNames :: Lens' UpdateItem (HashMap Text Text) uiExpressionAttributeNames = lens _uiExpressionAttributeNames (\ s a -> s{_uiExpressionAttributeNames = a}) . _Default . _Map; -- | Use /ReturnValues/ if you want to get the item attributes as they -- appeared either before or after they were updated. For /UpdateItem/, the -- valid values are: -- -- - 'NONE' - If /ReturnValues/ is not specified, or if its value is -- 'NONE', then nothing is returned. (This setting is the default for -- /ReturnValues/.) -- -- - 'ALL_OLD' - If /UpdateItem/ overwrote an attribute name-value pair, -- then the content of the old item is returned. -- -- - 'UPDATED_OLD' - The old versions of only the updated attributes are -- returned. -- -- - 'ALL_NEW' - All of the attributes of the new version of the item are -- returned. -- -- - 'UPDATED_NEW' - The new versions of only the updated attributes are -- returned. -- uiReturnValues :: Lens' UpdateItem (Maybe ReturnValue) uiReturnValues = lens _uiReturnValues (\ s a -> s{_uiReturnValues = a}); -- | An expression that defines one or more attributes to be updated, the -- action to be performed on them, and new value(s) for them. -- -- The following action values are available for /UpdateExpression/. -- -- - 'SET' - Adds one or more attributes and values to an item. 
If any of -- these attribute already exist, they are replaced by the new values. -- You can also use 'SET' to add or subtract from an attribute that is -- of type Number. For example: 'SET myNum = myNum + :val' -- -- 'SET' supports the following functions: -- -- - 'if_not_exists (path, operand)' - if the item does not contain -- an attribute at the specified path, then 'if_not_exists' -- evaluates to operand; otherwise, it evaluates to path. You can -- use this function to avoid overwriting an attribute that may -- already be present in the item. -- -- - 'list_append (operand, operand)' - evaluates to a list with a -- new element added to it. You can append the new element to the -- start or the end of the list by reversing the order of the -- operands. -- -- These function names are case-sensitive. -- -- - 'REMOVE' - Removes one or more attributes from an item. -- -- - 'ADD' - Adds the specified value to the item, if the attribute does -- not already exist. If the attribute does exist, then the behavior of -- 'ADD' depends on the data type of the attribute: -- -- - If the existing attribute is a number, and if /Value/ is also a -- number, then /Value/ is mathematically added to the existing -- attribute. If /Value/ is a negative number, then it is -- subtracted from the existing attribute. -- -- If you use 'ADD' to increment or decrement a number value for an -- item that doesn\'t exist before the update, DynamoDB uses '0' as -- the initial value. -- -- Similarly, if you use 'ADD' for an existing item to increment or -- decrement an attribute value that doesn\'t exist before the -- update, DynamoDB uses '0' as the initial value. For example, -- suppose that the item you want to update doesn\'t have an -- attribute named /itemcount/, but you decide to 'ADD' the number -- '3' to this attribute anyway. DynamoDB will create the -- /itemcount/ attribute, set its initial value to '0', and finally -- add '3' to it. 
The result will be a new /itemcount/ attribute in -- the item, with a value of '3'. -- -- - If the existing data type is a set and if /Value/ is also a set, -- then /Value/ is added to the existing set. For example, if the -- attribute value is the set '[1,2]', and the 'ADD' action -- specified '[3]', then the final attribute value is '[1,2,3]'. An -- error occurs if an 'ADD' action is specified for a set attribute -- and the attribute type specified does not match the existing set -- type. -- -- Both sets must have the same primitive data type. For example, -- if the existing data type is a set of strings, the /Value/ must -- also be a set of strings. -- -- The 'ADD' action only supports Number and set data types. In -- addition, 'ADD' can only be used on top-level attributes, not nested -- attributes. -- -- - 'DELETE' - Deletes an element from a set. -- -- If a set of values is specified, then those values are subtracted -- from the old set. For example, if the attribute value was the set -- '[a,b,c]' and the 'DELETE' action specifies '[a,c]', then the final -- attribute value is '[b]'. Specifying an empty set is an error. -- -- The 'DELETE' action only supports set data types. In addition, -- 'DELETE' can only be used on top-level attributes, not nested -- attributes. -- -- You can have many actions in a single expression, such as the following: -- 'SET a=:value1, b=:value2 DELETE :value3, :value4, :value5' -- -- For more information on update expressions, see -- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.Modifying.html Modifying Items and Attributes> -- in the /Amazon DynamoDB Developer Guide/. -- -- /UpdateExpression/ replaces the legacy /AttributeUpdates/ parameter. uiUpdateExpression :: Lens' UpdateItem (Maybe Text) uiUpdateExpression = lens _uiUpdateExpression (\ s a -> s{_uiUpdateExpression = a}); -- | One or more values that can be substituted in an expression. 
-- -- Use the __:__ (colon) character in an expression to dereference an -- attribute value. For example, suppose that you wanted to check whether -- the value of the /ProductStatus/ attribute was one of the following: -- -- 'Available | Backordered | Discontinued' -- -- You would first need to specify /ExpressionAttributeValues/ as follows: -- -- '{ \":avail\":{\"S\":\"Available\"}, \":back\":{\"S\":\"Backordered\"}, \":disc\":{\"S\":\"Discontinued\"} }' -- -- You could then use these values in an expression, such as this: -- -- 'ProductStatus IN (:avail, :back, :disc)' -- -- For more information on expression attribute values, see -- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.SpecifyingConditions.html Specifying Conditions> -- in the /Amazon DynamoDB Developer Guide/. uiExpressionAttributeValues :: Lens' UpdateItem (HashMap Text AttributeValue) uiExpressionAttributeValues = lens _uiExpressionAttributeValues (\ s a -> s{_uiExpressionAttributeValues = a}) . _Default . _Map; -- | This is a legacy parameter, for backward compatibility. New applications -- should use /UpdateExpression/ instead. Do not combine legacy parameters -- and expression parameters in a single API call; otherwise, DynamoDB will -- return a /ValidationException/ exception. -- -- This parameter can be used for modifying top-level attributes; however, -- it does not support individual list or map elements. -- -- The names of attributes to be modified, the action to perform on each, -- and the new value for each. If you are updating an attribute that is an -- index key attribute for any indexes on that table, the attribute type -- must match the index key type defined in the /AttributesDefinition/ of -- the table description. You can use /UpdateItem/ to update any nonkey -- attributes. -- -- Attribute values cannot be null. String and Binary type attributes must -- have lengths greater than zero. Set type attributes must not be empty. 
-- Requests with empty values will be rejected with a /ValidationException/ -- exception. -- -- Each /AttributeUpdates/ element consists of an attribute name to modify, -- along with the following: -- -- - /Value/ - The new value, if applicable, for this attribute. -- -- - /Action/ - A value that specifies how to perform the update. This -- action is only valid for an existing attribute whose data type is -- Number or is a set; do not use 'ADD' for other data types. -- -- If an item with the specified primary key is found in the table, the -- following values perform the following actions: -- -- - 'PUT' - Adds the specified attribute to the item. If the -- attribute already exists, it is replaced by the new value. -- -- - 'DELETE' - Removes the attribute and its value, if no value is -- specified for 'DELETE'. The data type of the specified value -- must match the existing value\'s data type. -- -- If a set of values is specified, then those values are -- subtracted from the old set. For example, if the attribute value -- was the set '[a,b,c]' and the 'DELETE' action specifies '[a,c]', -- then the final attribute value is '[b]'. Specifying an empty set -- is an error. -- -- - 'ADD' - Adds the specified value to the item, if the attribute -- does not already exist. If the attribute does exist, then the -- behavior of 'ADD' depends on the data type of the attribute: -- -- - If the existing attribute is a number, and if /Value/ is -- also a number, then /Value/ is mathematically added to the -- existing attribute. If /Value/ is a negative number, then it -- is subtracted from the existing attribute. -- -- If you use 'ADD' to increment or decrement a number value -- for an item that doesn\'t exist before the update, DynamoDB -- uses 0 as the initial value. -- -- Similarly, if you use 'ADD' for an existing item to -- increment or decrement an attribute value that doesn\'t -- exist before the update, DynamoDB uses '0' as the initial -- value. 
For example, suppose that the item you want to update -- doesn\'t have an attribute named /itemcount/, but you decide -- to 'ADD' the number '3' to this attribute anyway. DynamoDB -- will create the /itemcount/ attribute, set its initial value -- to '0', and finally add '3' to it. The result will be a new -- /itemcount/ attribute, with a value of '3'. -- -- - If the existing data type is a set, and if /Value/ is also a -- set, then /Value/ is appended to the existing set. For -- example, if the attribute value is the set '[1,2]', and the -- 'ADD' action specified '[3]', then the final attribute value -- is '[1,2,3]'. An error occurs if an 'ADD' action is -- specified for a set attribute and the attribute type -- specified does not match the existing set type. -- -- Both sets must have the same primitive data type. For -- example, if the existing data type is a set of strings, -- /Value/ must also be a set of strings. -- -- If no item with the specified key is found in the table, the -- following values perform the following actions: -- -- - 'PUT' - Causes DynamoDB to create a new item with the specified -- primary key, and then adds the attribute. -- -- - 'DELETE' - Nothing happens, because attributes cannot be deleted -- from a nonexistent item. The operation succeeds, but DynamoDB -- does not create a new item. -- -- - 'ADD' - Causes DynamoDB to create an item with the supplied -- primary key and number (or set of numbers) for the attribute -- value. The only data types allowed are Number and Number Set. -- -- If you provide any attributes that are part of an index key, then the -- data types for those attributes must match those of the schema in the -- table\'s attribute definition. uiAttributeUpdates :: Lens' UpdateItem (HashMap Text AttributeValueUpdate) uiAttributeUpdates = lens _uiAttributeUpdates (\ s a -> s{_uiAttributeUpdates = a}) . _Default . _Map; -- | Undocumented member. 
uiReturnConsumedCapacity :: Lens' UpdateItem (Maybe ReturnConsumedCapacity) uiReturnConsumedCapacity = lens _uiReturnConsumedCapacity (\ s a -> s{_uiReturnConsumedCapacity = a}); -- | Determines whether item collection metrics are returned. If set to -- 'SIZE', the response includes statistics about item collections, if any, -- that were modified during the operation are returned in the response. If -- set to 'NONE' (the default), no statistics are returned. uiReturnItemCollectionMetrics :: Lens' UpdateItem (Maybe ReturnItemCollectionMetrics) uiReturnItemCollectionMetrics = lens _uiReturnItemCollectionMetrics (\ s a -> s{_uiReturnItemCollectionMetrics = a}); -- | A condition that must be satisfied in order for a conditional update to -- succeed. -- -- An expression can contain any of the following: -- -- - Functions: -- 'attribute_exists | attribute_not_exists | attribute_type | contains | begins_with | size' -- -- These function names are case-sensitive. -- -- - Comparison operators: ' = | \<> | \< | > | \<= | >= | BETWEEN | IN' -- -- - Logical operators: 'AND | OR | NOT' -- -- For more information on condition expressions, see -- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.SpecifyingConditions.html Specifying Conditions> -- in the /Amazon DynamoDB Developer Guide/. -- -- /ConditionExpression/ replaces the legacy /ConditionalOperator/ and -- /Expected/ parameters. uiConditionExpression :: Lens' UpdateItem (Maybe Text) uiConditionExpression = lens _uiConditionExpression (\ s a -> s{_uiConditionExpression = a}); -- | This is a legacy parameter, for backward compatibility. New applications -- should use /ConditionExpression/ instead. Do not combine legacy -- parameters and expression parameters in a single API call; otherwise, -- DynamoDB will return a /ValidationException/ exception. 
-- -- A logical operator to apply to the conditions in the /Expected/ map: -- -- - 'AND' - If all of the conditions evaluate to true, then the entire -- map evaluates to true. -- -- - 'OR' - If at least one of the conditions evaluate to true, then the -- entire map evaluates to true. -- -- If you omit /ConditionalOperator/, then 'AND' is the default. -- -- The operation will succeed only if the entire map evaluates to true. -- -- This parameter does not support attributes of type List or Map. uiConditionalOperator :: Lens' UpdateItem (Maybe ConditionalOperator) uiConditionalOperator = lens _uiConditionalOperator (\ s a -> s{_uiConditionalOperator = a}); -- | This is a legacy parameter, for backward compatibility. New applications -- should use /ConditionExpression/ instead. Do not combine legacy -- parameters and expression parameters in a single API call; otherwise, -- DynamoDB will return a /ValidationException/ exception. -- -- A map of attribute\/condition pairs. /Expected/ provides a conditional -- block for the /UpdateItem/ operation. -- -- Each element of /Expected/ consists of an attribute name, a comparison -- operator, and one or more values. DynamoDB compares the attribute with -- the value(s) you supplied, using the comparison operator. For each -- /Expected/ element, the result of the evaluation is either true or -- false. -- -- If you specify more than one element in the /Expected/ map, then by -- default all of the conditions must evaluate to true. In other words, the -- conditions are ANDed together. (You can use the /ConditionalOperator/ -- parameter to OR the conditions instead. If you do this, then at least -- one of the conditions must evaluate to true, rather than all of them.) -- -- If the /Expected/ map evaluates to true, then the conditional operation -- succeeds; otherwise, it fails. -- -- /Expected/ contains the following: -- -- - /AttributeValueList/ - One or more values to evaluate against the -- supplied attribute. 
The number of values in the list depends on the -- /ComparisonOperator/ being used. -- -- For type Number, value comparisons are numeric. -- -- String value comparisons for greater than, equals, or less than are -- based on ASCII character code values. For example, 'a' is greater -- than 'A', and 'a' is greater than 'B'. For a list of code values, -- see <http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters>. -- -- For type Binary, DynamoDB treats each byte of the binary data as -- unsigned when it compares binary values. -- -- - /ComparisonOperator/ - A comparator for evaluating attributes in the -- /AttributeValueList/. When performing the comparison, DynamoDB uses -- strongly consistent reads. -- -- The following comparison operators are available: -- -- 'EQ | NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | BEGINS_WITH | IN | BETWEEN' -- -- The following are descriptions of each comparison operator. -- -- - 'EQ' : Equal. 'EQ' is supported for all datatypes, including -- lists and maps. -- -- /AttributeValueList/ can contain only one /AttributeValue/ -- element of type String, Number, Binary, String Set, Number Set, -- or Binary Set. If an item contains an /AttributeValue/ element -- of a different type than the one provided in the request, the -- value does not match. For example, '{\"S\":\"6\"}' does not -- equal '{\"N\":\"6\"}'. Also, '{\"N\":\"6\"}' does not equal -- '{\"NS\":[\"6\", \"2\", \"1\"]}'. -- -- - 'NE' : Not equal. 'NE' is supported for all datatypes, including -- lists and maps. -- -- /AttributeValueList/ can contain only one /AttributeValue/ of -- type String, Number, Binary, String Set, Number Set, or Binary -- Set. If an item contains an /AttributeValue/ of a different type -- than the one provided in the request, the value does not match. -- For example, '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. -- Also, '{\"N\":\"6\"}' does not equal -- '{\"NS\":[\"6\", \"2\", \"1\"]}'. -- -- - 'LE' : Less than or equal. 
-- -- /AttributeValueList/ can contain only one /AttributeValue/ -- element of type String, Number, or Binary (not a set type). If -- an item contains an /AttributeValue/ element of a different type -- than the one provided in the request, the value does not match. -- For example, '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. -- Also, '{\"N\":\"6\"}' does not compare to -- '{\"NS\":[\"6\", \"2\", \"1\"]}'. -- -- - 'LT' : Less than. -- -- /AttributeValueList/ can contain only one /AttributeValue/ of -- type String, Number, or Binary (not a set type). If an item -- contains an /AttributeValue/ element of a different type than -- the one provided in the request, the value does not match. For -- example, '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. Also, -- '{\"N\":\"6\"}' does not compare to -- '{\"NS\":[\"6\", \"2\", \"1\"]}'. -- -- - 'GE' : Greater than or equal. -- -- /AttributeValueList/ can contain only one /AttributeValue/ -- element of type String, Number, or Binary (not a set type). If -- an item contains an /AttributeValue/ element of a different type -- than the one provided in the request, the value does not match. -- For example, '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. -- Also, '{\"N\":\"6\"}' does not compare to -- '{\"NS\":[\"6\", \"2\", \"1\"]}'. -- -- - 'GT' : Greater than. -- -- /AttributeValueList/ can contain only one /AttributeValue/ -- element of type String, Number, or Binary (not a set type). If -- an item contains an /AttributeValue/ element of a different type -- than the one provided in the request, the value does not match. -- For example, '{\"S\":\"6\"}' does not equal '{\"N\":\"6\"}'. -- Also, '{\"N\":\"6\"}' does not compare to -- '{\"NS\":[\"6\", \"2\", \"1\"]}'. -- -- - 'NOT_NULL' : The attribute exists. 'NOT_NULL' is supported for -- all datatypes, including lists and maps. -- -- This operator tests for the existence of an attribute, not its -- data type. 
If the data type of attribute \"'a'\" is null, and -- you evaluate it using 'NOT_NULL', the result is a Boolean -- /true/. This result is because the attribute \"'a'\" exists; its -- data type is not relevant to the 'NOT_NULL' comparison operator. -- -- - 'NULL' : The attribute does not exist. 'NULL' is supported for -- all datatypes, including lists and maps. -- -- This operator tests for the nonexistence of an attribute, not -- its data type. If the data type of attribute \"'a'\" is null, -- and you evaluate it using 'NULL', the result is a Boolean -- /false/. This is because the attribute \"'a'\" exists; its data -- type is not relevant to the 'NULL' comparison operator. -- -- - 'CONTAINS' : Checks for a subsequence, or value in a set. -- -- /AttributeValueList/ can contain only one /AttributeValue/ -- element of type String, Number, or Binary (not a set type). If -- the target attribute of the comparison is of type String, then -- the operator checks for a substring match. If the target -- attribute of the comparison is of type Binary, then the operator -- looks for a subsequence of the target that matches the input. If -- the target attribute of the comparison is a set (\"'SS'\", -- \"'NS'\", or \"'BS'\"), then the operator evaluates to true if -- it finds an exact match with any member of the set. -- -- CONTAINS is supported for lists: When evaluating -- \"'a CONTAINS b'\", \"'a'\" can be a list; however, \"'b'\" -- cannot be a set, a map, or a list. -- -- - 'NOT_CONTAINS' : Checks for absence of a subsequence, or absence -- of a value in a set. -- -- /AttributeValueList/ can contain only one /AttributeValue/ -- element of type String, Number, or Binary (not a set type). If -- the target attribute of the comparison is a String, then the -- operator checks for the absence of a substring match. If the -- target attribute of the comparison is Binary, then the operator -- checks for the absence of a subsequence of the target that -- matches the input. 
If the target attribute of the comparison is -- a set (\"'SS'\", \"'NS'\", or \"'BS'\"), then the operator -- evaluates to true if it /does not/ find an exact match with any -- member of the set. -- -- NOT_CONTAINS is supported for lists: When evaluating -- \"'a NOT CONTAINS b'\", \"'a'\" can be a list; however, \"'b'\" -- cannot be a set, a map, or a list. -- -- - 'BEGINS_WITH' : Checks for a prefix. -- -- /AttributeValueList/ can contain only one /AttributeValue/ of -- type String or Binary (not a Number or a set type). The target -- attribute of the comparison must be of type String or Binary -- (not a Number or a set type). -- -- - 'IN' : Checks for matching elements within two sets. -- -- /AttributeValueList/ can contain one or more /AttributeValue/ -- elements of type String, Number, or Binary (not a set type). -- These attributes are compared against an existing set type -- attribute of an item. If any elements of the input set are -- present in the item attribute, the expression evaluates to true. -- -- - 'BETWEEN' : Greater than or equal to the first value, and less -- than or equal to the second value. -- -- /AttributeValueList/ must contain two /AttributeValue/ elements -- of the same type, either String, Number, or Binary (not a set -- type). A target attribute matches if the target value is greater -- than, or equal to, the first element and less than, or equal to, -- the second element. If an item contains an /AttributeValue/ -- element of a different type than the one provided in the -- request, the value does not match. For example, '{\"S\":\"6\"}' -- does not compare to '{\"N\":\"6\"}'. Also, '{\"N\":\"6\"}' does -- not compare to '{\"NS\":[\"6\", \"2\", \"1\"]}' -- -- For usage examples of /AttributeValueList/ and /ComparisonOperator/, see -- <http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LegacyConditionalParameters.html Legacy Conditional Parameters> -- in the /Amazon DynamoDB Developer Guide/. 
-- -- For backward compatibility with previous DynamoDB releases, the -- following parameters can be used instead of /AttributeValueList/ and -- /ComparisonOperator/: -- -- - /Value/ - A value for DynamoDB to compare with an attribute. -- -- - /Exists/ - A Boolean value that causes DynamoDB to evaluate the -- value before attempting the conditional operation: -- -- - If /Exists/ is 'true', DynamoDB will check to see if that -- attribute value already exists in the table. If it is found, -- then the condition evaluates to true; otherwise the condition -- evaluate to false. -- -- - If /Exists/ is 'false', DynamoDB assumes that the attribute -- value does /not/ exist in the table. If in fact the value does -- not exist, then the assumption is valid and the condition -- evaluates to true. If the value is found, despite the assumption -- that it does not exist, the condition evaluates to false. -- -- Note that the default value for /Exists/ is 'true'. -- -- The /Value/ and /Exists/ parameters are incompatible with -- /AttributeValueList/ and /ComparisonOperator/. Note that if you use both -- sets of parameters at once, DynamoDB will return a /ValidationException/ -- exception. -- -- This parameter does not support attributes of type List or Map. uiExpected :: Lens' UpdateItem (HashMap Text ExpectedAttributeValue) uiExpected = lens _uiExpected (\ s a -> s{_uiExpected = a}) . _Default . _Map; -- | The name of the table containing the item to update. uiTableName :: Lens' UpdateItem Text uiTableName = lens _uiTableName (\ s a -> s{_uiTableName = a}); -- | The primary key of the item to be updated. Each element consists of an -- attribute name and a value for that attribute. -- -- For the primary key, you must provide all of the attributes. For -- example, with a hash type primary key, you only need to provide the hash -- attribute. For a hash-and-range type primary key, you must provide both -- the hash attribute and the range attribute. 
uiKey :: Lens' UpdateItem (HashMap Text AttributeValue) uiKey = lens _uiKey (\ s a -> s{_uiKey = a}) . _Map; instance AWSRequest UpdateItem where type Rs UpdateItem = UpdateItemResponse request = postJSON dynamoDB response = receiveJSON (\ s h x -> UpdateItemResponse' <$> (x .?> "ItemCollectionMetrics") <*> (x .?> "ConsumedCapacity") <*> (x .?> "Attributes" .!@ mempty) <*> (pure (fromEnum s))) instance ToHeaders UpdateItem where toHeaders = const (mconcat ["X-Amz-Target" =# ("DynamoDB_20120810.UpdateItem" :: ByteString), "Content-Type" =# ("application/x-amz-json-1.0" :: ByteString)]) instance ToJSON UpdateItem where toJSON UpdateItem'{..} = object (catMaybes [("ExpressionAttributeNames" .=) <$> _uiExpressionAttributeNames, ("ReturnValues" .=) <$> _uiReturnValues, ("UpdateExpression" .=) <$> _uiUpdateExpression, ("ExpressionAttributeValues" .=) <$> _uiExpressionAttributeValues, ("AttributeUpdates" .=) <$> _uiAttributeUpdates, ("ReturnConsumedCapacity" .=) <$> _uiReturnConsumedCapacity, ("ReturnItemCollectionMetrics" .=) <$> _uiReturnItemCollectionMetrics, ("ConditionExpression" .=) <$> _uiConditionExpression, ("ConditionalOperator" .=) <$> _uiConditionalOperator, ("Expected" .=) <$> _uiExpected, Just ("TableName" .= _uiTableName), Just ("Key" .= _uiKey)]) instance ToPath UpdateItem where toPath = const "/" instance ToQuery UpdateItem where toQuery = const mempty -- | Represents the output of an /UpdateItem/ operation. -- -- /See:/ 'updateItemResponse' smart constructor. data UpdateItemResponse = UpdateItemResponse' { _uirsItemCollectionMetrics :: !(Maybe ItemCollectionMetrics) , _uirsConsumedCapacity :: !(Maybe ConsumedCapacity) , _uirsAttributes :: !(Maybe (Map Text AttributeValue)) , _uirsResponseStatus :: !Int } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'UpdateItemResponse' with the minimum fields required to make a request. 
-- -- Use one of the following lenses to modify other fields as desired: -- -- * 'uirsItemCollectionMetrics' -- -- * 'uirsConsumedCapacity' -- -- * 'uirsAttributes' -- -- * 'uirsResponseStatus' updateItemResponse :: Int -- ^ 'uirsResponseStatus' -> UpdateItemResponse updateItemResponse pResponseStatus_ = UpdateItemResponse' { _uirsItemCollectionMetrics = Nothing , _uirsConsumedCapacity = Nothing , _uirsAttributes = Nothing , _uirsResponseStatus = pResponseStatus_ } -- | Undocumented member. uirsItemCollectionMetrics :: Lens' UpdateItemResponse (Maybe ItemCollectionMetrics) uirsItemCollectionMetrics = lens _uirsItemCollectionMetrics (\ s a -> s{_uirsItemCollectionMetrics = a}); -- | Undocumented member. uirsConsumedCapacity :: Lens' UpdateItemResponse (Maybe ConsumedCapacity) uirsConsumedCapacity = lens _uirsConsumedCapacity (\ s a -> s{_uirsConsumedCapacity = a}); -- | A map of attribute values as they appeared before the /UpdateItem/ -- operation. This map only appears if /ReturnValues/ was specified as -- something other than 'NONE' in the request. Each element represents one -- attribute. uirsAttributes :: Lens' UpdateItemResponse (HashMap Text AttributeValue) uirsAttributes = lens _uirsAttributes (\ s a -> s{_uirsAttributes = a}) . _Default . _Map; -- | The response status code. uirsResponseStatus :: Lens' UpdateItemResponse Int uirsResponseStatus = lens _uirsResponseStatus (\ s a -> s{_uirsResponseStatus = a});
fmapfmapfmap/amazonka
amazonka-dynamodb/gen/Network/AWS/DynamoDB/UpdateItem.hs
mpl-2.0
36,622
0
14
8,387
2,274
1,553
721
190
1
{-# LANGUAGE PatternSynonyms #-} module OpenCog.Lojban.Util where import OpenCog.AtomSpace mapFst :: (a -> b) -> (a,c) -> (b,c) mapFst f (a,c) = (f a,c) mapSnd :: (a -> b) -> (c,a) -> (c,b) mapSnd f (c,a) = (c,f a) highTv :: TruthVal highTv = stv 1 0.9 lowTv :: TruthVal lowTv = stv 0.000001 0.01 if' :: Bool -> a -> a -> a if' True a _ = a if' False _ a = a infixr 1 ? (?) :: Bool -> a -> a -> a (?) = if' infixl 8 ... (...) :: (c -> d) -> (a -> b -> c) -> a -> b -> d (...) = (.).(.) pattern CN name <-Node "ConceptNode" name _ pattern PN name <-Node "PredicateNode" name _ pattern GPN name <-Node "GroundedPredicateNode" name _ pattern VN name <-Node "VariableNode" name _ pattern AL l <- Link "AndLink" l _ pattern LL l <- Link "ListLink" l _ pattern NL l <- Link "NotLink" l _ pattern ImpL l tv <- Link "ImplicationLink" l tv pattern InhL l tv <- Link "InheritanceLink" l tv pattern SL l <- Link "SetLink" l _ pattern SSL l <- Link "SatisfyingSetLink" [l] _ pattern EvalL tv p a <- Link "EvaluationLink" [p,a] tv pattern ExL tv p a <- Link "ExistsLink" [p,a] tv pattern CtxL c a <- Link "ContextLink" [c,a] _ pattern SimL a b <- Link "SimilarityLink" [a,b] _ pattern SubL a b <- Link "SubsetLink" [a,b] _ pattern LambdaL a b <- Link "LambdaLink" [a,b] _ cCN name tv = Node "ConceptNode" name tv cPN name tv = Node "PredicateNode" name tv cGPN name tv = Node "GroundedPredicateNode" name tv cVN name = Node "VariableNode" name noTv cAN name = Node "AnchorNode" name noTv cNN name = Node "NumberNode" name noTv cLL a = Link "ListLink" a noTv cSL a = Link "SetLink" a noTv cSimL a b = Link "SimilarityLink" [a,b] noTv cVL a = Link "VariableList" a noTv cInhL tv a b = Link "InheritanceLink" [a,b] tv cImpL tv a b = Link "ImplicationLink" [a,b] tv cIFaoIFL tv a b = Link "AndLink" [cImpL tv a b,cImpL tv b a] tv cEvalL tv a b = Link "EvaluationLink" [a,b] tv cSSL tv a = Link "SatisfyingSetLink" [a] tv cExL tv a b = Link "ExistsLink" [a,b] tv cFAL tv a b = Link "ForAllLink" [a,b] tv cPL a b = 
Link "PutLink" [a,b] noTv cGL a = Link "GetLink" [a] noTv cAL tv a b = Link "AndLink" [a,b] tv cOL tv a = Link "OrLink" a tv cNL tv a = Link "NotLink" [a] tv cCtxL tv a b = Link "ContextLink" [a,b] tv cLamdaL tv a b = Link "LambdaLink" [a,b] tv mkCtxPre pred atom = Link "EquivalenceLink" [cLamdaL highTv (cVN "1") (cEvalL highTv (pred) (cLL [cVN "1"])) ,cLamdaL highTv (cVN "2") (cCtxL highTv (cVN "2") (atom)) ] highTv pattern CtxPred atom <- Link "EquivalenceLink" [ _ , Link "LambdaLink" [ _ ,Link "ContextLink" [ _ , atom ] _ ] _ ] _ mkPropPre pred atom name = Link "EquivalenceLink" [cLamdaL highTv (cVN "1") (cEvalL highTv (pred) (cLL [cVN "1"])) ,cLamdaL highTv (cVN "2") (cAL highTv (cEvalL highTv (cPN "ckaji_sumit1" lowTv) (cLL [cPN ("ckaji_" ++ name) lowTv,cVN "2"]) ) (cEvalL highTv (cPN "ckaji_sumit2" lowTv) (cLL [cPN ("ckaji_" ++ name) lowTv,atom]) ) ) ] highTv pattern PropPred atom <- Link "EquivalenceLink" [_ , Link "LambdaLink" [ _ , Link "AndLink" [_ , Link "EvaluationLink" [ _ , Link "ListLink" [_,atom] _ ] _ ] _ ] _ ] _ isInteger s = case reads s :: [(Integer, String)] of [(_, "")] -> True _ -> False isDouble s = case reads s :: [(Double, String)] of [(_, "")] -> True _ -> False isNumeric :: String -> Bool isNumeric s = isInteger s || isDouble s
ruiting/opencog
opencog/nlp/lojban/HaskellLib/src/OpenCog/Lojban/Util.hs
agpl-3.0
5,409
1
17
2,777
1,671
858
813
118
2
{-# LANGUAGE ScopedTypeVariables #-} -- | -- Module: Data.Configurator.FromValue -- Copyright: (c) 2016 Leon P Smith -- License: BSD3 -- Maintainer: Leon P Smith <[email protected]> module Data.Configurator.FromValue ( MaybeParser , runMaybeParser , FromMaybeValue(..) , optionalValue , requiredValue , ValueParser , runValueParser , FromValue(..) , ListParser , FromListValue(..) , listValue , listValue' , listElem , ConversionError(..) , ConversionErrorWhy(..) , defaultConversionError -- * Assorted primitive value parsers , boolValue , boundedIntegerValue , integralValue , fractionalValue , realFloatValue , fixedValue , scientificValue , textValue , charValue , typeError , valueError , extraValuesError , missingValueError ) where import Data.Configurator.FromValue.Implementation import Data.Configurator.Types
lpsmith/configurator
Data/Configurator/FromValue.hs
bsd-3-clause
996
0
5
267
136
95
41
33
0
{-# LANGUAGE DoRec, RankNTypes, NamedFieldPuns, RecordWildCards #-} module Distribution.Server.Features.Html ( HtmlFeature(..), initHtmlFeature ) where import Distribution.Server.Framework import qualified Distribution.Server.Framework.ResponseContentTypes as Resource import Distribution.Server.Framework.Templating import Distribution.Server.Features.Core import Distribution.Server.Features.RecentPackages import Distribution.Server.Features.Upload import Distribution.Server.Features.BuildReports import Distribution.Server.Features.BuildReports.Render import Distribution.Server.Features.PackageCandidates import Distribution.Server.Features.Users import Distribution.Server.Features.DownloadCount import Distribution.Server.Features.Search import Distribution.Server.Features.Search as Search import Distribution.Server.Features.PreferredVersions -- [reverse index disabled] import Distribution.Server.Features.ReverseDependencies import Distribution.Server.Features.PackageList import Distribution.Server.Features.Tags import Distribution.Server.Features.Mirror import Distribution.Server.Features.Distro import Distribution.Server.Features.Documentation import Distribution.Server.Features.UserDetails import Distribution.Server.Features.EditCabalFiles import Distribution.Server.Users.Types import qualified Distribution.Server.Users.Group as Group import Distribution.Server.Packages.Types import Distribution.Server.Packages.Render import qualified Distribution.Server.Users.Users as Users import qualified Distribution.Server.Packages.PackageIndex as PackageIndex import Distribution.Server.Users.Group (UserGroup(..)) import Distribution.Server.Features.Distro.Distributions (DistroPackageInfo(..)) -- [reverse index disabled] import Distribution.Server.Packages.Reverse import qualified Distribution.Server.Pages.Package as Pages import Distribution.Server.Pages.Template import Distribution.Server.Pages.Util import qualified Distribution.Server.Pages.Group as Pages -- [reverse index 
disabled] import qualified Distribution.Server.Pages.Reverse as Pages import qualified Distribution.Server.Pages.Index as Pages import Distribution.Server.Util.CountingMap (cmFind, cmToList) import Distribution.Package import Distribution.Version import Distribution.Text (display) import Distribution.PackageDescription import Data.List (intercalate, intersperse, insert, sortBy) import Data.Function (on) import qualified Data.Map as Map import Data.Set (Set) import qualified Data.Set as Set import Data.Maybe (fromMaybe, isJust) import qualified Data.Text as T import Data.Traversable (traverse) import Control.Applicative (optional) import Data.Array (Array, listArray) import qualified Data.Array as Array import qualified Data.Ix as Ix import Data.Time.Format (formatTime) import System.Locale (defaultTimeLocale) import Text.XHtml.Strict import qualified Text.XHtml.Strict as XHtml import Text.XHtml.Table (simpleTable) import Network.URI (escapeURIString, isUnreserved) -- TODO: move more of the below to Distribution.Server.Pages.*, it's getting -- close to 1K lines, way too much... it's okay to keep data-querying in here, -- but pure HTML generation mostly needlessly clutters up the module. -- Try to make it so no HTML combinators need to be imported. -- -- See the TODO file for more ways to improve the HTML. data HtmlFeature = HtmlFeature { htmlFeatureInterface :: HackageFeature } instance IsHackageFeature HtmlFeature where getFeatureInterface = htmlFeatureInterface -- This feature provides the HTML view to the models of other features -- currently it uses the xhtml package to render HTML (Text.XHtml.Strict) -- -- This means of generating HTML is somewhat temporary, in that a more advanced -- (and better-looking) HTML ajaxy scheme should come about later on. 
initHtmlFeature :: ServerEnv -> IO (UserFeature -> CoreFeature -> RecentPackagesFeature -> UploadFeature -> PackageCandidatesFeature -> VersionsFeature -- [reverse index disabled] -> ReverseFeature -> TagsFeature -> DownloadFeature -> ListFeature -> SearchFeature -> MirrorFeature -> DistroFeature -> DocumentationFeature -> DocumentationFeature -> ReportsFeature -> UserDetailsFeature -> IO HtmlFeature) initHtmlFeature ServerEnv{serverTemplatesDir, serverTemplatesMode, serverCacheDelay, serverVerbosity = verbosity} = do -- Page templates templates <- loadTemplates serverTemplatesMode [serverTemplatesDir, serverTemplatesDir </> "Html"] [ "maintain.html", "maintain-candidate.html" , "reports.html", "report.html" , "maintain-docs.html" , "distro-monitor.html" , "revisions.html" ] return $ \user core@CoreFeature{packageChangeHook} packages upload candidates versions -- [reverse index disabled] reverse tags download list@ListFeature{itemUpdate} names mirror distros docsCore docsCandidates reportsCore usersdetails -> do -- do rec, tie the knot rec let (feature, packageIndex, packagesPage) = htmlFeature user core packages upload candidates versions tags download list names mirror distros docsCore docsCandidates reportsCore usersdetails (htmlUtilities core tags) mainCache namesCache templates -- Index page caches mainCache <- newAsyncCacheNF packageIndex defaultAsyncCachePolicy { asyncCacheName = "packages index page (by category)", asyncCacheUpdateDelay = serverCacheDelay, asyncCacheSyncInit = False, asyncCacheLogVerbosity = verbosity } namesCache <- newAsyncCacheNF packagesPage defaultAsyncCachePolicy { asyncCacheName = "packages index page (by name)", asyncCacheUpdateDelay = serverCacheDelay, asyncCacheLogVerbosity = verbosity } registerHook itemUpdate $ \_ -> prodAsyncCache mainCache >> prodAsyncCache namesCache registerHook packageChangeHook $ \_ -> prodAsyncCache mainCache >> prodAsyncCache namesCache return feature htmlFeature :: UserFeature -> CoreFeature -> 
RecentPackagesFeature -> UploadFeature -> PackageCandidatesFeature -> VersionsFeature -> TagsFeature -> DownloadFeature -> ListFeature -> SearchFeature -> MirrorFeature -> DistroFeature -> DocumentationFeature -> DocumentationFeature -> ReportsFeature -> UserDetailsFeature -> HtmlUtilities -> AsyncCache Response -> AsyncCache Response -> Templates -> (HtmlFeature, IO Response, IO Response) htmlFeature user core@CoreFeature{queryGetPackageIndex} recent upload candidates versions -- [reverse index disabled] ReverseFeature{..} tags download list@ListFeature{getAllLists} names mirror distros docsCore docsCandidates reportsCore usersdetails utilities@HtmlUtilities{..} cachePackagesPage cacheNamesPage templates = (HtmlFeature{..}, packageIndex, packagesPage) where htmlFeatureInterface = (emptyHackageFeature "html") { featureResources = htmlResources , featureState = [] , featureCaches = [ CacheComponent { cacheDesc = "packages page by category", getCacheMemSize = memSize <$> readAsyncCache cachePackagesPage } , CacheComponent { cacheDesc = "packages page by name", getCacheMemSize = memSize <$> readAsyncCache cacheNamesPage } ] , featurePostInit = syncAsyncCache cachePackagesPage , featureReloadFiles = reloadTemplates templates } htmlCore = mkHtmlCore utilities user core versions upload tags docsCore reportsCore download distros recent htmlTags htmlPreferred cachePackagesPage cacheNamesPage templates htmlUsers = mkHtmlUsers user usersdetails htmlUploads = mkHtmlUploads utilities upload htmlDocUploads = mkHtmlDocUploads utilities core docsCore templates htmlDownloads = mkHtmlDownloads utilities download htmlReports = mkHtmlReports utilities core reportsCore templates htmlCandidates = mkHtmlCandidates utilities core versions upload docsCandidates candidates templates htmlPreferred = mkHtmlPreferred utilities core versions htmlTags = mkHtmlTags utilities core list tags htmlSearch = mkHtmlSearch utilities list names htmlResources = concat [ htmlCoreResources htmlCore , 
htmlUsersResources htmlUsers , htmlUploadsResources htmlUploads , htmlDocUploadsResources htmlDocUploads , htmlReportsResources htmlReports , htmlCandidatesResources htmlCandidates , htmlPreferredResources htmlPreferred , htmlDownloadsResources htmlDownloads , htmlTagsResources htmlTags , htmlSearchResources htmlSearch -- and user groups. package maintainers, trustees, admins , htmlGroupResource user (maintainersGroupResource . uploadResource $ upload) , htmlGroupResource user (trusteesGroupResource . uploadResource $ upload) , htmlGroupResource user (uploadersGroupResource . uploadResource $ upload) , htmlGroupResource user (adminResource . userResource $ user) , htmlGroupResource user (mirrorGroupResource . mirrorResource $ mirror) ] -- TODO: write HTML for reports and distros to display the information -- effectively reports {- , (extendResource $ reportsList reports) { resourceGet = [("html", serveReportsList)] } , (extendResource $ reportsPage reports) { resourceGet = [("html", serveReportsPage)] } -} -- distros {- , (extendResource $ distroIndexPage distros) { resourceGet = [("html", serveDistroIndex)] } , (extendResource $ distroAllPage distros) { resourceGet = [("html", serveDistroPackages)] } , (extendResource $ distroPackage distros) { resourceGet = [("html", serveDistroPackage)] } -} -- reverse index (disabled) {- , (extendResource $ reversePackage reverses) { resourceGet = [("html", serveReverse True)] } , (extendResource $ reversePackageOld reverses) { resourceGet = [("html", serveReverse False)] } , (extendResource $ reversePackageAll reverses) { resourceGet = [("html", serveReverseFlat)] } , (extendResource $ reversePackageStats reverses) { resourceGet = [("html", serveReverseStats)] } , (extendResource $ reversePackages reverses) { resourceGet = [("html", serveReverseList)] } -} -- [reverse index disabled] reverses = reverseResource {- [reverse index disabled] -------------------------------------------------------------------------------- -- 
Reverse serveReverse :: Bool -> DynamicPath -> ServerPart Response serveReverse isRecent dpath = htmlResponse $ withPackageId dpath $ \pkgid -> do let pkgname = packageName pkgid rdisp <- case packageVersion pkgid of Version [] [] -> withPackageAll pkgname $ \_ -> revPackageName pkgname _ -> withPackageVersion pkgid $ \_ -> revPackageId pkgid render <- (if isRecent then renderReverseRecent else renderReverseOld) pkgname rdisp return $ toResponse $ Resource.XHtml $ hackagePage (display pkgname ++ " - Reverse dependencies ") $ Pages.reversePackageRender pkgid (corePackageIdUri "") revr isRecent render serveReverseFlat :: DynamicPath -> ServerPart Response serveReverseFlat dpath = htmlResponse $ withPackageAllPath dpath $ \pkgname _ -> do revCount <- query $ GetReverseCount pkgname pairs <- revPackageFlat pkgname return $ toResponse $ Resource.XHtml $ hackagePage (display pkgname ++ "Flattened reverse dependencies") $ Pages.reverseFlatRender pkgname (corePackageNameUri "") revr revCount pairs serveReverseStats :: DynamicPath -> ServerPart Response serveReverseStats dpath = htmlResponse $ withPackageAllPath dpath $ \pkgname pkgs -> do revCount <- query $ GetReverseCount pkgname return $ toResponse $ Resource.XHtml $ hackagePage (display pkgname ++ "Reverse dependency statistics") $ Pages.reverseStatsRender pkgname (map packageVersion pkgs) (corePackageIdUri "") revr revCount serveReverseList :: DynamicPath -> ServerPart Response serveReverseList _ = do let revr = reverseResource revs triple <- sortedRevSummary revs hackCount <- PackageIndex.indexSize <$> queryGetPackageIndex return $ toResponse $ Resource.XHtml $ hackagePage "Reverse dependencies" $ Pages.reversePackagesRender (corePackageNameUri "") revr hackCount triple -} -------------------------------------------------------------------------------- -- Additional package indices packageIndex :: IO Response packageIndex = do index <- queryGetPackageIndex let htmlIndex = toResponse $ Resource.XHtml $ 
Pages.packageIndex index return htmlIndex packagesPage :: IO Response packagesPage = do items <- liftIO $ getAllLists let htmlpage = toResponse $ Resource.XHtml $ hackagePage "All packages by name" $ [ h2 << "All packages by name" , ulist ! [theclass "packages"] << map renderItem (Map.elems items) ] return htmlpage {- -- Currently unused, mainly because not all web browsers use eager authentication-sending -- Setting a cookie might work here, albeit one that's stateless for the server, is not -- used for auth and only causes GUI changes, not permission overriding loginWidget :: UserResource -> ServerPart Html loginWidget user = do users <- query State.GetUserDb auth <- Auth.getHackageAuth users return . makeLoginWidget user $ case auth of Left {} -> Nothing Right (_, uinfo) -> Just $ userName uinfo makeLoginWidget :: UserResource -> Maybe UserName -> Html makeLoginWidget user mname = case mname of Nothing -> anchor ! [href $ userLoginUri user Nothing] << "log in" Just uname -> anchor ! [href $ userPageUri user "" uname] << display uname -} {------------------------------------------------------------------------------- Core -------------------------------------------------------------------------------} data HtmlCore = HtmlCore { htmlCoreResources :: [Resource] } mkHtmlCore :: HtmlUtilities -> UserFeature -> CoreFeature -> VersionsFeature -> UploadFeature -> TagsFeature -> DocumentationFeature -> ReportsFeature -> DownloadFeature -> DistroFeature -> RecentPackagesFeature -> HtmlTags -> HtmlPreferred -> AsyncCache Response -> AsyncCache Response -> Templates -> HtmlCore mkHtmlCore HtmlUtilities{..} UserFeature{queryGetUserDb} CoreFeature{coreResource} VersionsFeature{ versionsResource , queryGetDeprecatedFor , queryGetPreferredInfo , withPackagePreferred } UploadFeature{guardAuthorisedAsMaintainerOrTrustee} TagsFeature{queryTagsForPackage} documentationFeature@DocumentationFeature{documentationResource, queryHasDocumentation} reportsFeature 
DownloadFeature{recentPackageDownloads,totalPackageDownloads} DistroFeature{queryPackageStatus} RecentPackagesFeature{packageRender} HtmlTags{..} HtmlPreferred{..} cachePackagesPage cacheNamesPage templates = HtmlCore{..} where cores@CoreResource{packageInPath, lookupPackageName, lookupPackageId} = coreResource versions = versionsResource docs = documentationResource maintainPackage = (resourceAt "/package/:package/maintain") { resourceGet = [("html", serveMaintainPage)] } htmlCoreResources = [ (extendResource $ corePackagePage cores) { resourceDesc = [(GET, "Show detailed package information")] , resourceGet = [("html", servePackagePage)] } {- , (extendResource $ coreIndexPage cores) { resourceGet = [("html", serveIndexPage)] }, currently in 'core' feature -} , (resourceAt "/packages/names" ) { resourceGet = [("html", const $ readAsyncCache cacheNamesPage)] } , (extendResource $ corePackagesPage cores) { resourceDesc = [(GET, "Show package index")] , resourceGet = [("html", const $ readAsyncCache cachePackagesPage)] } , maintainPackage , (resourceAt "/package/:package/distro-monitor") { resourceDesc = [(GET, "A handy page for distro package change monitor tools")] , resourceGet = [("html", serveDistroMonitorPage)] } , (resourceAt "/package/:package/revisions/") { resourceGet = [("html", serveCabalRevisionsPage)] } ] -- Currently the main package page is thrown together by querying a bunch -- of features about their attributes for the given package. It'll need -- reorganizing to look aesthetic, as opposed to the sleek and simple current -- design that takes the 1990s school of web design. 
servePackagePage :: DynamicPath -> ServerPartE Response servePackagePage dpath = do pkgid <- packageInPath dpath withPackagePreferred pkgid $ \pkg pkgs -> do -- get the PackageRender from the PkgInfo render <- liftIO $ packageRender pkg let realpkg = rendPkgId render pkgname = packageName realpkg middleHtml = Pages.renderFields render -- render the build status line buildStatus <- renderBuildStatus documentationFeature reportsFeature realpkg let buildStatusHtml = [("Status", buildStatus)] -- get additional information from other features prefInfo <- queryGetPreferredInfo pkgname let infoUrl = fmap (\_ -> preferredPackageUri versions "" pkgname) $ sumRange prefInfo beforeHtml = [Pages.renderVersion realpkg (classifyVersions prefInfo $ map packageVersion pkgs) infoUrl, Pages.renderDependencies render] -- and other package indices distributions <- queryPackageStatus pkgname -- [reverse index disabled] revCount <- revPackageSummary realpkg -- We don't currently keep per-version downloads in memory -- (totalDown, versionDown) <- perVersionDownloads pkg totalDown <- cmFind pkgname `liftM` totalPackageDownloads recentDown <- cmFind pkgname `liftM` recentPackageDownloads let distHtml = case distributions of [] -> [] _ -> [("Distributions", concatHtml . intersperse (toHtml ", ") $ map showDist distributions)] afterHtml = distHtml ++ [Pages.renderDownloads totalDown recentDown {- versionDown $ packageVersion realpkg-} -- [reverse index disabled] ,Pages.reversePackageSummary realpkg revr revCount ] -- bottom sections, currently only documentation hasDocs <- queryHasDocumentation realpkg let docURL | hasDocs = Just $ packageDocsContentUri docs realpkg -- Just $ "/package" <//> display realpkg <//> "docs" | otherwise = Nothing -- extra features like tags and downloads tags <- queryTagsForPackage pkgname let tagLinks = toHtml [anchor ! 
[href "/packages/tags"] << "Tags", toHtml ": ", toHtml (renderTags tags)] deprs <- queryGetDeprecatedFor pkgname let deprHtml = case deprs of Just fors -> paragraph ! [thestyle "color: red"] << [toHtml "Deprecated", case fors of [] -> noHtml _ -> concatHtml . (toHtml " in favor of ":) . intersperse (toHtml ", ") . map (\for -> anchor ! [href $ corePackageNameUri cores "" for] << display for) $ fors] Nothing -> noHtml -- and put it all together return $ toResponse $ Resource.XHtml $ Pages.packagePage render [tagLinks] [deprHtml] (beforeHtml ++ middleHtml ++ afterHtml ++ buildStatusHtml) [] docURL False where showDist (dname, info) = toHtml (display dname ++ ":") +++ anchor ! [href $ distroUrl info] << toHtml (display $ distroVersion info) serveMaintainPage :: DynamicPath -> ServerPartE Response serveMaintainPage dpath = do pkgname <- packageInPath dpath pkgs <- lookupPackageName pkgname guardAuthorisedAsMaintainerOrTrustee (pkgname :: PackageName) template <- getTemplate templates "maintain.html" return $ toResponse $ template [ "pkgname" $= pkgname , "versions" $= map packageId pkgs ] serveDistroMonitorPage :: DynamicPath -> ServerPartE Response serveDistroMonitorPage dpath = do pkgname <- packageInPath dpath pkgs <- lookupPackageName pkgname template <- getTemplate templates "distro-monitor.html" return $ toResponse $ template [ "pkgname" $= pkgname , "versions" $= map packageId pkgs ] serveCabalRevisionsPage :: DynamicPath -> ServerPartE Response serveCabalRevisionsPage dpath = do pkginfo <- packageInPath dpath >>= lookupPackageId users <- queryGetUserDb template <- getTemplate templates "revisions.html" let pkgid = packageId pkginfo pkgname = packageName pkginfo revisions = (pkgData pkginfo, pkgUploadData pkginfo) : pkgDataOld pkginfo numRevisions = length revisions revchanges = [ case diffCabalRevisions pkgid (cabalFileByteString old) (cabalFileByteString new) of Left _err -> [] Right changes -> changes | ((new, _), (old, _)) <- zip revisions (tail revisions) ] 
return $ toResponse $ template [ "pkgname" $= pkgname , "pkgid" $= pkgid , "revisions" $= zipWith3 (revisionToTemplate users) (map snd revisions) [numRevisions-1, numRevisions-2..] (revchanges ++ [[]]) ] where revisionToTemplate :: Users.Users -> UploadInfo -> Int -> [Change] -> TemplateVal revisionToTemplate users (utime, uid) revision changes = let uname = Users.userIdToName users uid in templateDict [ templateVal "number" revision , templateVal "user" (display uname) , templateVal "time" (formatTime defaultTimeLocale "%c" utime) , templateVal "changes" changes ] {------------------------------------------------------------------------------- Users -------------------------------------------------------------------------------} data HtmlUsers = HtmlUsers { htmlUsersResources :: [Resource] } mkHtmlUsers :: UserFeature -> UserDetailsFeature -> HtmlUsers mkHtmlUsers UserFeature{..} UserDetailsFeature{..} = HtmlUsers{..} where users = userResource htmlUsersResources = [ -- list of users with user links; if admin, a link to add user page (extendResource $ userList users) { resourceDesc = [ (GET, "list of users") , (POST, "create a new user") ] , resourceGet = [ ("html", serveUserList) ] , resourcePost = [ ("html", \_ -> adminAddUser) ] } -- form to post to /users/ , (resourceAt "/users/register") { resourceDesc = [ (GET, "show \"add user\" form") ] , resourceGet = [ ("html", addUserForm) ] } -- user page with link to password form and list of groups (how to do this?) 
, (extendResource $ userPage users) { resourceDesc = [ (GET, "show user page") ] , resourceGet = [ ("html", serveUserPage) ] } -- form to PUT password , (extendResource $ passwordResource users) { resourceDesc = [ (GET, "show password change form") , (PUT, "change password") ] , resourceGet = [ ("html", servePasswordForm) ] , resourcePut = [ ("html", servePutPassword) ] } ] serveUserList :: DynamicPath -> ServerPartE Response serveUserList _ = do userlist <- Users.enumerateActiveUsers <$> queryGetUserDb let hlist = unordList [ anchor ! [href $ userPageUri users "" uname] << display uname | (_, uinfo) <- userlist, let uname = userName uinfo ] ok $ toResponse $ Resource.XHtml $ hackagePage "Hackage users" [h2 << "Hackage users", hlist] serveUserPage :: DynamicPath -> ServerPartE Response serveUserPage dpath = do uname <- userNameInPath dpath uid <- lookupUserName uname udetails <- queryUserDetails uid let realname = maybe (display uname) (T.unpack . accountName) udetails uris <- getGroupIndex uid uriPairs <- forM uris $ \uri -> do desc <- getIndexDesc uri return $ Pages.renderGroupName desc (Just uri) return $ toResponse $ Resource.XHtml $ hackagePage realname [ h2 << realname , case uriPairs of [] -> noHtml _ -> toHtml [ toHtml $ display uname ++ " is part of the following groups:" , unordList uriPairs ] ] addUserForm :: DynamicPath -> ServerPartE Response addUserForm _ = return $ toResponse $ Resource.XHtml $ hackagePage "Register account" [ paragraph << "Administrators can register new user accounts here." , form ! [theclass "box", XHtml.method "post", action $ userListUri users ""] << [ simpleTable [] [] [ makeInput [thetype "text"] "username" "User name" , makeInput [thetype "password"] "password" "Password" , makeInput [thetype "password"] "repeat-password" "Confirm password" ] , paragraph << input ! 
[thetype "submit", value "Create user"] ] ] servePasswordForm :: DynamicPath -> ServerPartE Response servePasswordForm dpath = do uname <- userNameInPath dpath pathUid <- lookupUserName uname uid <- guardAuthenticated -- FIXME: why are we duplicating auth decisions in this feature? canChange <- canChangePassword uid pathUid case canChange of False -> errForbidden "Can't change password" [MText "You're neither this user nor an admin."] True -> return $ toResponse $ Resource.XHtml $ hackagePage "Change password" [ toHtml "Change your password. You'll be prompted for authentication upon submission, if you haven't logged in already." , form ! [theclass "box", XHtml.method "post", action $ userPasswordUri userResource "" uname] << [ simpleTable [] [] [ makeInput [thetype "password"] "password" "Password" , makeInput [thetype "password"] "repeat-password" "Confirm password" ] , paragraph << [ hidden "_method" "PUT" --method override , input ! [thetype "submit", value "Change password"] ] ] ] servePutPassword :: DynamicPath -> ServerPartE Response servePutPassword dpath = do uname <- userNameInPath dpath changePassword uname return $ toResponse $ Resource.XHtml $ hackagePage "Changed password" [toHtml "Changed password for ", anchor ! 
[href $ userPageUri users "" uname] << display uname] {------------------------------------------------------------------------------- Uploads -------------------------------------------------------------------------------} data HtmlUploads = HtmlUploads { htmlUploadsResources :: [Resource] } mkHtmlUploads :: HtmlUtilities -> UploadFeature -> HtmlUploads mkHtmlUploads HtmlUtilities{..} UploadFeature{..} = HtmlUploads{..} where uploads = uploadResource htmlUploadsResources = [ -- uploads -- serve upload result as HTML (extendResource $ uploadIndexPage uploads) { resourceDesc = [(POST, "Upload package")] , resourcePost = [("html", serveUploadResult)] } -- form for uploading , (resourceAt "/packages/upload") { resourceGet = [("html", serveUploadForm)] } ] serveUploadForm :: DynamicPath -> ServerPartE Response serveUploadForm _ = do return $ toResponse $ Resource.XHtml $ hackagePage "Upload package" [ h2 << "Upload package" , paragraph << [toHtml "See also the ", anchor ! [href "/upload"] << "upload help page", toHtml "."] , form ! [theclass "box", XHtml.method "post", action "/packages/", enctype "multipart/form-data"] << [ input ! [thetype "file", name "package"] , input ! 
[thetype "submit", value "Upload package"] ] ] serveUploadResult :: DynamicPath -> ServerPartE Response serveUploadResult _ = do res <- uploadPackage let warns = uploadWarnings res pkgid = packageId (uploadDesc res) return $ toResponse $ Resource.XHtml $ hackagePage "Upload successful" $ [ paragraph << [toHtml "Successfully uploaded ", packageLink pkgid, toHtml "!"] ] ++ case warns of [] -> [] _ -> [paragraph << "There were some warnings:", unordList warns] {------------------------------------------------------------------------------- Documentation uploads -------------------------------------------------------------------------------} data HtmlDocUploads = HtmlDocUploads { htmlDocUploadsResources :: [Resource] } mkHtmlDocUploads :: HtmlUtilities -> CoreFeature -> DocumentationFeature -> Templates -> HtmlDocUploads mkHtmlDocUploads HtmlUtilities{..} CoreFeature{coreResource} DocumentationFeature{..} templates = HtmlDocUploads{..} where CoreResource{packageInPath} = coreResource htmlDocUploadsResources = [ (extendResource $ packageDocsWhole documentationResource) { resourcePut = [ ("html", serveUploadDocumentation) ] , resourceDelete = [ ("html", serveDeleteDocumentation) ] } , (resourceAt "/package/:package/maintain/docs") { resourceGet = [("html", serveDocUploadForm)] } ] serveUploadDocumentation :: DynamicPath -> ServerPartE Response serveUploadDocumentation dpath = do pkgid <- packageInPath dpath uploadDocumentation dpath >> ignoreFilters -- Override 204 No Content return $ toResponse $ Resource.XHtml $ hackagePage "Documentation uploaded" $ [ paragraph << [toHtml "Successfully uploaded documentation for ", packageLink pkgid, toHtml "!"] ] serveDeleteDocumentation :: DynamicPath -> ServerPartE Response serveDeleteDocumentation dpath = do pkgid <- packageInPath dpath deleteDocumentation dpath >> ignoreFilters -- Override 204 No Content return $ toResponse $ Resource.XHtml $ hackagePage "Documentation deleted" $ [ paragraph << [toHtml "Successfully deleted 
documentation for ", packageLink pkgid, toHtml "!"] ] serveDocUploadForm :: DynamicPath -> ServerPartE Response serveDocUploadForm dpath = do pkgid <- packageInPath dpath template <- getTemplate templates "maintain-docs.html" return $ toResponse $ template [ "pkgid" $= (pkgid :: PackageIdentifier) ] {------------------------------------------------------------------------------- Build reports -------------------------------------------------------------------------------} data HtmlReports = HtmlReports { htmlReportsResources :: [Resource] } mkHtmlReports :: HtmlUtilities -> CoreFeature -> ReportsFeature -> Templates -> HtmlReports mkHtmlReports HtmlUtilities{..} CoreFeature{..} ReportsFeature{..} templates = HtmlReports{..} where CoreResource{packageInPath} = coreResource ReportsResource{..} = reportsResource htmlReportsResources = [ (extendResource reportsList) { resourceGet = [ ("html", servePackageReports) ] } , (extendResource reportsPage) { resourceGet = [ ("html", servePackageReport) ] } ] servePackageReports :: DynamicPath -> ServerPartE Response servePackageReports dpath = packageReports dpath $ \reports -> do pkgid <- packageInPath dpath template <- getTemplate templates "reports.html" return $ toResponse $ template [ "pkgid" $= (pkgid :: PackageIdentifier) , "reports" $= reports ] servePackageReport :: DynamicPath -> ServerPartE Response servePackageReport dpath = do (repid, report, mlog) <- packageReport dpath mlog' <- traverse queryBuildLog mlog pkgid <- packageInPath dpath template <- getTemplate templates "report.html" return $ toResponse $ template [ "pkgid" $= (pkgid :: PackageIdentifier) , "report" $= (repid, report) , "log" $= toMessage <$> mlog' ] {------------------------------------------------------------------------------- Candidates -------------------------------------------------------------------------------} data HtmlCandidates = HtmlCandidates { htmlCandidatesResources :: [Resource] } mkHtmlCandidates :: HtmlUtilities -> CoreFeature -> 
VersionsFeature -> UploadFeature -> DocumentationFeature -> PackageCandidatesFeature -> Templates -> HtmlCandidates mkHtmlCandidates HtmlUtilities{..} CoreFeature{ coreResource = CoreResource{packageInPath} , queryGetPackageIndex } VersionsFeature{ queryGetPreferredInfo } UploadFeature{ guardAuthorisedAsMaintainer } DocumentationFeature{documentationResource, queryHasDocumentation} PackageCandidatesFeature{..} templates = HtmlCandidates{..} where candidates = candidatesResource candidatesCore = candidatesCoreResource docs = documentationResource pkgCandUploadForm = (resourceAt "/package/:package/candidate/upload") { resourceGet = [("html", servePackageCandidateUpload)] } candMaintainForm = (resourceAt "/package/:package/candidate/maintain") { resourceGet = [("html", serveCandidateMaintain)] } htmlCandidatesResources = [ -- candidates -- list of all packages which have candidates (extendResource $ corePackagesPage candidatesCore) { resourceDesc = [ (GET, "Show all package candidates") , (POST, "Upload a new candidate") ] , resourceGet = [ ("html", serveCandidatesPage) ] , resourcePost = [ ("html", \_ -> postCandidate) ] } -- TODO: use custom functions, not htmlResponse , (extendResource $ packageCandidatesPage candidates) { resourceDesc = [ (GET, "Show candidate upload form") , (POST, "Upload new package candidate") ] , resourceGet = [ ("html", servePackageCandidates pkgCandUploadForm) ] , resourcePost = [ ("", postPackageCandidate) ] } -- package page for a candidate , (extendResource $ corePackagePage candidatesCore) { resourceDesc = [ (GET, "Show candidate maintenance form") , (PUT, "Upload new package candidate") , (DELETE, "Delete a package candidate") ] , resourceGet = [("html", serveCandidatePage candMaintainForm)] , resourcePut = [("html", putPackageCandidate)] , resourceDelete = [("html", doDeleteCandidate)] } -- form for uploading candidate , (resourceAt "/packages/candidates/upload") { resourceDesc = [ (GET, "Show package candidate upload form") ] , 
resourceGet = [ ("html", serveCandidateUploadForm) ] } -- form for uploading candidate for a specific package version , pkgCandUploadForm -- maintenance for candidate packages , candMaintainForm -- form for publishing package , (extendResource $ publishPage candidates) { resourceDesc = [ (GET, "Show candidate publish form") , (POST, "Publish a package candidate") ] , resourceGet = [ ("html", servePublishForm) ] , resourcePost = [ ("html", servePostPublish) ] } , (extendResource $ deletePage candidates) { resourceDesc = [ (GET, "Show candidate deletion form") , (POST, "Delete a package candidate") ] , resourceGet = [ ("html", serveDeleteForm) ] , resourcePost = [ ("html", doDeleteCandidate) ] } ] serveCandidateUploadForm :: DynamicPath -> ServerPartE Response serveCandidateUploadForm _ = do return $ toResponse $ Resource.XHtml $ hackagePage "Checking and uploading candidates" [ h2 << "Checking and uploading candidates" , paragraph << [toHtml "See also the ", anchor ! [href "/upload"] << "upload help page", toHtml "."] , form ! [theclass "box", XHtml.method "post", action "/packages/candidates/", enctype "multipart/form-data"] << [ input ! [thetype "file", name "package"] , input ! [thetype "submit", value "Upload candidate"] ] ] servePackageCandidateUpload :: DynamicPath -> ServerPartE Response servePackageCandidateUpload _ = do return $ toResponse $ Resource.XHtml $ hackagePage "Checking and uploading candidates" [ form ! [theclass "box", XHtml.method "post", action "/packages/candidates/", enctype "multipart/form-data"] << [ input ! [thetype "file", name "package"] , input ! 
[thetype "submit", value "Upload candidate"] ] ] serveCandidateMaintain :: DynamicPath -> ServerPartE Response serveCandidateMaintain dpath = do candidate <- packageInPath dpath >>= lookupCandidateId guardAuthorisedAsMaintainer (packageName candidate) template <- getTemplate templates "maintain-candidate.html" return $ toResponse $ template [ "pkgname" $= packageName candidate , "pkgversion" $= packageVersion candidate ] {-some useful URIs here: candidateUri check "" pkgid, packageCandidatesUri check "" pkgid, publishUri check "" pkgid-} serveCandidatePage :: Resource -> DynamicPath -> ServerPartE Response serveCandidatePage maintain dpath = do cand <- packageInPath dpath >>= lookupCandidateId candRender <- liftIO $ candidateRender cand let PackageIdentifier pkgname version = packageId cand render = candPackageRender candRender otherVersions <- map packageVersion . flip PackageIndex.lookupPackageName pkgname <$> queryGetPackageIndex prefInfo <- queryGetPreferredInfo pkgname let sectionHtml = [Pages.renderVersion (packageId cand) (classifyVersions prefInfo $ insert version otherVersions) Nothing, Pages.renderDependencies render] ++ Pages.renderFields render maintainHtml = anchor ! [href $ renderResource maintain [display $ packageId cand]] << "maintain" -- bottom sections, currently only documentation hasDocs <- queryHasDocumentation (packageId cand) let docURL | hasDocs = Just $ packageDocsContentUri docs (packageId cand) -- Just $ "/package" <//> display realpkg <//> "docs" | otherwise = Nothing -- also utilize hasIndexedPackage :: Bool let warningBox = case renderWarnings candRender of [] -> [] warn -> [thediv ! 
[theclass "notification"] << [toHtml "Warnings:", unordList warn]] return $ toResponse $ Resource.XHtml $ Pages.packagePage render [maintainHtml] warningBox sectionHtml [] docURL True servePublishForm :: DynamicPath -> ServerPartE Response servePublishForm dpath = do candidate <- packageInPath dpath >>= lookupCandidateId guardAuthorisedAsMaintainer (packageName candidate) let pkgid = packageId candidate packages <- queryGetPackageIndex case checkPublish packages candidate of Just err -> throwError err Nothing -> do return $ toResponse $ Resource.XHtml $ hackagePage "Publishing candidates" [form ! [theclass "box", XHtml.method "post", action $ publishUri candidatesResource "" pkgid] << input ! [thetype "submit", value "Publish package"]] serveCandidatesPage :: DynamicPath -> ServerPartE Response serveCandidatesPage _ = do cands <- queryGetCandidateIndex return $ toResponse $ Resource.XHtml $ hackagePage "Package candidates" [ h2 << "Package candidates" , paragraph << [ toHtml "Here follow all the candidate package versions on Hackage. " , thespan ! [thestyle "color: gray"] << [ toHtml "[" , anchor ! [href "/packages/candidates/upload"] << "upload" , toHtml "]" ] ] , unordList $ map showCands $ PackageIndex.allPackagesByName cands ] -- note: each of the lists here should be non-empty, according to PackageIndex where showCands pkgs = let desc = packageDescription . pkgDesc . candPkgInfo $ last pkgs pkgname = packageName desc in [ anchor ! [href $ packageCandidatesUri candidates "" pkgname ] << display pkgname , toHtml ": " , toHtml $ intersperse (toHtml ", ") $ flip map pkgs $ \pkg -> anchor ! [href $ corePackageIdUri candidatesCore "" (packageId pkg)] << display (packageVersion pkg) , toHtml $ ". 
" ++ description desc ] servePackageCandidates :: Resource -> DynamicPath -> ServerPartE Response servePackageCandidates candPkgUp dpath = do pkgname <- packageInPath dpath pkgs <- lookupCandidateName pkgname return $ toResponse $ Resource.XHtml $ hackagePage "Package candidates" $ [ h3 << ("Candidates for " ++ display pkgname) ] ++ case pkgs of [] -> [ toHtml "No candidates exist for ", packageNameLink pkgname, toHtml ". Upload one for " , anchor ! [href $ renderResource candPkgUp [display pkgname]] << "this" , toHtml " or " , anchor ! [href $ "/packages/candidates/upload"] << "another" , toHtml " package?" ] _ -> [ unordList $ flip map pkgs $ \pkg -> anchor ! [href $ corePackageIdUri candidatesCore "" $ packageId pkg] << display (packageVersion pkg) ] -- TODO: make publishCandidate a member of the PackageCandidates feature, just like -- putDeprecated and putPreferred are for the Versions feature. servePostPublish :: DynamicPath -> ServerPartE Response servePostPublish dpath = do uresult <- publishCandidate dpath False return $ toResponse $ Resource.XHtml $ hackagePage "Publish successful" $ [ paragraph << [toHtml "Successfully published ", packageLink (packageId $ uploadDesc uresult), toHtml "!"] ] ++ case uploadWarnings uresult of [] -> [] warns -> [paragraph << "There were some warnings:", unordList warns] serveDeleteForm :: DynamicPath -> ServerPartE Response serveDeleteForm dpath = do candidate <- packageInPath dpath >>= lookupCandidateId guardAuthorisedAsMaintainer (packageName candidate) let pkgid = packageId candidate return $ toResponse $ Resource.XHtml $ hackagePage "Deleting candidates" [form ! [theclass "box", XHtml.method "post", action $ deleteUri candidatesResource "" pkgid] << input ! 
[thetype "submit", value "Delete package candidate"]] {------------------------------------------------------------------------------- Preferred versions -------------------------------------------------------------------------------} data HtmlPreferred = HtmlPreferred { htmlPreferredResources :: [Resource] , editPreferred :: Resource , editDeprecated :: Resource } mkHtmlPreferred :: HtmlUtilities -> CoreFeature -> VersionsFeature -> HtmlPreferred mkHtmlPreferred HtmlUtilities{..} CoreFeature{ coreResource = CoreResource{ packageInPath , lookupPackageName } } VersionsFeature{..} = HtmlPreferred{..} where versions = versionsResource editDeprecated = (resourceAt "/package/:package/deprecated/edit") { resourceGet = [("html", serveDeprecateForm)] } editPreferred = (resourceAt "/package/:package/preferred/edit") { resourceGet = [("html", servePreferForm)] } htmlPreferredResources = [ -- preferred versions editDeprecated , editPreferred , (extendResource $ preferredResource versions) { resourceGet = [("html", servePreferredSummary)] } , (extendResource $ preferredPackageResource versions) { resourceGet = [("html", servePackagePreferred editPreferred)] , resourcePut = [("html", servePutPreferred)] } , (extendResource $ deprecatedResource versions) { resourceGet = [("html", serveDeprecatedSummary)] } , (extendResource $ deprecatedPackageResource versions) { resourceGet = [("html", servePackageDeprecated editDeprecated)] , resourcePut = [("html", servePutDeprecated )] } ] -- This feature is in great need of a Pages module serveDeprecatedSummary :: DynamicPath -> ServerPartE Response serveDeprecatedSummary _ = doDeprecatedsRender >>= \renders -> do return $ toResponse $ Resource.XHtml $ hackagePage "Deprecated packages" [ h2 << "Deprecated packages" , unordList $ flip map renders $ \(pkg, pkgs) -> [ packageNameLink pkg, toHtml ": ", deprecatedText pkgs ] ] deprecatedText :: [PackageName] -> Html deprecatedText [] = toHtml "deprecated" deprecatedText pkgs = toHtml [ toHtml 
"deprecated in favor of " , concatHtml $ intersperse (toHtml ", ") (map packageNameLink pkgs) ] servePackageDeprecated :: Resource -> DynamicPath -> ServerPartE Response servePackageDeprecated deprEdit dpath = do pkgname <- packageInPath dpath mpkg <- doDeprecatedRender pkgname return $ toResponse $ Resource.XHtml $ hackagePage "Deprecated status" [ h2 << "Deprecated status" , paragraph << [ toHtml $ case mpkg of Nothing -> [packageNameLink pkgname, toHtml " is not deprecated"] Just pkgs -> [packageNameLink pkgname, toHtml " is ", deprecatedText pkgs] , thespan ! [thestyle "color: gray"] << [ toHtml " [maintainers: " , anchor ! [href $ renderResource deprEdit [display pkgname]] << "edit" , toHtml "]" ] ] ] servePreferredSummary :: DynamicPath -> ServerPartE Response servePreferredSummary _ = doPreferredsRender >>= \renders -> do return $ toResponse $ Resource.XHtml $ hackagePage "Preferred versions" [ h2 << "Preferred versions" , case renders of [] -> paragraph << "There are no global preferred versions." _ -> unordList $ flip map renders $ \(pkgname, pref) -> [ packageNameLink pkgname , unordList [varList "Preferred ranges" (rendRanges pref), varList "Deprecated versions" (map display $ rendVersions pref), toHtml ["Calculated range: ", rendSumRange pref]] ] , paragraph << [ anchor ! [href "/packages/preferred-versions"] << "preferred-versions" , toHtml " is the text file served with every index tarball that contains this information." ] ] where varList summ [] = toHtml $ summ ++ ": none" varList summ xs = toHtml $ summ ++ ": " ++ intercalate ", " xs packagePrefAbout :: Maybe Resource -> PackageName -> [Html] packagePrefAbout maybeEdit pkgname = [ paragraph << [ anchor ! [href $ preferredUri versions ""] << "Preferred and deprecated versions" , toHtml $ " can be used to influence Cabal's decisions about which versions of " , packageNameLink pkgname , toHtml " to install. 
If a range of versions is preferred, it means that the installer won't install a non-preferred package version unless it is explicitly specified or if it's the only choice the installer has. Deprecating a version adds a range which excludes just that version. All of this information is collected in the " , anchor ! [href "/packages/preferred-versions"] << "preferred-versions" , toHtml " file that's included in the index tarball." , flip (maybe noHtml) maybeEdit $ \prefEdit -> thespan ! [thestyle "color: gray"] << [ toHtml " [maintainers: " , anchor ! [href $ renderResource prefEdit [display pkgname]] << "edit" , toHtml "]" ] ] , paragraph << [ toHtml "If all the available versions of a package are non-preferred or deprecated, cabal-install will treat this the same as if none of them are. This feature doesn't affect whether or not to install a package, only for selecting versions after a given package has decided to be installed. " , anchor ! [href $ deprecatedPackageUri versions "" pkgname] << "Entire-package deprecation" , toHtml " is also available, but it's separate from preferred versions." 
] ] servePackagePreferred :: Resource -> DynamicPath -> ServerPartE Response servePackagePreferred prefEdit dpath = do pkgname <- packageInPath dpath pkgs <- lookupPackageName pkgname pref <- doPreferredRender pkgname let dtitle = display pkgname ++ ": preferred and deprecated versions" prefInfo <- queryGetPreferredInfo pkgname return $ toResponse $ Resource.XHtml $ hackagePage dtitle --needs core, preferredVersions, pkgname [ h2 << dtitle , concatHtml $ packagePrefAbout (Just prefEdit) pkgname , h4 << "Stored information" , case rendRanges pref of [] -> paragraph << [display pkgname ++ " has no preferred version ranges."] prefs -> paragraph << ["Preferred versions for " ++ display pkgname ++ ":"] +++ unordList prefs , case rendVersions pref of [] -> paragraph << ["It has no deprecated versions."] deprs -> paragraph << [ "Explicitly deprecated versions for " ++ display pkgname ++ " include: " , intercalate ", " (map display deprs)] , toHtml "The version range given to this package, therefore, is " +++ strong (toHtml $ rendSumRange pref) , h4 << "Versions affected" , paragraph << "Blue versions are normal versions. Green are those out of any preferred version ranges. Gray are deprecated." , paragraph << (snd $ Pages.renderVersion (PackageIdentifier pkgname $ Version [] []) (classifyVersions prefInfo $ map packageVersion pkgs) Nothing) ] servePutPreferred :: DynamicPath -> ServerPartE Response servePutPreferred dpath = do pkgname <- packageInPath dpath putPreferred pkgname return $ toResponse $ Resource.XHtml $ hackagePage "Set preferred versions" [ h2 << "Set preferred versions" , paragraph << [ toHtml "Set the " , anchor ! 
[href $ preferredPackageUri versionsResource "" pkgname] << "preferred versions" , toHtml " for " , packageNameLink pkgname , toHtml "."] ] servePutDeprecated :: DynamicPath -> ServerPartE Response servePutDeprecated dpath = do pkgname <- packageInPath dpath wasDepr <- putDeprecated pkgname let dtitle = if wasDepr then "Package deprecated" else "Package undeprecated" return $ toResponse $ Resource.XHtml $ hackagePage dtitle [ h2 << dtitle , paragraph << [ toHtml "Set the " , anchor ! [href $ deprecatedPackageUri versionsResource "" pkgname] << "deprecated status" , toHtml " for " , packageNameLink pkgname , toHtml "."] ] -- deprecated: checkbox, by: text field, space-separated list of packagenames serveDeprecateForm :: DynamicPath -> ServerPartE Response serveDeprecateForm dpath = do pkgname <- packageInPath dpath mpkg <- doDeprecatedRender pkgname let (isDepr, mfield) = case mpkg of Just pkgs -> (True, unwords $ map display pkgs) Nothing -> (False, "") return $ toResponse $ Resource.XHtml $ hackagePage "Deprecate package" [paragraph << [toHtml "Configure deprecation for ", packageNameLink pkgname], form . ulist ! [theclass "box", XHtml.method "post", action $ deprecatedPackageUri versionsResource "" pkgname] << [ hidden "_method" "PUT" , li . toHtml $ makeCheckbox isDepr "deprecated" "on" "Deprecate package" , li . toHtml $ makeInput [thetype "text", value mfield] "by" "Superseded by: " ++ [br, toHtml "(Optional; space-separated list of package names)"] , paragraph << input ! [thetype "submit", value "Set status"] ]] -- preferred: text box (one version range per line). 
deprecated: list of text boxes with same name servePreferForm :: DynamicPath -> ServerPartE Response servePreferForm dpath = do pkgname <- packageInPath dpath pkgs <- lookupPackageName pkgname pref <- doPreferredRender pkgname let allVersions = map packageVersion pkgs rangesList = rendRanges pref deprVersions = rendVersions pref return $ toResponse $ Resource.XHtml $ hackagePage "Adjust preferred versions" [concatHtml $ packagePrefAbout Nothing pkgname, form ! [theclass "box", XHtml.method "post", action $ preferredPackageUri versionsResource "" pkgname] << [ hidden "_method" "PUT" , paragraph << "Preferred version ranges." , paragraph << textarea ! [name "preferred", rows $ show (4::Int), cols $ show (80::Int)] << unlines rangesList , paragraph << "Deprecated versions." , toHtml $ intersperse (toHtml " ") $ map (\v -> toHtml $ makeCheckbox (v `elem` deprVersions) "deprecated" (display v) (display v)) allVersions , paragraph << input ! [thetype "submit", value "Set status"] ]] {------------------------------------------------------------------------------- Downloads -------------------------------------------------------------------------------} data HtmlDownloads = HtmlDownloads { htmlDownloadsResources :: [Resource] } mkHtmlDownloads :: HtmlUtilities -> DownloadFeature -> HtmlDownloads mkHtmlDownloads HtmlUtilities{..} DownloadFeature{..} = HtmlDownloads{..} where downs = downloadResource -- downloads htmlDownloadsResources = [ (extendResource $ topDownloads downs) { resourceGet = [("html", serveDownloadTop)] } ] serveDownloadTop :: DynamicPath -> ServerPartE Response serveDownloadTop _ = do pkgList <- sortedPackages `liftM` recentPackageDownloads return $ toResponse $ Resource.XHtml $ hackagePage "Total downloads" $ [ h2 << "Downloaded packages" , thediv << table << downTableRows pkgList ] where downTableRows pkgList = [ tr << [ th << "Package name", th << "Downloads" ] ] ++ [ tr ! 
[theclass (if odd n then "odd" else "even")] << [ td << packageNameLink pkgname , td << [ toHtml $ (show count) ] ] | ((pkgname, count), n) <- zip pkgList [(1::Int)..] ] sortedPackages :: RecentDownloads -> [(PackageName, Int)] sortedPackages = sortBy (flip compare `on` snd) . cmToList {------------------------------------------------------------------------------- Tags -------------------------------------------------------------------------------} data HtmlTags = HtmlTags { htmlTagsResources :: [Resource] , tagEdit :: Resource } mkHtmlTags :: HtmlUtilities -> CoreFeature -> ListFeature -> TagsFeature -> HtmlTags mkHtmlTags HtmlUtilities{..} CoreFeature{ coreResource = CoreResource{ packageInPath , lookupPackageName } } ListFeature{makeItemList} TagsFeature{..} = HtmlTags{..} where tags = tagsResource tagEdit = (resourceAt "/package/:package/tags/edit") { resourceGet = [("html", serveTagsForm)] } htmlTagsResources = [ (extendResource $ tagsListing tags) { resourceGet = [("html", serveTagsListing)] } , (extendResource $ tagListing tags) { resourceGet = [("html", serveTagListing)] } , (extendResource $ packageTagsListing tags) { resourcePut = [("html", putPackageTags)], resourceGet = [] } , tagEdit -- (extendResource $ packageTagsEdit tags) { resourceGet = [("html", serveTagsForm)] } ] serveTagsListing :: DynamicPath -> ServerPartE Response serveTagsListing _ = do tagList <- queryGetTagList let withCounts = filter ((>0) . snd) . map (\(tg, pkgs) -> (tg, Set.size pkgs)) $ tagList countSort = sortBy (flip compare `on` snd) withCounts return $ toResponse $ Resource.XHtml $ hackagePage "Hackage tags" $ [ h2 << "Hackage tags" , h4 << "By name" , paragraph ! [theclass "toc"] << (intersperse (toHtml ", ") $ map (tagItem . fst) withCounts) , h4 << "By frequency" , paragraph ! [theclass "toc"] << (intersperse (toHtml ", ") $ map (toHtml . tagCountItem) countSort) ] where tagCountItem (tg, count) = [ tagItem tg , toHtml $ " (" ++ show count ++ ")" ] tagItem tg = anchor ! 
[href $ tagUri tags "" tg] << display tg serveTagListing :: DynamicPath -> ServerPartE Response serveTagListing dpath = withTagPath dpath $ \tg pkgnames -> do let tagd = "Packages tagged " ++ display tg pkgs = Set.toList pkgnames items <- liftIO $ makeItemList pkgs let (mtag, histogram) = Map.updateLookupWithKey (\_ _ -> Nothing) tg $ tagHistogram items -- make a 'related tags' section, so exclude this tag from the histogram count = fromMaybe 0 mtag return $ toResponse $ Resource.XHtml $ hackagePage tagd $ [ h2 << tagd , case items of [] -> toHtml "No packages have this tag." _ -> toHtml [ paragraph << [if count==1 then "1 package has" else show count ++ " packages have", " this tag."] , paragraph ! [theclass "toc"] << [toHtml "Related tags: ", toHtml $ showHistogram histogram] , ulist ! [theclass "packages"] << map renderItem items ] ] where showHistogram hist = (++takeHtml) . intersperse (toHtml ", ") $ map histogramEntry $ take takeAmount sortHist where hsize = Map.size hist takeAmount = max (div (hsize*2) 3) 12 takeHtml = if takeAmount >= hsize then [] else [toHtml ", ..."] sortHist = sortBy (flip compare `on` snd) $ Map.toList hist histogramEntry (tg', count) = anchor ! [href $ tagUri tags "" tg'] << display tg' +++ (" (" ++ show count ++ ")") putPackageTags :: DynamicPath -> ServerPartE Response putPackageTags dpath = do pkgname <- packageInPath dpath _ <- lookupPackageName pkgname -- TODO: necessary? putTags pkgname return $ toResponse $ Resource.XHtml $ hackagePage "Set tags" [toHtml "Put tags for ", packageNameLink pkgname] -- serve form for editing, to be received by putTags serveTagsForm :: DynamicPath -> ServerPartE Response serveTagsForm dpath = do pkgname <- packageInPath dpath currTags <- queryTagsForPackage pkgname let tagsStr = concat . intersperse ", " . map display . Set.toList $ currTags return $ toResponse $ Resource.XHtml $ hackagePage "Edit package tags" [paragraph << [toHtml "Set tags for ", packageNameLink pkgname], form ! 
[theclass "box", XHtml.method "post", action $ packageTagsUri tags "" pkgname] << [ hidden "_method" "PUT" , dlist . ddef . toHtml $ makeInput [thetype "text", value tagsStr] "tags" "Set tags to " , paragraph << input ! [thetype "submit", value "Set tags"] ]] {------------------------------------------------------------------------------- Search -------------------------------------------------------------------------------} data HtmlSearch = HtmlSearch { htmlSearchResources :: [Resource] } mkHtmlSearch :: HtmlUtilities -> ListFeature -> SearchFeature -> HtmlSearch mkHtmlSearch HtmlUtilities{..} ListFeature{makeItemList} SearchFeature{..} = HtmlSearch{..} where htmlSearchResources = [ (extendResource searchPackagesResource) { resourceGet = [("html", servePackageFind)] } ] servePackageFind :: DynamicPath -> ServerPartE Response servePackageFind _ = do (mtermsStr, offset, limit, mexplain) <- queryString $ (,,,) <$> optional (look "terms") <*> mplus (lookRead "offset") (pure 0) <*> mplus (lookRead "limit") (pure 100) <*> optional (look "explain") let explain = isJust mexplain case mtermsStr of Just termsStr | explain , terms <- words termsStr, not (null terms) -> do params <- queryString getSearchRankParameters results <- searchPackagesExplain params terms return $ toResponse $ Resource.XHtml $ hackagePage "Package search" $ [ toHtml $ paramsForm params termsStr , resetParamsForm termsStr , toHtml $ explainResults results ] Just termsStr | terms <- words termsStr, not (null terms) -> do pkgnames <- searchPackages terms let (pageResults, moreResults) = splitAt limit (drop offset pkgnames) pkgDetails <- liftIO $ makeItemList pageResults return $ toResponse $ Resource.XHtml $ hackagePage "Package search" $ [ toHtml $ searchForm termsStr False , toHtml $ resultsArea pkgDetails offset limit moreResults termsStr , alternativeSearch ] _ -> return $ toResponse $ Resource.XHtml $ hackagePage "Text search" $ [ toHtml $ searchForm "" explain , alternativeSearch ] where 
resultsArea pkgDetails offset limit moreResults termsStr = [ h2 << "Results" , if offset == 0 then noHtml else paragraph << ("(" ++ show (fst range + 1) ++ " to " ++ show (snd range) ++ ")") , case pkgDetails of [] | offset == 0 -> toHtml "None" | otherwise -> toHtml "No more results" _ -> toHtml [ ulist ! [theclass "packages"] << map renderItem pkgDetails , if null moreResults then noHtml else anchor ! [href moreResultsLink] << "More results..." ] ] where range = (offset, offset + length pkgDetails) moreResultsLink = "/packages/search?" ++ "terms=" ++ escapeURIString isUnreserved termsStr ++ "&offset=" ++ show (offset + limit) ++ "&limit=" ++ show limit searchForm termsStr explain = [ h2 << "Package search" , form ! [XHtml.method "GET", action "/packages/search"] << [ input ! [value termsStr, name "terms", identifier "terms"] , toHtml " " , input ! [thetype "submit", value "Search"] , if explain then input ! [thetype "hidden", name "explain"] else noHtml ] ] alternativeSearch = paragraph << [ toHtml "Alternatively, if you are looking for a particular function then try " , anchor ! [href "http://holumbus.fh-wedel.de/hayoo/hayoo.html"] << "Hayoo" , toHtml " or " , anchor ! [href "http://www.haskell.org/hoogle/"] << "Hoogle" ] explainResults :: [(Search.Explanation PkgDocField PkgDocFeatures T.Text, PackageName)] -> [Html] explainResults results = [ h2 << "Results" , case results of [] -> noHtml ((explanation1, _):_) -> table ! 
[ border 1 ] << ( ( tr << tableHeader explanation1) : [ tr << tableRow explanation pkgname | (explanation, pkgname) <- results ]) ] where tableHeader Search.Explanation{..} = [ th << "package", th << "overall score" ] ++ [ th << (show term ++ " score") | (term, _score) <- termScores ] ++ [ th << (show term ++ " " ++ show field ++ " score") | (term, fieldScores) <- termFieldScores , (field, _score) <- fieldScores ] ++ [ th << (show feature ++ " score") | (feature, _score) <- nonTermScores ] tableRow Search.Explanation{..} pkgname = [ td << display pkgname, td << show overallScore ] ++ [ td << show score | (_term, score) <- termScores ] ++ [ td << show score | (_term, fieldScores) <- termFieldScores , (_field, score) <- fieldScores ] ++ [ td << show score | (_feature, score) <- nonTermScores ] getSearchRankParameters = do let defaults = defaultSearchRankParameters k1 <- lookRead "k1" `mplus` pure (paramK1 defaults) bs <- sequence [ lookRead ("b" ++ show field) `mplus` pure (paramB defaults field) | field <- Ix.range (minBound, maxBound :: PkgDocField) ] ws <- sequence [ lookRead ("w" ++ show field) `mplus` pure (paramFieldWeights defaults field) | field <- Ix.range (minBound, maxBound :: PkgDocField) ] fs <- sequence [ lookRead ("w" ++ show feature) `mplus` pure (paramFeatureWeights defaults feature) | feature <- Ix.range (minBound, maxBound :: PkgDocFeatures) ] let barr, warr :: Array PkgDocField Float barr = listArray (minBound, maxBound) bs warr = listArray (minBound, maxBound) ws farr = listArray (minBound, maxBound) fs return defaults { paramK1 = k1, paramB = (barr Array.!), paramFieldWeights = (warr Array.!), paramFeatureWeights = (farr Array.!) } paramsForm SearchRankParameters{..} termsStr = [ h2 << "Package search (tuning & explanation)" , form ! [XHtml.method "GET", action "/packages/search"] << [ input ! [value termsStr, name "terms", identifier "terms"] , toHtml " " , input ! [thetype "submit", value "Search"] , input ! 
[thetype "hidden", name "explain"] , simpleTable [] [] $ makeInput [thetype "text", value (show paramK1)] "k1" "K1 parameter" : [ makeInput [thetype "text", value (show (paramB field))] ("b" ++ fieldname) ("B param for " ++ fieldname) ++ makeInput [thetype "text", value (show (paramFieldWeights field)) ] ("w" ++ fieldname) ("Weight for " ++ fieldname) | field <- Ix.range (minBound, maxBound :: PkgDocField) , let fieldname = show field ] ++ [ makeInput [thetype "text", value (show (paramFeatureWeights feature)) ] ("w" ++ featurename) ("Weight for " ++ featurename) | feature <- Ix.range (minBound, maxBound :: PkgDocFeatures) , let featurename = show feature ] ] ] resetParamsForm termsStr = let SearchRankParameters{..} = defaultSearchRankParameters in form ! [XHtml.method "GET", action "/packages/search"] << (concat $ [ input ! [ thetype "submit", value "Reset parameters" ] , input ! [ thetype "hidden", name "terms", value termsStr ] , input ! [ thetype "hidden", name "explain" ] , input ! [ thetype "hidden", name "k1", value (show paramK1) ] ] : [ [ input ! [ thetype "hidden" , name ("b" ++ fieldname) , value (show (paramB field)) ] , input ! [ thetype "hidden" , name ("w" ++ fieldname) , value (show (paramFieldWeights field)) ] ] | field <- Ix.range (minBound, maxBound :: PkgDocField) , let fieldname = show field ] ++ [ [ input ! 
[ thetype "hidden" , name ("w" ++ featurename) , value (show (paramFeatureWeights feature)) ] ] | feature <- Ix.range (minBound, maxBound :: PkgDocFeatures) , let featurename = show feature ]) {------------------------------------------------------------------------------- Groups -------------------------------------------------------------------------------} htmlGroupResource :: UserFeature -> GroupResource -> [Resource] htmlGroupResource UserFeature{..} r@(GroupResource groupR userR getGroup) = [ (extendResource groupR) { resourceDesc = [ (GET, "Show list of users") , (POST, "Add a user to the group") ] , resourceGet = [ ("html", getList) ] , resourcePost = [ ("html", postUser) ] } , (extendResource userR) { resourceDesc = [ (DELETE, "Delete a user from the group") ] , resourceDelete = [ ("html", deleteFromGroup) ] } , (extendResourcePath "/edit" groupR) { resourceDesc = [ (GET, "Show edit form for the group") ] , resourceGet = [ ("html", getEditList) ] } ] where getList dpath = do group <- getGroup dpath userDb <- queryGetUserDb userlist <- liftIO . queryUserList $ group let unames = [ Users.userIdToName userDb uid | uid <- Group.enumerate userlist ] let baseUri = renderResource' groupR dpath return . toResponse . Resource.XHtml $ Pages.groupPage unames baseUri (False, False) (groupDesc group) getEditList dpath = do group <- getGroup dpath (canAdd, canDelete) <- lookupGroupEditAuth group userDb <- queryGetUserDb userlist <- liftIO . queryUserList $ group let unames = [ Users.userIdToName userDb uid | uid <- Group.enumerate userlist ] let baseUri = renderResource' groupR dpath return . toResponse . 
Resource.XHtml $ Pages.groupPage unames baseUri (canAdd, canDelete) (groupDesc group) postUser dpath = do group <- getGroup dpath groupAddUser group dpath goToList dpath deleteFromGroup dpath = do group <- getGroup dpath groupDeleteUser group dpath goToList dpath goToList dpath = seeOther (renderResource' (groupResource r) dpath) (toResponse ()) {------------------------------------------------------------------------------- Util -------------------------------------------------------------------------------} htmlUtilities :: CoreFeature -> TagsFeature -> HtmlUtilities htmlUtilities CoreFeature{coreResource} TagsFeature{tagsResource} = HtmlUtilities{..} where packageLink :: PackageId -> Html packageLink pkgid = anchor ! [href $ corePackageIdUri cores "" pkgid] << display pkgid packageNameLink :: PackageName -> Html packageNameLink pkgname = anchor ! [href $ corePackageNameUri cores "" pkgname] << display pkgname renderItem :: PackageItem -> Html renderItem item = li ! classes << [ packageNameLink pkgname , toHtml $ " " ++ ptype (itemHasLibrary item) (itemNumExecutables item) ++ ": " ++ itemDesc item , " (" +++ renderTags (itemTags item) +++ ")" ] where pkgname = itemName item ptype _ 0 = "library" ptype lib num = (if lib then "library and " else "") ++ (case num of 1 -> "program"; _ -> "programs") classes = case classList of [] -> []; _ -> [theclass $ unwords classList] classList = (case itemDeprecated item of Nothing -> []; _ -> ["deprecated"]) renderTags :: Set Tag -> [Html] renderTags tags = intersperse (toHtml ", ") (map (\tg -> anchor ! [href $ tagUri tagsResource "" tg] << display tg) $ Set.toList tags) cores = coreResource data HtmlUtilities = HtmlUtilities { packageLink :: PackageId -> Html , packageNameLink :: PackageName -> Html , renderItem :: PackageItem -> Html , renderTags :: Set Tag -> [Html] }
haskell-infra/hackage-server
Distribution/Server/Features/Html.hs
bsd-3-clause
79,192
0
33
25,263
15,876
8,340
7,536
-1
-1
{- (c) The University of Glasgow 2006 (c) The GRASP/AQUA Project, Glasgow University, 1992-1998 @DsMonad@: monadery used in desugaring -} {-# LANGUAGE FlexibleInstances #-} {-# OPTIONS_GHC -fno-warn-orphans #-} -- instance MonadThings is necessarily an orphan module Eta.DeSugar.DsMonad ( DsM, mapM, mapAndUnzipM, initDs, initDsTc, initTcDsForSolver, fixDs, foldlM, foldrM, whenGOptM, unsetGOptM, unsetWOptM, Applicative(..),(<$>), newLocalName, duplicateLocalDs, newSysLocalDs, newSysLocalsDs, newUniqueId, newFailLocalDs, newPredVarDs, getSrcSpanDs, putSrcSpanDs, mkPrintUnqualifiedDs, newUnique, UniqSupply, newUniqueSupply, getGhcModeDs, dsGetFamInstEnvs, dsGetStaticBindsVar, dsLookupGlobal, dsLookupGlobalId, dsDPHBuiltin, dsLookupTyCon, dsLookupDataCon, PArrBuiltin(..), dsLookupDPHRdrEnv, dsLookupDPHRdrEnv_maybe, dsInitPArrBuiltin, DsMetaEnv, DsMetaVal(..), dsGetMetaEnv, dsLookupMetaEnv, dsExtendMetaEnv, -- Getting and setting EvVars and term constraints in local environment getDictsDs, addDictsDs, getTmCsDs, addTmCsDs, -- Iterations for pm checking incrCheckPmIterDs, resetPmIterDs, -- Warnings DsWarning, warnDs, failWithDs, discardWarningsDs, -- Data types DsMatchContext(..), EquationInfo(..), MatchResult(..), DsWrapper, idDsWrapper, CanItFail(..), orFail ) where import Eta.TypeCheck.TcRnMonad import Eta.Types.FamInstEnv import Eta.Core.CoreSyn import Eta.HsSyn.HsSyn import Eta.Iface.TcIface import Eta.Iface.LoadIface import Eta.Main.Finder import Eta.Prelude.PrelNames import Eta.BasicTypes.RdrName import Eta.Main.HscTypes import Eta.Utils.Bag import Eta.BasicTypes.DataCon import Eta.Types.TyCon import Eta.BasicTypes.Id import Eta.BasicTypes.Module import Eta.Utils.Outputable import Eta.BasicTypes.SrcLoc import Eta.Types.Type import Eta.BasicTypes.UniqSupply import Eta.BasicTypes.Name import Eta.BasicTypes.NameEnv import Eta.BasicTypes.Var (EvVar) import Eta.Main.DynFlags import Eta.Main.ErrUtils import Eta.Utils.FastString import Eta.Utils.Maybes import 
GHC.Fingerprint import Eta.DeSugar.PmExpr import Data.IORef import Control.Monad import qualified Eta.LanguageExtensions as LangExt {- ************************************************************************ * * Data types for the desugarer * * ************************************************************************ -} data DsMatchContext = DsMatchContext (HsMatchContext Name) SrcSpan deriving () data EquationInfo = EqnInfo { eqn_pats :: [Pat Id], -- The patterns for an eqn eqn_rhs :: MatchResult } -- What to do after match instance Outputable EquationInfo where ppr (EqnInfo pats _) = ppr pats type DsWrapper = CoreExpr -> CoreExpr idDsWrapper :: DsWrapper idDsWrapper e = e -- The semantics of (match vs (EqnInfo wrap pats rhs)) is the MatchResult -- \fail. wrap (case vs of { pats -> rhs fail }) -- where vs are not bound by wrap -- A MatchResult is an expression with a hole in it data MatchResult = MatchResult CanItFail -- Tells whether the failure expression is used (CoreExpr -> DsM CoreExpr) -- Takes a expression to plug in at the -- failure point(s). The expression should -- be duplicatable! data CanItFail = CanFail | CantFail orFail :: CanItFail -> CanItFail -> CanItFail orFail CantFail CantFail = CantFail orFail _ _ = CanFail {- ************************************************************************ * * Monad functions * * ************************************************************************ -} -- Compatibility functions fixDs :: (a -> DsM a) -> DsM a fixDs = fixM type DsWarning = (SrcSpan, SDoc) -- Not quite the same as a WarnMsg, we have an SDoc here -- and we'll do the print_unqual stuff later on to turn it -- into a Doc. 
initDs :: HscEnv -> Module -> GlobalRdrEnv -> TypeEnv -> FamInstEnv -> DsM a -> IO (Messages, Maybe a) -- Print errors and warnings, if any arise initDs hsc_env mod rdr_env type_env fam_inst_env thing_inside = do { msg_var <- newIORef (emptyBag, emptyBag) ; static_binds_var <- newIORef [] ; pm_iter_var <- newIORef 0 ; let dflags = hsc_dflags hsc_env (ds_gbl_env, ds_lcl_env) = mkDsEnvs dflags mod rdr_env type_env fam_inst_env msg_var static_binds_var pm_iter_var ; either_res <- initTcRnIf 'd' hsc_env ds_gbl_env ds_lcl_env $ loadDAP $ initDPHBuiltins $ tryM thing_inside -- Catch exceptions (= errors during desugaring) -- Display any errors and warnings -- Note: if -Werror is used, we don't signal an error here. ; msgs <- readIORef msg_var ; let final_res | errorsFound dflags msgs = Nothing | otherwise = case either_res of Right res -> Just res Left exn -> pprPanic "initDs" (text (show exn)) -- The (Left exn) case happens when the thing_inside throws -- a UserError exception. Then it should have put an error -- message in msg_var, so we just discard the exception ; return (msgs, final_res) } where -- Extend the global environment with a 'GlobalRdrEnv' containing the exported entities of -- * 'Data.Array.Parallel' iff '-XParallelArrays' specified (see also 'checkLoadDAP'). -- * 'Data.Array.Parallel.Prim' iff '-fvectorise' specified. 
loadDAP thing_inside = do { dapEnv <- loadOneModule dATA_ARRAY_PARALLEL_NAME checkLoadDAP paErr ; dappEnv <- loadOneModule dATA_ARRAY_PARALLEL_PRIM_NAME (goptM Opt_Vectorise) veErr ; updGblEnv (\env -> env {ds_dph_env = dapEnv `plusOccEnv` dappEnv }) thing_inside } where loadOneModule :: ModuleName -- the module to load -> DsM Bool -- under which condition -> MsgDoc -- error message if module not found -> DsM GlobalRdrEnv -- empty if condition 'False' loadOneModule modname check err = do { doLoad <- check ; if not doLoad then return emptyGlobalRdrEnv else do { ; result <- liftIO $ findImportedModule hsc_env modname Nothing ; case result of Found _ mod -> loadModule err mod _ -> pprPgmError "Unable to use Data Parallel Haskell (DPH):" err } } paErr = ptext (sLit "To use ParallelArrays,") <+> specBackend $$ hint1 $$ hint2 veErr = ptext (sLit "To use -fvectorise,") <+> specBackend $$ hint1 $$ hint2 specBackend = ptext (sLit "you must specify a DPH backend package") hint1 = ptext (sLit "Look for packages named 'dph-lifted-*' with 'eta-pkg'") hint2 = ptext (sLit "You may need to install them with 'cabal install dph-examples'") initDPHBuiltins thing_inside = do { -- If '-XParallelArrays' given, we populate the builtin table for desugaring those ; doInitBuiltins <- checkLoadDAP ; if doInitBuiltins then dsInitPArrBuiltin thing_inside else thing_inside } checkLoadDAP = do { paEnabled <- xoptM LangExt.ParallelArrays ; return $ paEnabled && mod /= gHC_PARR' && moduleName mod /= dATA_ARRAY_PARALLEL_NAME } -- do not load 'Data.Array.Parallel' iff compiling 'base:GHC.PArr' or a -- module called 'dATA_ARRAY_PARALLEL_NAME'; see also the comments at the top -- of 'base:GHC.PArr' and 'Data.Array.Parallel' in the DPH libraries initDsTc :: DsM a -> TcM a initDsTc thing_inside = do { this_mod <- getModule ; tcg_env <- getGblEnv ; msg_var <- getErrsVar ; dflags <- getDynFlags ; static_binds_var <- liftIO $ newIORef [] ; pm_iter_var <- liftIO $ newIORef 0 ; let type_env = tcg_type_env 
tcg_env rdr_env = tcg_rdr_env tcg_env fam_inst_env = tcg_fam_inst_env tcg_env ds_envs = mkDsEnvs dflags this_mod rdr_env type_env fam_inst_env msg_var static_binds_var pm_iter_var ; setEnvs ds_envs thing_inside } initTcDsForSolver :: TcM a -> DsM (Messages, Maybe a) -- Spin up a TcM context so that we can run the constraint solver -- Returns any error messages generated by the constraint solver -- and (Just res) if no error happened; Nothing if an errror happened -- -- Simon says: I'm not very happy about this. We spin up a complete TcM monad -- only to immediately refine it to a TcS monad. -- Better perhaps to make TcS into its own monad, rather than building on TcS -- But that may in turn interact with plugins initTcDsForSolver thing_inside = do { (gbl, lcl) <- getEnvs ; hsc_env <- getTopEnv ; let DsGblEnv { ds_mod = mod , ds_fam_inst_env = fam_inst_env } = gbl DsLclEnv { dsl_loc = loc } = lcl ; liftIO $ initTc hsc_env HsSrcFile False mod loc $ updGblEnv (\tc_gbl -> tc_gbl { tcg_fam_inst_env = fam_inst_env }) $ thing_inside } mkDsEnvs :: DynFlags -> Module -> GlobalRdrEnv -> TypeEnv -> FamInstEnv -> IORef Messages -> IORef [(Fingerprint, (Id, CoreExpr))] -> IORef Int -> (DsGblEnv, DsLclEnv) mkDsEnvs dflags mod rdr_env type_env fam_inst_env msg_var static_binds_var pmvar = let if_genv = IfGblEnv { if_rec_types = Just (mod, return type_env) } if_lenv = mkIfLclEnv mod (ptext (sLit "GHC error in desugarer lookup in") <+> ppr mod) False real_span = realSrcLocSpan (mkRealSrcLoc (moduleNameFS (moduleName mod)) 1 1) gbl_env = DsGblEnv { ds_mod = mod , ds_fam_inst_env = fam_inst_env , ds_if_env = (if_genv, if_lenv) , ds_unqual = mkPrintUnqualified dflags rdr_env , ds_msgs = msg_var , ds_dph_env = emptyGlobalRdrEnv , ds_parr_bi = panic "DsMonad: uninitialised ds_parr_bi" , ds_static_binds = static_binds_var } lcl_env = DsLclEnv { dsl_meta = emptyNameEnv , dsl_loc = real_span , dsl_dicts = emptyBag , dsl_tm_cs = emptyBag , dsl_pm_iter = pmvar } in (gbl_env, lcl_env) -- 
Attempt to load the given module and return its exported entities if successful. -- loadModule :: SDoc -> Module -> DsM GlobalRdrEnv loadModule doc mod = do { env <- getGblEnv ; setEnvs (ds_if_env env) $ do { iface <- loadInterface doc mod ImportBySystem ; case iface of Failed err -> pprPanic "DsMonad.loadModule: failed to load" (err $$ doc) Succeeded iface -> return $ mkGlobalRdrEnv . gresFromAvails prov . mi_exports $ iface } } where prov = Imported [ImpSpec { is_decl = imp_spec, is_item = ImpAll }] imp_spec = ImpDeclSpec { is_mod = name, is_qual = True, is_dloc = wiredInSrcSpan, is_as = name, is_java = False } name = moduleName mod {- ************************************************************************ * * Operations in the monad * * ************************************************************************ And all this mysterious stuff is so we can occasionally reach out and grab one or more names. @newLocalDs@ isn't exported---exported functions are defined with it. The difference in name-strings makes it easier to read debugging output. -} -- Make a new Id with the same print name, but different type, and new unique newUniqueId :: Id -> Type -> DsM Id newUniqueId id = mkSysLocalM (occNameFS (nameOccName (idName id))) duplicateLocalDs :: Id -> DsM Id duplicateLocalDs old_local = do { uniq <- newUnique ; return (setIdUnique old_local uniq) } newPredVarDs :: PredType -> DsM Var newPredVarDs pred = newSysLocalDs pred newSysLocalDs, newFailLocalDs :: Type -> DsM Id newSysLocalDs = mkSysLocalM (fsLit "ds") newFailLocalDs = mkSysLocalM (fsLit "fail") newSysLocalsDs :: [Type] -> DsM [Id] newSysLocalsDs tys = mapM newSysLocalDs tys {- We can also reach out and either set/grab location information from the @SrcSpan@ being carried around. -} getGhcModeDs :: DsM GhcMode getGhcModeDs = getDynFlags >>= return . 
ghcMode -- | Get in-scope type constraints (pm check) getDictsDs :: DsM (Bag EvVar) getDictsDs = do { env <- getLclEnv; return (dsl_dicts env) } -- | Add in-scope type constraints (pm check) addDictsDs :: Bag EvVar -> DsM a -> DsM a addDictsDs ev_vars = updLclEnv (\env -> env { dsl_dicts = unionBags ev_vars (dsl_dicts env) }) -- | Get in-scope term constraints (pm check) getTmCsDs :: DsM (Bag SimpleEq) getTmCsDs = do { env <- getLclEnv; return (dsl_tm_cs env) } -- | Add in-scope term constraints (pm check) addTmCsDs :: Bag SimpleEq -> DsM a -> DsM a addTmCsDs tm_cs = updLclEnv (\env -> env { dsl_tm_cs = unionBags tm_cs (dsl_tm_cs env) }) -- | Check that we have not done more iterations -- than we are supposed to and inrease the counter -- | Increase the counter for elapsed pattern match check iterations. -- If the current counter is already over the limit, fail incrCheckPmIterDs :: DsM () incrCheckPmIterDs = do env <- getLclEnv cnt <- readTcRef (dsl_pm_iter env) max_iters <- maxPmCheckIterations <$> getDynFlags if cnt >= max_iters then failM else updTcRef (dsl_pm_iter env) (+1) -- | Reset the counter for pattern match check iterations to zero resetPmIterDs :: DsM () resetPmIterDs = do { env <- getLclEnv; writeTcRef (dsl_pm_iter env) 0 } getSrcSpanDs :: DsM SrcSpan getSrcSpanDs = do { env <- getLclEnv ; return (RealSrcSpan (dsl_loc env)) } putSrcSpanDs :: SrcSpan -> DsM a -> DsM a putSrcSpanDs (UnhelpfulSpan {}) thing_inside = thing_inside putSrcSpanDs (RealSrcSpan real_span) thing_inside = updLclEnv (\ env -> env {dsl_loc = real_span}) thing_inside -- | Emit a warning for the current source location -- NB: Warns whether or not -Wxyz is set warnDs :: WarnReason -> SDoc -> DsM () warnDs reason warn = do { env <- getGblEnv ; loc <- getSrcSpanDs ; dflags <- getDynFlags ; let msg = makeIntoWarning reason $ mkWarnMsg dflags loc (ds_unqual env) warn ; updMutVar (ds_msgs env) (\ (w,e) -> (w `snocBag` msg, e)) } failWithDs :: SDoc -> DsM a failWithDs err = do { env <- 
getGblEnv ; loc <- getSrcSpanDs ; dflags <- getDynFlags ; let msg = mkErrMsg dflags loc (ds_unqual env) err ; updMutVar (ds_msgs env) (\ (w,e) -> (w, e `snocBag` msg)) ; failM } mkPrintUnqualifiedDs :: DsM PrintUnqualified mkPrintUnqualifiedDs = ds_unqual <$> getGblEnv instance {-# OVERLAPS #-} MonadThings (IOEnv (Env DsGblEnv DsLclEnv)) where lookupThing = dsLookupGlobal dsLookupGlobal :: Name -> DsM TyThing -- Very like TcEnv.tcLookupGlobal dsLookupGlobal name = do { env <- getGblEnv ; setEnvs (ds_if_env env) (tcIfaceGlobal name) } dsLookupGlobalId :: Name -> DsM Id dsLookupGlobalId name = tyThingId <$> dsLookupGlobal name -- |Get a name from "Data.Array.Parallel" for the desugarer, from the 'ds_parr_bi' component of the -- global desugerar environment. -- dsDPHBuiltin :: (PArrBuiltin -> a) -> DsM a dsDPHBuiltin sel = (sel . ds_parr_bi) <$> getGblEnv dsLookupTyCon :: Name -> DsM TyCon dsLookupTyCon name = tyThingTyCon <$> dsLookupGlobal name dsLookupDataCon :: Name -> DsM DataCon dsLookupDataCon name = tyThingDataCon <$> dsLookupGlobal name -- |Lookup a name exported by 'Data.Array.Parallel.Prim' or 'Data.Array.Parallel.Prim'. -- Panic if there isn't one, or if it is defined multiple times. dsLookupDPHRdrEnv :: OccName -> DsM Name dsLookupDPHRdrEnv occ = liftM (fromMaybe (pprPanic nameNotFound (ppr occ))) $ dsLookupDPHRdrEnv_maybe occ where nameNotFound = "Name not found in 'Data.Array.Parallel' or 'Data.Array.Parallel.Prim':" -- |Lookup a name exported by 'Data.Array.Parallel.Prim' or 'Data.Array.Parallel.Prim', -- returning `Nothing` if it's not defined. Panic if it's defined multiple times. 
dsLookupDPHRdrEnv_maybe :: OccName -> DsM (Maybe Name) dsLookupDPHRdrEnv_maybe occ = do { env <- ds_dph_env <$> getGblEnv ; let gres = lookupGlobalRdrEnv env occ ; case gres of [] -> return $ Nothing [gre] -> return $ Just $ gre_name gre _ -> pprPanic multipleNames (ppr occ) } where multipleNames = "Multiple definitions in 'Data.Array.Parallel' and 'Data.Array.Parallel.Prim':" -- Populate 'ds_parr_bi' from 'ds_dph_env'. -- dsInitPArrBuiltin :: DsM a -> DsM a dsInitPArrBuiltin thing_inside = do { lengthPVar <- externalVar (fsLit "lengthP") ; replicatePVar <- externalVar (fsLit "replicateP") ; singletonPVar <- externalVar (fsLit "singletonP") ; mapPVar <- externalVar (fsLit "mapP") ; filterPVar <- externalVar (fsLit "filterP") ; zipPVar <- externalVar (fsLit "zipP") ; crossMapPVar <- externalVar (fsLit "crossMapP") ; indexPVar <- externalVar (fsLit "!:") ; emptyPVar <- externalVar (fsLit "emptyP") ; appPVar <- externalVar (fsLit "+:+") -- ; enumFromToPVar <- externalVar (fsLit "enumFromToP") -- ; enumFromThenToPVar <- externalVar (fsLit "enumFromThenToP") ; enumFromToPVar <- return arithErr ; enumFromThenToPVar <- return arithErr ; updGblEnv (\env -> env {ds_parr_bi = PArrBuiltin { lengthPVar = lengthPVar , replicatePVar = replicatePVar , singletonPVar = singletonPVar , mapPVar = mapPVar , filterPVar = filterPVar , zipPVar = zipPVar , crossMapPVar = crossMapPVar , indexPVar = indexPVar , emptyPVar = emptyPVar , appPVar = appPVar , enumFromToPVar = enumFromToPVar , enumFromThenToPVar = enumFromThenToPVar } }) thing_inside } where externalVar :: FastString -> DsM Var externalVar fs = dsLookupDPHRdrEnv (mkVarOccFS fs) >>= dsLookupGlobalId arithErr = panic "Arithmetic sequences have to wait until we support type classes" dsGetFamInstEnvs :: DsM FamInstEnvs -- Gets both the external-package inst-env -- and the home-pkg inst env (includes module being compiled) dsGetFamInstEnvs = do { eps <- getEps; env <- getGblEnv ; return (eps_fam_inst_env eps, ds_fam_inst_env env) } 
dsGetMetaEnv :: DsM (NameEnv DsMetaVal) dsGetMetaEnv = do { env <- getLclEnv; return (dsl_meta env) } dsLookupMetaEnv :: Name -> DsM (Maybe DsMetaVal) dsLookupMetaEnv name = do { env <- getLclEnv; return (lookupNameEnv (dsl_meta env) name) } dsExtendMetaEnv :: DsMetaEnv -> DsM a -> DsM a dsExtendMetaEnv menv thing_inside = updLclEnv (\env -> env { dsl_meta = dsl_meta env `plusNameEnv` menv }) thing_inside -- | Gets a reference to the SPT entries created so far. dsGetStaticBindsVar :: DsM (IORef [(Fingerprint, (Id,CoreExpr))]) dsGetStaticBindsVar = fmap ds_static_binds getGblEnv discardWarningsDs :: DsM a -> DsM a -- Ignore warnings inside the thing inside; -- used to ignore inaccessable cases etc. inside generated code discardWarningsDs thing_inside = do { env <- getGblEnv ; old_msgs <- readTcRef (ds_msgs env) ; result <- thing_inside -- Revert messages to old_msgs ; writeTcRef (ds_msgs env) old_msgs ; return result }
rahulmutt/ghcvm
compiler/Eta/DeSugar/DsMonad.hs
bsd-3-clause
21,540
2
18
6,699
4,075
2,204
1,871
335
5
{-| Module : IRTS.CodegenJavaScript Description : The JavaScript code generator. Copyright : License : BSD3 Maintainer : The Idris Community. -} {-# LANGUAGE OverloadedStrings, PatternGuards #-} module IRTS.CodegenJavaScript (codegenJavaScript , codegenNode , JSTarget(..) ) where import Data.Char import Data.Text (Text) import qualified Data.Text as T import IRTS.CodegenCommon import IRTS.JavaScript.Codegen import System.Directory import System.FilePath data JSTarget = Node | JavaScript deriving Eq htmlHeader :: Text htmlHeader = T.concat [ "<html>\n" , " <head>\n" , " <meta charset='utf-8'>\n" , " </head>\n" , " <body>\n" , " <script type='text/javascript'>\n" ] htmlFooter :: Text htmlFooter = T.concat [ "\n </script>\n" , " </body>\n" , "</html>" ] codegenJavaScript :: CodeGenerator codegenJavaScript ci = let (h, f) = if (map toLower $ takeExtension $ outputFile ci) == ".html" then (htmlHeader, htmlFooter) else ("","") in codegenJs (CGConf { header = h , footer = f , jsbnPath = "jsbn/jsbn-browser.js" , extraRunTime = "Runtime-javascript.js" } ) ci codegenNode :: CodeGenerator codegenNode ci = do codegenJs (CGConf { header = "#!/usr/bin/env node\n" , footer = "" , jsbnPath = "jsbn/jsbn-browser.js" , extraRunTime = "Runtime-node.js" } ) ci setPermissions (outputFile ci) (emptyPermissions { readable = True , executable = True , writable = True })
markuspf/Idris-dev
src/IRTS/CodegenJavaScript.hs
bsd-3-clause
2,032
0
14
859
330
196
134
46
2
{-# OPTIONS_GHC -fno-warn-orphans #-} {-# LANGUAGE FlexibleInstances, GADTs, OverloadedStrings, CPP #-} module Data.JSTarget.Print () where import Prelude hiding (LT, GT) import Data.JSTarget.AST import Data.JSTarget.Op import Data.JSTarget.PP as PP import Data.ByteString.Builder import Control.Monad import Data.Char import Numeric (showHex) import qualified Data.ByteString.Char8 as BS import qualified Data.ByteString.UTF8 as BS instance Pretty Var where pp (Foreign name) = do put name doComment <- getOpt externalAnnotation when doComment . put $ byteString "/* EXTERNAL */" pp (Internal name@(Name _ _) comment _) = do hsnames <- getOpt preserveNames if hsnames then put $ buildStgName name else do pp name doComment <- getOpt nameComments when doComment $ do when (not $ BS.null comment) $ do put $ byteString "/* " put comment put $ byteString " */" instance Pretty Name where pp name = finalNameFor name >>= put . buildFinalName instance Pretty LHS where pp (NewVar _ v) = "var " .+. pp v pp (LhsExp _ ex) = pp ex instance Pretty Lit where pp (LNum d) = put d pp (LStr s) = "\"" .+. put (fixQuotes $ BS.toString s) .+. "\"" where fixQuotes ('\\':xs) = "\\\\" ++ fixQuotes xs fixQuotes ('"':xs) = '\\':'"' : fixQuotes xs fixQuotes ('\'':xs) = '\\':'\'' : fixQuotes xs fixQuotes ('\r':xs) = '\\':'r' : fixQuotes xs fixQuotes ('\n':xs) = '\\':'n' : fixQuotes xs fixQuotes (x:xs) | ord x <= 127 = x : fixQuotes xs | otherwise = toHex x ++ fixQuotes xs fixQuotes _ = [] pp (LBool b) = put b pp (LInt n) = put n pp (LNull) = "null" -- | Generate a Haskell \uXXXX escape sequence for a char if it's >127. toHex :: Char -> String toHex c = case ord c of n | n < 127 -> [c] | otherwise -> "\\u" ++ exactlyFour (showHex (n `rem` 65536) "") -- | Truncate and pad a string to exactly four characters. '0' is used for padding. 
exactlyFour :: String -> String exactlyFour s = pad (4-len) $ drop (len-4) s where len = length s pad 0 cs = cs pad n cs = '0' : pad (n-1) cs -- | Default separator; comma followed by space, if spaces are enabled. sep :: PP () sep = "," .+. sp instance Pretty Exp where pp (Var v) = pp v pp (Lit l) = pp l pp (JSLit l) = put l pp (Not ex) = do case neg ex of Just ex' -> pp ex' _ -> if expPrec (Not ex) > expPrec ex then "!(" .+. pp ex .+. ")" else "!" .+. pp ex pp bop@(BinOp _ _ _) = case norm bop of BinOp op a b -> opParens op a b ex -> pp ex pp (Fun args body) = do "function(" .+. ppList sep args .+. "){" .+. newl indent $ pp body ind .+. "}" pp (Call _ call f args) = do case call of Normal True -> "B(" .+. normalCall .+. ")" Normal False -> normalCall Fast True -> "B(" .+. fastCall .+. ")" Fast False -> fastCall Method m -> pp f .+. put (BS.cons '.' m) .+. "(" .+. ppList sep args .+. ")" where normalCall = "A(" .+. pp f .+. ",[" .+. ppList sep args .+. "])" fastCall = ppCallFun f .+. "(" .+. ppList sep args .+. ")" ppCallFun fun@(Fun _ _) = "(" .+. pp fun .+. ")" ppCallFun fun = pp fun pp e@(Index arr ix) = do if expPrec e > expPrec arr then "(" .+. pp arr .+. ")" else pp arr "[" .+. pp ix .+. "]" pp (Arr exs) = do "[" .+. ppList sep exs .+. "]" pp (AssignEx l r) = do pp l .+. sp .+. "=" .+. sp .+. pp r pp e@(IfEx c th el) = do if expPrec e > expPrec c then "(" .+. pp c .+. ")" else pp c sp .+. "?" .+. sp .+. pp th .+. sp .+. ":" .+. sp .+. pp el pp (Eval x) = do "E(" .+. pp x .+. ")" pp (Thunk True x) = do "new T(function(){" .+. newl .+. indent (pp x) .+. "})" pp (Thunk False x) = do "new T(function(){" .+. newl .+. indent (pp x) .+. "},1)" instance Pretty (Var, Exp) where pp (v, ex) = pp v .+. sp .+. "=" .+. sp .+. pp ex instance Pretty Bool where pp True = "true" pp False = "false" -- | Print a series of NewVars at once, to avoid unnecessary "var" keywords. ppAssigns :: Stm -> PP () ppAssigns stm = do line $ "var " .+. ppList sep assigns .+. 
";" pp next where (assigns, next) = gather [] stm gather as (Assign (NewVar _ v) ex nxt) = gather ((v, ex):as) nxt gather as nxt = (reverse as, nxt) -- | Returns the final statement in a case branch. finalStm :: Stm -> PP Stm finalStm s = case s of Assign _ _ s' -> finalStm s' Case _ _ _ next -> finalStm next Forever s' -> finalStm s' _ -> return s instance Pretty Stm where pp (Case cond def alts next) = do prettyCase cond def alts pp next pp (Forever stm) = do line "while(1){" indent $ pp stm line "}" pp s@(Assign lhs ex next) = do case lhs of _ | lhs == blackHole -> line (pp ex .+. ";") >> pp next NewVar _ _ -> ppAssigns s LhsExp _ _ -> line (pp lhs .+. sp .+. "=" .+. sp .+. pp ex .+. ";") >> pp next pp (Return ex) = do line $ "return " .+. pp ex .+. ";" pp (Cont) = do line "continue;" pp (Stop) = do return () pp (Tailcall call) = do line $ "return new F(function(){return " .+. pp call .+. ";});" pp (ThunkRet ex) = do line $ "return " .+. pp ex .+. ";" neg :: Exp -> Maybe Exp neg (BinOp Eq a b) = Just $ BinOp Neq a b neg (BinOp Neq a b) = Just $ BinOp Eq a b neg (BinOp GT a b) = Just $ BinOp LTE a b neg (BinOp LT a b) = Just $ BinOp GTE a b neg (BinOp GTE a b) = Just $ BinOp LT a b neg (BinOp LTE a b) = Just $ BinOp GT a b neg _ = Nothing -- | Turn eligible case statements into if statements. prettyCase :: Exp -> Stm -> [Alt] -> PP () prettyCase cond def [(con, branch)] = do case (def, branch) of (_, Stop) -> do line $ "if(" .+. pp (neg' (test con)) .+. "){" indent $ pp def line "}" (Stop, _) -> do line $ "if(" .+. pp (test con) .+. "){" indent $ pp branch line "}" _ -> do line $ "if(" .+. pp (test con) .+."){" indent $ pp branch line "}else{" indent $ pp def line "}" where test (Lit (LBool True)) = cond test (Lit (LBool False)) = Not cond test (Lit (LNum 0)) = Not cond test c = BinOp Eq cond c neg' c = maybe (Not c) id (neg c) prettyCase _ def [] = do pp def prettyCase cond def alts = do line $ "switch(" .+. pp cond .+. 
"){" indent $ do mapM_ pp alts line $ "default:" indent $ pp def line "}" instance Pretty Alt where pp (con, branch) = do line $ "case " .+. pp con .+. ":" indent $ do pp branch s <- finalStm branch case s of Return _ -> return () Cont -> return () _ -> line "break;"; opParens :: BinOp -> Exp -> Exp -> PP () opParens Sub a (BinOp Sub (Lit (LNum 0)) b) = opParens Add a b opParens Sub a (Lit (LNum n)) | n < 0 = opParens Add a (Lit (LNum (-n))) opParens Sub (Lit (LNum 0)) b = case b of BinOp _ _ _ -> " -(" .+. pp b .+. ")" _ -> " -" .+. pp b opParens op a b = do let bparens = case b of Lit (LNum n) | n < 0 -> \x -> "(".+. pp x .+. ")" _ -> parensR parensL a .+. put (stringUtf8 $ show op) .+. bparens b where parensL x = if expPrec x < opPrec op then "(" .+. pp x .+. ")" else pp x parensR x = if expPrec x <= opPrec op then "(" .+. pp x .+. ")" else pp x -- | Normalize an operator expression by shifting parentheses to the left for -- all associative operators and eliminating comparisons with true/false. norm :: Exp -> Exp norm (BinOp op a (BinOp op' b c)) | op == op' && opIsAssoc op = norm (BinOp op (BinOp op a b) c) norm (BinOp Eq a (Lit (LBool True))) = norm a norm (BinOp Eq (Lit (LBool True)) b) = norm b norm (BinOp Eq a (Lit (LBool False))) = Not (norm a) norm (BinOp Eq (Lit (LBool False)) b) = Not (norm b) norm (BinOp Neq a (Lit (LBool True))) = Not (norm a) norm (BinOp Neq (Lit (LBool True)) b) = Not (norm b) norm (BinOp Neq a (Lit (LBool False))) = norm a norm (BinOp Neq (Lit (LBool False)) b) = norm b norm e = e
beni55/haste-compiler
src/Data/JSTarget/Print.hs
bsd-3-clause
8,458
0
19
2,723
3,737
1,796
1,941
242
6
{-# OPTIONS_HADDOCK show-extensions #-} {-# LANGUAGE DataKinds #-} {- | Copyright : Copyright (C) 2006-2018 Bjorn Buckwalter License : BSD3 Maintainer : [email protected] Stability : Stable Portability: GHC only = Summary This module defines type synonyms for common dimensionalities and the associated quantity types. Additional dimensionalities and quantity types will be added on an as-needed basis. The definitions in this module are grouped so that a type synonym for the dimensionality is defined first in terms of base dimension exponents. Then a type synonym for the corresponding quantity type is defined. If there are several quantity types with the same dimensionality type synonyms are provided for each quantity type. = References 1. #note1# http://physics.nist.gov/Pubs/SP811/ -} module Numeric.Units.Dimensional.Quantities ( -- * Quantities from the NIST Guide -- $nist-guide Area, Volume, Velocity, Acceleration, WaveNumber, MassDensity, Density, SpecificVolume, CurrentDensity, MagneticFieldStrength, AmountOfSubstanceConcentration, Concentration, Luminance, -- $table3 PlaneAngle, SolidAngle, Frequency, Force, Pressure, Stress, Energy, Work, QuantityOfHeat, Power, RadiantFlux, ElectricCharge, QuantityOfElectricity, ElectricPotential, PotentialDifference, ElectromotiveForce, Capacitance, ElectricResistance, ElectricConductance, MagneticFlux, MagneticFluxDensity, Inductance, LuminousFlux, Illuminance, CelsiusTemperature, Activity, AbsorbedDose, SpecificEnergy, Kerma, DoseEquivalent, AmbientDoseEquivalent, DirectionalDoseEquivalent, PersonalDoseEquivalent, EquivalentDose, CatalyticActivity, -- $table4 AngularVelocity, AngularAcceleration, DynamicViscosity, MomentOfForce, SurfaceTension, HeatFluxDensity, Irradiance, RadiantIntensity, Radiance, HeatCapacity, Entropy, SpecificHeatCapacity, SpecificEntropy, ThermalConductivity, EnergyDensity, ElectricFieldStrength, ElectricChargeDensity, ElectricFluxDensity, Permittivity, Permeability, MolarEnergy, MolarEntropy, 
MolarHeatCapacity, Exposure, AbsorbedDoseRate, -- * Quantities not from the NIST Guide -- $not-nist-guide Impulse, Momentum, MassFlow, VolumeFlow, GravitationalParameter, KinematicViscosity, FirstMassMoment, MomentOfInertia, AngularMomentum, ThermalResistivity, ThermalConductance, ThermalResistance, HeatTransferCoefficient, ThermalAdmittance, ThermalInsulance, Jerk, Angle, Thrust, Torque, EnergyPerUnitMass, -- * Powers of Unit Lengths -- $powers-of-length-units square, cubic, -- * Dimension Aliases -- $dimension-aliases DArea, DVolume, DVelocity, DAcceleration, DWaveNumber, DMassDensity, DDensity, DSpecificVolume, DCurrentDensity, DMagneticFieldStrength, DAmountOfSubstanceConcentration, DConcentration, DLuminance, DPlaneAngle, DSolidAngle, DFrequency, DForce, DPressure, DStress, DEnergy, DWork, DQuantityOfHeat, DPower, DRadiantFlux, DElectricCharge, DQuantityOfElectricity, DElectricPotential, DPotentialDifference, DElectromotiveForce, DCapacitance, DElectricResistance, DElectricConductance, DMagneticFlux, DMagneticFluxDensity, DInductance, DLuminousFlux, DIlluminance, DCelsiusTemperature, DActivity, DAbsorbedDose, DSpecificEnergy, DKerma, DDoseEquivalent, DAmbientDoseEquivalent, DDirectionalDoseEquivalent, DPersonalDoseEquivalent, DEquivalentDose, DCatalyticActivity, DAngularVelocity, DAngularAcceleration, DDynamicViscosity, DMomentOfForce, DSurfaceTension, DHeatFluxDensity, DIrradiance, DRadiantIntensity, DRadiance, DHeatCapacity, DEntropy, DSpecificHeatCapacity, DSpecificEntropy, DThermalConductivity, DEnergyDensity, DElectricFieldStrength, DElectricChargeDensity, DElectricFluxDensity, DPermittivity, DPermeability, DMolarEnergy, DMolarEntropy, DMolarHeatCapacity, DExposure, DAbsorbedDoseRate, DImpulse, DMomentum, DMassFlow, DVolumeFlow, DGravitationalParameter, DKinematicViscosity, DFirstMassMoment, DMomentOfInertia, DAngularMomentum, DThermalResistivity, DThermalConductance, DThermalResistance, DHeatTransferCoefficient, DThermalAdmittance, DThermalInsulance, 
DJerk, DAngle, DThrust, DTorque, DEnergyPerUnitMass ) where import Numeric.Units.Dimensional ( Dimension (Dim), Quantity, Dimensionless , DOne, DLuminousIntensity, DThermodynamicTemperature , Unit, DLength, (^) -- Used only for 'square' and 'cubic'. , Metricality(..) ) import Numeric.NumType.DK.Integers ( TypeInt (Neg3, Neg2, Neg1, Zero, Pos1, Pos2, Pos3, Pos4) , pos2, pos3 -- Used only for 'square' and 'cubic'. ) import Prelude (Fractional) import Data.Typeable {- $nist-guide The following quantities are all from the NIST publication "Guide for the Use of the International System of Units (SI)" <#note1 [1]>. Any chapters, sections or tables referenced are from <#note1 [1]> unless otherwise specified. For lack of better organization we provide definitions grouped by table in <#note1 [1]>. == Table 2 "Examples of SI derived units expressed in terms of SI base units." -} {- $dimension-aliases For each 'Quantity' alias supplied above, we also supply a corresponding 'Dimension' alias. These dimension aliases may be convenient for supplying type signatures for 'Unit's or for other type-level dimensional programming. -} type DArea = 'Dim 'Pos2 'Zero 'Zero 'Zero 'Zero 'Zero 'Zero type Area = Quantity DArea type DVolume = 'Dim 'Pos3 'Zero 'Zero 'Zero 'Zero 'Zero 'Zero type Volume = Quantity DVolume type DVelocity = 'Dim 'Pos1 'Zero 'Neg1 'Zero 'Zero 'Zero 'Zero type Velocity = Quantity DVelocity type DAcceleration = 'Dim 'Pos1 'Zero 'Neg2 'Zero 'Zero 'Zero 'Zero type Acceleration = Quantity DAcceleration type DWaveNumber = 'Dim 'Neg1 'Zero 'Zero 'Zero 'Zero 'Zero 'Zero type WaveNumber = Quantity DWaveNumber type DMassDensity = 'Dim 'Neg3 'Pos1 'Zero 'Zero 'Zero 'Zero 'Zero type DDensity = DMassDensity type MassDensity = Quantity DMassDensity type Density = MassDensity -- Short name. 
type DSpecificVolume = 'Dim 'Pos3 'Neg1 'Zero 'Zero 'Zero 'Zero 'Zero type SpecificVolume = Quantity DSpecificVolume type DCurrentDensity = 'Dim 'Neg2 'Zero 'Zero 'Pos1 'Zero 'Zero 'Zero type CurrentDensity = Quantity DCurrentDensity type DMagneticFieldStrength = 'Dim 'Neg1 'Zero 'Zero 'Pos1 'Zero 'Zero 'Zero type MagneticFieldStrength = Quantity DMagneticFieldStrength type DAmountOfSubstanceConcentration = 'Dim 'Neg3 'Zero 'Zero 'Zero 'Zero 'Pos1 'Zero type DConcentration = DAmountOfSubstanceConcentration type AmountOfSubstanceConcentration = Quantity DAmountOfSubstanceConcentration type Concentration = AmountOfSubstanceConcentration -- Short name. type DLuminance = 'Dim 'Neg2 'Zero 'Zero 'Zero 'Zero 'Zero 'Pos1 type Luminance = Quantity DLuminance {- $table3 == Table 3 SI coherent derived units with special names and symbols. -} type DPlaneAngle = DOne type PlaneAngle = Dimensionless type DSolidAngle = DOne type SolidAngle = Dimensionless type DFrequency = 'Dim 'Zero 'Zero 'Neg1 'Zero 'Zero 'Zero 'Zero type Frequency = Quantity DFrequency type DForce = 'Dim 'Pos1 'Pos1 'Neg2 'Zero 'Zero 'Zero 'Zero type Force = Quantity DForce type DPressure = 'Dim 'Neg1 'Pos1 'Neg2 'Zero 'Zero 'Zero 'Zero type DStress = DPressure type Pressure = Quantity DPressure type Stress = Quantity DStress type DEnergy = 'Dim 'Pos2 'Pos1 'Neg2 'Zero 'Zero 'Zero 'Zero type DWork = DEnergy type DQuantityOfHeat = DEnergy type Energy = Quantity DEnergy type Work = Quantity DWork type QuantityOfHeat = Quantity DQuantityOfHeat type DPower = 'Dim 'Pos2 'Pos1 'Neg3 'Zero 'Zero 'Zero 'Zero type DRadiantFlux = DPower type Power = Quantity DPower type RadiantFlux = Quantity DRadiantFlux type DElectricCharge = 'Dim 'Zero 'Zero 'Pos1 'Pos1 'Zero 'Zero 'Zero type DQuantityOfElectricity = DElectricCharge type ElectricCharge = Quantity DElectricCharge type QuantityOfElectricity = Quantity DQuantityOfElectricity type DElectricPotential = 'Dim 'Pos2 'Pos1 'Neg3 'Neg1 'Zero 'Zero 'Zero type 
DPotentialDifference = DElectricPotential type DElectromotiveForce = DElectricPotential type ElectricPotential = Quantity DElectricPotential type PotentialDifference = Quantity DPotentialDifference type ElectromotiveForce = Quantity DElectromotiveForce type DCapacitance = 'Dim 'Neg2 'Neg1 'Pos4 'Pos2 'Zero 'Zero 'Zero type Capacitance = Quantity DCapacitance type DElectricResistance = 'Dim 'Pos2 'Pos1 'Neg3 'Neg2 'Zero 'Zero 'Zero type ElectricResistance = Quantity DElectricResistance type DElectricConductance = 'Dim 'Neg2 'Neg1 'Pos3 'Pos2 'Zero 'Zero 'Zero type ElectricConductance = Quantity DElectricConductance type DMagneticFlux = 'Dim 'Pos2 'Pos1 'Neg2 'Neg1 'Zero 'Zero 'Zero type MagneticFlux = Quantity DMagneticFlux type DMagneticFluxDensity = 'Dim 'Zero 'Pos1 'Neg2 'Neg1 'Zero 'Zero 'Zero type MagneticFluxDensity = Quantity DMagneticFluxDensity type DInductance = 'Dim 'Pos2 'Pos1 'Neg2 'Neg2 'Zero 'Zero 'Zero type Inductance = Quantity DInductance type DLuminousFlux = DLuminousIntensity type LuminousFlux = Quantity DLuminousFlux type DIlluminance = 'Dim 'Neg2 'Zero 'Zero 'Zero 'Zero 'Zero 'Pos1 type Illuminance = Quantity DIlluminance type DCelsiusTemperature = DThermodynamicTemperature type CelsiusTemperature = Quantity DCelsiusTemperature type DActivity = DFrequency -- Activity of a radionuclide. type Activity = Quantity DActivity type DAbsorbedDose = 'Dim 'Pos2 'Zero 'Neg2 'Zero 'Zero 'Zero 'Zero type DSpecificEnergy = DAbsorbedDose type DKerma = DAbsorbedDose type AbsorbedDose = Quantity DAbsorbedDose type SpecificEnergy = Quantity DSpecificEnergy -- Specific energy imparted. 
type Kerma = Quantity DKerma type DDoseEquivalent = DAbsorbedDose type DAmbientDoseEquivalent = DDoseEquivalent type DDirectionalDoseEquivalent = DDoseEquivalent type DPersonalDoseEquivalent = DDoseEquivalent type DEquivalentDose = DDoseEquivalent type DoseEquivalent = Quantity DDoseEquivalent type AmbientDoseEquivalent = DoseEquivalent type DirectionalDoseEquivalent = DoseEquivalent type PersonalDoseEquivalent = DoseEquivalent type EquivalentDose = DoseEquivalent type DCatalyticActivity = 'Dim 'Zero 'Zero 'Neg1 'Zero 'Zero 'Pos1 'Zero type CatalyticActivity = Quantity DCatalyticActivity {- $table4 == Table 4 "Examples of SI coherent derived units expressed with the aid of SI derived units having special names and symbols." We use the same grouping as for table 2. -} type DAngularVelocity = DFrequency type AngularVelocity = Quantity DAngularVelocity type DAngularAcceleration = 'Dim 'Zero 'Zero 'Neg2 'Zero 'Zero 'Zero 'Zero type AngularAcceleration = Quantity DAngularAcceleration type DDynamicViscosity = 'Dim 'Neg1 'Pos1 'Neg1 'Zero 'Zero 'Zero 'Zero type DynamicViscosity = Quantity DDynamicViscosity type DMomentOfForce = DEnergy type MomentOfForce = Quantity DMomentOfForce type DSurfaceTension = 'Dim 'Zero 'Pos1 'Neg2 'Zero 'Zero 'Zero 'Zero type SurfaceTension = Quantity DSurfaceTension type DHeatFluxDensity = 'Dim 'Zero 'Pos1 'Neg3 'Zero 'Zero 'Zero 'Zero type DIrradiance = DHeatFluxDensity type HeatFluxDensity = Quantity DHeatFluxDensity type Irradiance = Quantity DIrradiance type DRadiantIntensity = DPower type RadiantIntensity = Quantity DRadiantIntensity type DRadiance = DIrradiance type Radiance = Quantity DRadiance type DHeatCapacity = 'Dim 'Pos2 'Pos1 'Neg2 'Zero 'Neg1 'Zero 'Zero type DEntropy = DHeatCapacity type HeatCapacity = Quantity DHeatCapacity type Entropy = Quantity DEntropy type DSpecificHeatCapacity = 'Dim 'Pos2 'Zero 'Neg2 'Zero 'Neg1 'Zero 'Zero type DSpecificEntropy = DSpecificHeatCapacity type SpecificHeatCapacity = Quantity 
DSpecificHeatCapacity type SpecificEntropy = Quantity DSpecificEntropy {- Specific energy was already defined in table 3. -} type DThermalConductivity = 'Dim 'Pos1 'Pos1 'Neg3 'Zero 'Neg1 'Zero 'Zero type ThermalConductivity = Quantity DThermalConductivity type DEnergyDensity = DPressure type EnergyDensity = Quantity DEnergyDensity type DElectricFieldStrength = 'Dim 'Pos1 'Pos1 'Neg3 'Neg1 'Zero 'Zero 'Zero type ElectricFieldStrength = Quantity DElectricFieldStrength type DElectricChargeDensity = 'Dim 'Neg3 'Zero 'Pos1 'Pos1 'Zero 'Zero 'Zero type ElectricChargeDensity = Quantity DElectricChargeDensity type DElectricFluxDensity = 'Dim 'Neg2 'Zero 'Pos1 'Pos1 'Zero 'Zero 'Zero type ElectricFluxDensity = Quantity DElectricFluxDensity type DPermittivity = 'Dim 'Neg3 'Neg1 'Pos4 'Pos2 'Zero 'Zero 'Zero type Permittivity = Quantity DPermittivity type DPermeability = 'Dim 'Pos1 'Pos1 'Neg2 'Neg2 'Zero 'Zero 'Zero type Permeability = Quantity DPermeability type DMolarEnergy = 'Dim 'Pos2 'Pos1 'Neg2 'Zero 'Zero 'Neg1 'Zero type MolarEnergy = Quantity DMolarEnergy type DMolarEntropy = 'Dim 'Pos2 'Pos1 'Neg2 'Zero 'Neg1 'Neg1 'Zero type DMolarHeatCapacity = DMolarEntropy type MolarEntropy = Quantity DMolarEntropy type MolarHeatCapacity = Quantity DMolarHeatCapacity type DExposure = 'Dim 'Zero 'Neg1 'Pos1 'Pos1 'Zero 'Zero 'Zero type Exposure = Quantity DExposure -- Exposure to x and gamma rays. type DAbsorbedDoseRate = 'Dim 'Pos2 'Zero 'Neg3 'Zero 'Zero 'Zero 'Zero type AbsorbedDoseRate = Quantity DAbsorbedDoseRate {- $not-nist-guide Here we define additional quantities on an as-needed basis. We also provide some synonyms that we anticipate will be useful. 
-} type DImpulse = 'Dim 'Pos1 'Pos1 'Neg1 'Zero 'Zero 'Zero 'Zero type Impulse = Quantity DImpulse type DMomentum = DImpulse type Momentum = Quantity DMomentum type DMassFlow = 'Dim 'Zero 'Pos1 'Neg1 'Zero 'Zero 'Zero 'Zero type MassFlow = Quantity DMassFlow type DVolumeFlow = 'Dim 'Pos3 'Zero 'Neg1 'Zero 'Zero 'Zero 'Zero type VolumeFlow = Quantity DVolumeFlow type DGravitationalParameter = 'Dim 'Pos3 'Zero 'Neg2 'Zero 'Zero 'Zero 'Zero type GravitationalParameter = Quantity DGravitationalParameter type DKinematicViscosity = 'Dim 'Pos2 'Zero 'Neg1 'Zero 'Zero 'Zero 'Zero type KinematicViscosity = Quantity DKinematicViscosity type DFirstMassMoment = 'Dim 'Pos1 'Pos1 'Zero 'Zero 'Zero 'Zero 'Zero type FirstMassMoment = Quantity DFirstMassMoment type DMomentOfInertia = 'Dim 'Pos2 'Pos1 'Zero 'Zero 'Zero 'Zero 'Zero type MomentOfInertia = Quantity DMomentOfInertia type DAngularMomentum = 'Dim 'Pos2 'Pos1 'Neg1 'Zero 'Zero 'Zero 'Zero type AngularMomentum = Quantity DAngularMomentum {- The reciprocal of thermal conductivity. -} type DThermalResistivity = 'Dim 'Neg1 'Neg1 'Pos3 'Zero 'Pos1 'Zero 'Zero type ThermalResistivity = Quantity DThermalResistivity {- Thermal conductance and resistance quantities after http://en.wikipedia.org/wiki/Thermal_conductivity#Definitions. 
-} type DThermalConductance = 'Dim 'Pos2 'Pos1 'Neg3 'Zero 'Neg1 'Zero 'Zero type ThermalConductance = Quantity DThermalConductance type DThermalResistance = 'Dim 'Neg2 'Neg1 'Pos3 'Zero 'Pos1 'Zero 'Zero type ThermalResistance = Quantity DThermalResistance type DHeatTransferCoefficient = 'Dim 'Zero 'Pos1 'Neg3 'Zero 'Neg1 'Zero 'Zero type HeatTransferCoefficient = Quantity DHeatTransferCoefficient type DThermalAdmittance = DHeatTransferCoefficient type ThermalAdmittance = HeatTransferCoefficient type DThermalInsulance = 'Dim 'Zero 'Neg1 'Pos3 'Zero 'Pos1 'Zero 'Zero type ThermalInsulance = Quantity DThermalInsulance type DJerk = 'Dim 'Pos1 'Zero 'Neg3 'Zero 'Zero 'Zero 'Zero type Jerk = Quantity DJerk type Angle = PlaneAngle -- Abbreviation type DAngle = DPlaneAngle -- Abbreviation type Thrust = Force type DThrust = DForce type Torque = MomentOfForce type DTorque = DMomentOfForce type EnergyPerUnitMass = SpecificEnergy type DEnergyPerUnitMass = DSpecificEnergy {- $powers-of-length-units It is permissible to express powers of length units by prefixing 'square' and 'cubic' (see section 9.6 "Spelling unit names raised to powers" of <#note1 [1]>). These definitions may seem slightly out of place but these is no obvious place where they should be. Here they are at least close to the definitions of 'DArea' and 'DVolume'. -} -- $setup -- >>> import Numeric.Units.Dimensional.Prelude -- | Constructs a unit of area from a unit of length, taking the area of a square whose sides are that length. -- -- >>> 64 *~ square meter == (8 *~ meter) ^ pos2 -- True square :: (Fractional a, Typeable m) => Unit m DLength a -> Unit 'NonMetric DArea a square x = x ^ pos2 -- | Constructs a unit of volume from a unit of length, taking the volume of a cube whose sides are that length. -- -- >>> 64 *~ cubic meter == (4 *~ meter) ^ pos3 -- True cubic :: (Fractional a, Typeable m) => Unit m DLength a -> Unit 'NonMetric DVolume a cubic x = x ^ pos3
bjornbm/dimensional-dk
src/Numeric/Units/Dimensional/Quantities.hs
bsd-3-clause
16,831
0
8
2,576
3,939
2,225
1,714
233
1
import Control.Monad (when) import Src import B main = when (funMainLib 41 /= funInternal 43) $ error "test failed"
juhp/stack
test/integration/tests/4105-test-coverage-of-internal-lib/files/test/Main.hs
bsd-3-clause
118
0
9
21
45
23
22
4
1
-- Cmm representations using Hoopl's Graph CmmNode e x. {-# LANGUAGE CPP, GADTs #-} module Cmm ( -- * Cmm top-level datatypes CmmProgram, CmmGroup, GenCmmGroup, CmmDecl, GenCmmDecl(..), CmmGraph, GenCmmGraph(..), CmmBlock, RawCmmDecl, RawCmmGroup, Section(..), CmmStatics(..), CmmStatic(..), -- ** Blocks containing lists GenBasicBlock(..), blockId, ListGraph(..), pprBBlock, -- * Cmm graphs CmmReplGraph, GenCmmReplGraph, CmmFwdRewrite, CmmBwdRewrite, -- * Info Tables CmmTopInfo(..), CmmStackInfo(..), CmmInfoTable(..), topInfoTable, ClosureTypeInfo(..), C_SRT(..), needsSRT, ProfilingInfo(..), ConstrDescription, -- * Statements, expressions and types module CmmNode, module CmmExpr, ) where import CLabel import BlockId import CmmNode import SMRep import CmmExpr import UniqSupply import Compiler.Hoopl import Outputable import Data.Word ( Word8 ) #include "HsVersions.h" ----------------------------------------------------------------------------- -- Cmm, GenCmm ----------------------------------------------------------------------------- -- A CmmProgram is a list of CmmGroups -- A CmmGroup is a list of top-level declarations -- When object-splitting is on,each group is compiled into a separate -- .o file. So typically we put closely related stuff in a CmmGroup. type CmmProgram = [CmmGroup] type GenCmmGroup d h g = [GenCmmDecl d h g] type CmmGroup = GenCmmGroup CmmStatics CmmTopInfo CmmGraph type RawCmmGroup = GenCmmGroup CmmStatics (BlockEnv CmmStatics) CmmGraph ----------------------------------------------------------------------------- -- CmmDecl, GenCmmDecl ----------------------------------------------------------------------------- -- GenCmmDecl is abstracted over -- d, the type of static data elements in CmmData -- h, the static info preceding the code of a CmmProc -- g, the control-flow graph of a CmmProc -- -- We expect there to be two main instances of this type: -- (a) C--, i.e. 
populated with various C-- constructs -- (b) Native code, populated with data/instructions -- | A top-level chunk, abstracted over the type of the contents of -- the basic blocks (Cmm or instructions are the likely instantiations). data GenCmmDecl d h g = CmmProc -- A procedure h -- Extra header such as the info table CLabel -- Entry label [GlobalReg] -- Registers live on entry. Note that the set of live -- registers will be correct in generated C-- code, but -- not in hand-written C-- code. However, -- splitAtProcPoints calculates correct liveness -- information for CmmProc's. Right now only the LLVM -- back-end relies on correct liveness information and -- for that back-end we always call splitAtProcPoints, so -- all is good. g -- Control-flow graph for the procedure's code | CmmData -- Static data Section d type CmmDecl = GenCmmDecl CmmStatics CmmTopInfo CmmGraph type RawCmmDecl = GenCmmDecl CmmStatics (BlockEnv CmmStatics) CmmGraph ----------------------------------------------------------------------------- -- Graphs ----------------------------------------------------------------------------- type CmmGraph = GenCmmGraph CmmNode data GenCmmGraph n = CmmGraph { g_entry :: BlockId, g_graph :: Graph n C C } type CmmBlock = Block CmmNode C C type CmmReplGraph e x = GenCmmReplGraph CmmNode e x type GenCmmReplGraph n e x = UniqSM (Maybe (Graph n e x)) type CmmFwdRewrite f = FwdRewrite UniqSM CmmNode f type CmmBwdRewrite f = BwdRewrite UniqSM CmmNode f ----------------------------------------------------------------------------- -- Info Tables ----------------------------------------------------------------------------- data CmmTopInfo = TopInfo { info_tbls :: BlockEnv CmmInfoTable , stack_info :: CmmStackInfo } topInfoTable :: GenCmmDecl a CmmTopInfo (GenCmmGraph n) -> Maybe CmmInfoTable topInfoTable (CmmProc infos _ _ g) = mapLookup (g_entry g) (info_tbls infos) topInfoTable _ = Nothing data CmmStackInfo = StackInfo { arg_space :: ByteOff, -- number of bytes of 
arguments on the stack on entry to the -- the proc. This is filled in by StgCmm.codeGen, and used -- by the stack allocator later. updfr_space :: Maybe ByteOff, -- XXX: this never contains anything useful, but it should. -- See comment in CmmLayoutStack. do_layout :: Bool -- Do automatic stack layout for this proc. This is -- True for all code generated by the code generator, -- but is occasionally False for hand-written Cmm where -- we want to do the stack manipulation manually. } -- | Info table as a haskell data type data CmmInfoTable = CmmInfoTable { cit_lbl :: CLabel, -- Info table label cit_rep :: SMRep, cit_prof :: ProfilingInfo, cit_srt :: C_SRT } data ProfilingInfo = NoProfilingInfo | ProfilingInfo [Word8] [Word8] -- closure_type, closure_desc -- C_SRT is what StgSyn.SRT gets translated to... -- we add a label for the table, and expect only the 'offset/length' form data C_SRT = NoC_SRT | C_SRT !CLabel !WordOff !StgHalfWord {-bitmap or escape-} deriving (Eq) needsSRT :: C_SRT -> Bool needsSRT NoC_SRT = False needsSRT (C_SRT _ _ _) = True ----------------------------------------------------------------------------- -- Static Data ----------------------------------------------------------------------------- data Section = Text | Data | ReadOnlyData | RelocatableReadOnlyData | UninitialisedData | ReadOnlyData16 -- .rodata.cst16 on x86_64, 16-byte aligned | OtherSection String data CmmStatic = CmmStaticLit CmmLit -- a literal value, size given by cmmLitRep of the literal. | CmmUninitialised Int -- uninitialised data, N bytes long | CmmString [Word8] -- string of 8-bit values only, not zero terminated. data CmmStatics = Statics CLabel -- Label of statics [CmmStatic] -- The static data itself -- ----------------------------------------------------------------------------- -- Basic blocks consisting of lists -- These are used by the LLVM and NCG backends, when populating Cmm -- with lists of instructions. 
data GenBasicBlock i = BasicBlock BlockId [i] -- | The branch block id is that of the first block in -- the branch, which is that branch's entry point blockId :: GenBasicBlock i -> BlockId blockId (BasicBlock blk_id _ ) = blk_id newtype ListGraph i = ListGraph [GenBasicBlock i] instance Outputable instr => Outputable (ListGraph instr) where ppr (ListGraph blocks) = vcat (map ppr blocks) instance Outputable instr => Outputable (GenBasicBlock instr) where ppr = pprBBlock pprBBlock :: Outputable stmt => GenBasicBlock stmt -> SDoc pprBBlock (BasicBlock ident stmts) = hang (ppr ident <> colon) 4 (vcat (map ppr stmts))
frantisekfarka/ghc-dsi
compiler/cmm/Cmm.hs
bsd-3-clause
7,316
0
9
1,740
1,009
616
393
110
1
-- (c) The University of Glasgow 2006 -- (c) The GRASP/AQUA Project, Glasgow University, 1998 -- -- Type - public interface {-# LANGUAGE CPP #-} {-# OPTIONS_GHC -fno-warn-orphans #-} -- | Main functions for manipulating types and type-related things module Type ( -- Note some of this is just re-exports from TyCon.. -- * Main data types representing Types -- $type_classification -- $representation_types TyThing(..), Type, KindOrType, PredType, ThetaType, Var, TyVar, isTyVar, -- ** Constructing and deconstructing types mkTyVarTy, mkTyVarTys, getTyVar, getTyVar_maybe, mkAppTy, mkAppTys, splitAppTy, splitAppTys, splitAppTy_maybe, repSplitAppTy_maybe, mkFunTy, mkFunTys, splitFunTy, splitFunTy_maybe, splitFunTys, splitFunTysN, funResultTy, funArgTy, zipFunTys, mkTyConApp, mkTyConTy, tyConAppTyCon_maybe, tyConAppArgs_maybe, tyConAppTyCon, tyConAppArgs, splitTyConApp_maybe, splitTyConApp, tyConAppArgN, nextRole, mkForAllTy, mkForAllTys, splitForAllTy_maybe, splitForAllTys, mkPiKinds, mkPiType, mkPiTypes, applyTy, applyTys, applyTysD, applyTysX, dropForAlls, mkNumLitTy, isNumLitTy, mkStrLitTy, isStrLitTy, coAxNthLHS, -- (Newtypes) newTyConInstRhs, -- Pred types mkFamilyTyConApp, isDictLikeTy, mkEqPred, mkCoerciblePred, mkPrimEqPred, mkReprPrimEqPred, mkClassPred, isClassPred, isEqPred, isIPPred, isIPPred_maybe, isIPTyCon, isIPClass, -- Deconstructing predicate types PredTree(..), EqRel(..), eqRelRole, classifyPredType, getClassPredTys, getClassPredTys_maybe, getEqPredTys, getEqPredTys_maybe, getEqPredRole, predTypeEqRel, -- ** Common type constructors funTyCon, -- ** Predicates on types isTypeVar, isKindVar, allDistinctTyVars, isForAllTy, isTyVarTy, isFunTy, isDictTy, isPredTy, isVoidTy, -- (Lifting and boxity) isUnLiftedType, isUnboxedTupleType, isAlgType, isClosedAlgType, isPrimitiveType, isStrictType, -- * Main data types representing Kinds -- $kind_subtyping Kind, SimpleKind, MetaKindVar, -- ** Finding the kind of a type typeKind, -- ** Common Kinds and SuperKinds 
anyKind, liftedTypeKind, unliftedTypeKind, openTypeKind, constraintKind, superKind, -- ** Common Kind type constructors liftedTypeKindTyCon, openTypeKindTyCon, unliftedTypeKindTyCon, constraintKindTyCon, anyKindTyCon, -- * Type free variables tyVarsOfType, tyVarsOfTypes, closeOverKinds, expandTypeSynonyms, typeSize, varSetElemsKvsFirst, -- * Type comparison eqType, eqTypeX, eqTypes, cmpType, cmpTypes, eqPred, eqPredX, cmpPred, eqKind, eqTyVarBndrs, -- * Forcing evaluation of types seqType, seqTypes, -- * Other views onto Types coreView, tcView, UnaryType, RepType(..), flattenRepType, repType, tyConsOfType, -- * Type representation for the code generator typePrimRep, typeRepArity, -- * Main type substitution data types TvSubstEnv, -- Representation widely visible TvSubst(..), -- Representation visible to a few friends -- ** Manipulating type substitutions emptyTvSubstEnv, emptyTvSubst, mkTvSubst, mkOpenTvSubst, zipOpenTvSubst, zipTopTvSubst, mkTopTvSubst, notElemTvSubst, getTvSubstEnv, setTvSubstEnv, zapTvSubstEnv, getTvInScope, extendTvInScope, extendTvInScopeList, extendTvSubst, extendTvSubstList, isInScope, composeTvSubst, zipTyEnv, isEmptyTvSubst, unionTvSubst, -- ** Performing substitution on types and kinds substTy, substTys, substTyWith, substTysWith, substTheta, substTyVar, substTyVars, substTyVarBndr, cloneTyVarBndr, deShadowTy, lookupTyVar, substKiWith, substKisWith, -- * Pretty-printing pprType, pprParendType, pprTypeApp, pprTyThingCategory, pprTyThing, pprTvBndr, pprTvBndrs, pprForAll, pprUserForAll, pprSigmaType, pprTheta, pprThetaArrowTy, pprClassPred, pprKind, pprParendKind, pprSourceTyCon, TyPrec(..), maybeParen, pprSigmaTypeExtraCts, -- * Tidying type related things up for printing tidyType, tidyTypes, tidyOpenType, tidyOpenTypes, tidyOpenKind, tidyTyVarBndr, tidyTyVarBndrs, tidyFreeTyVars, tidyOpenTyVar, tidyOpenTyVars, tidyTyVarOcc, tidyTopType, tidyKind, ) where #include "HsVersions.h" -- We import the representation and primitive functions from 
TypeRep. -- Many things are reexported, but not the representation! import Kind import TypeRep -- friends: import Var import VarEnv import VarSet import NameEnv import Class import TyCon import TysPrim import {-# SOURCE #-} TysWiredIn ( eqTyCon, coercibleTyCon, typeNatKind, typeSymbolKind ) import PrelNames ( eqTyConKey, coercibleTyConKey, ipClassNameKey, openTypeKindTyConKey, constraintKindTyConKey, liftedTypeKindTyConKey ) import CoAxiom -- others import Unique ( Unique, hasKey ) import BasicTypes ( Arity, RepArity ) import Util import ListSetOps ( getNth ) import Outputable import FastString import Maybes ( orElse ) import Data.Maybe ( isJust ) import Control.Monad ( guard ) infixr 3 `mkFunTy` -- Associates to the right -- $type_classification -- #type_classification# -- -- Types are one of: -- -- [Unboxed] Iff its representation is other than a pointer -- Unboxed types are also unlifted. -- -- [Lifted] Iff it has bottom as an element. -- Closures always have lifted types: i.e. any -- let-bound identifier in Core must have a lifted -- type. Operationally, a lifted object is one that -- can be entered. -- Only lifted types may be unified with a type variable. -- -- [Algebraic] Iff it is a type with one or more constructors, whether -- declared with @data@ or @newtype@. -- An algebraic type is one that can be deconstructed -- with a case expression. This is /not/ the same as -- lifted types, because we also include unboxed -- tuples in this classification. -- -- [Data] Iff it is a type declared with @data@, or a boxed tuple. -- -- [Primitive] Iff it is a built-in type that can't be expressed in Haskell. -- -- Currently, all primitive types are unlifted, but that's not necessarily -- the case: for example, @Int@ could be primitive. -- -- Some primitive types are unboxed, such as @Int#@, whereas some are boxed -- but unlifted (such as @ByteArray#@). The only primitive types that we -- classify as algebraic are the unboxed tuples. 
-- -- Some examples of type classifications that may make this a bit clearer are: -- -- @ -- Type primitive boxed lifted algebraic -- ----------------------------------------------------------------------------- -- Int# Yes No No No -- ByteArray# Yes Yes No No -- (\# a, b \#) Yes No No Yes -- ( a, b ) No Yes Yes Yes -- [a] No Yes Yes Yes -- @ -- $representation_types -- A /source type/ is a type that is a separate type as far as the type checker is -- concerned, but which has a more low-level representation as far as Core-to-Core -- passes and the rest of the back end is concerned. -- -- You don't normally have to worry about this, as the utility functions in -- this module will automatically convert a source into a representation type -- if they are spotted, to the best of it's abilities. If you don't want this -- to happen, use the equivalent functions from the "TcType" module. {- ************************************************************************ * * Type representation * * ************************************************************************ -} {-# INLINE coreView #-} coreView :: Type -> Maybe Type -- ^ In Core, we \"look through\" non-recursive newtypes and 'PredTypes': this -- function tries to obtain a different view of the supplied type given this -- -- Strips off the /top layer only/ of a type to give -- its underlying representation type. -- Returns Nothing if there is nothing to look through. -- -- By being non-recursive and inlined, this case analysis gets efficiently -- joined onto the case analysis that the caller is already doing coreView (TyConApp tc tys) | Just (tenv, rhs, tys') <- coreExpandTyCon_maybe tc tys = Just (mkAppTys (substTy (mkTopTvSubst tenv) rhs) tys') -- Its important to use mkAppTys, rather than (foldl AppTy), -- because the function part might well return a -- partially-applied type constructor; indeed, usually will! 
coreView _ = Nothing ----------------------------------------------- {-# INLINE tcView #-} tcView :: Type -> Maybe Type -- ^ Similar to 'coreView', but for the type checker, which just looks through synonyms tcView (TyConApp tc tys) | Just (tenv, rhs, tys') <- tcExpandTyCon_maybe tc tys = Just (mkAppTys (substTy (mkTopTvSubst tenv) rhs) tys') tcView _ = Nothing -- You might think that tcView belows in TcType rather than Type, but unfortunately -- it is needed by Unify, which is turn imported by Coercion (for MatchEnv and matchList). -- So we will leave it here to avoid module loops. ----------------------------------------------- expandTypeSynonyms :: Type -> Type -- ^ Expand out all type synonyms. Actually, it'd suffice to expand out -- just the ones that discard type variables (e.g. type Funny a = Int) -- But we don't know which those are currently, so we just expand all. expandTypeSynonyms ty = go ty where go (TyConApp tc tys) | Just (tenv, rhs, tys') <- tcExpandTyCon_maybe tc tys = go (mkAppTys (substTy (mkTopTvSubst tenv) rhs) tys') | otherwise = TyConApp tc (map go tys) go (LitTy l) = LitTy l go (TyVarTy tv) = TyVarTy tv go (AppTy t1 t2) = mkAppTy (go t1) (go t2) go (FunTy t1 t2) = FunTy (go t1) (go t2) go (ForAllTy tv t) = ForAllTy tv (go t) {- ************************************************************************ * * \subsection{Constructor-specific functions} * * ************************************************************************ --------------------------------------------------------------------- TyVarTy ~~~~~~~ -} -- | Attempts to obtain the type variable underlying a 'Type', and panics with the -- given message if this is not a type variable type. 
See also 'getTyVar_maybe' getTyVar :: String -> Type -> TyVar getTyVar msg ty = case getTyVar_maybe ty of Just tv -> tv Nothing -> panic ("getTyVar: " ++ msg) isTyVarTy :: Type -> Bool isTyVarTy ty = isJust (getTyVar_maybe ty) -- | Attempts to obtain the type variable underlying a 'Type' getTyVar_maybe :: Type -> Maybe TyVar getTyVar_maybe ty | Just ty' <- coreView ty = getTyVar_maybe ty' getTyVar_maybe (TyVarTy tv) = Just tv getTyVar_maybe _ = Nothing allDistinctTyVars :: [KindOrType] -> Bool allDistinctTyVars tkvs = go emptyVarSet tkvs where go _ [] = True go so_far (ty : tys) = case getTyVar_maybe ty of Nothing -> False Just tv | tv `elemVarSet` so_far -> False | otherwise -> go (so_far `extendVarSet` tv) tys {- --------------------------------------------------------------------- AppTy ~~~~~ We need to be pretty careful with AppTy to make sure we obey the invariant that a TyConApp is always visibly so. mkAppTy maintains the invariant: use it. -} -- | Applies a type to another, as in e.g. @k a@ mkAppTy :: Type -> Type -> Type mkAppTy (TyConApp tc tys) ty2 = mkTyConApp tc (tys ++ [ty2]) mkAppTy ty1 ty2 = AppTy ty1 ty2 -- Note that the TyConApp could be an -- under-saturated type synonym. GHC allows that; e.g. -- type Foo k = k a -> k a -- type Id x = x -- foo :: Foo Id -> Foo Id -- -- Here Id is partially applied in the type sig for Foo, -- but once the type synonyms are expanded all is well mkAppTys :: Type -> [Type] -> Type mkAppTys ty1 [] = ty1 mkAppTys (TyConApp tc tys1) tys2 = mkTyConApp tc (tys1 ++ tys2) mkAppTys ty1 tys2 = foldl AppTy ty1 tys2 ------------- splitAppTy_maybe :: Type -> Maybe (Type, Type) -- ^ Attempt to take a type application apart, whether it is a -- function, type constructor, or plain type application. Note -- that type family applications are NEVER unsaturated by this! 
splitAppTy_maybe ty | Just ty' <- coreView ty = splitAppTy_maybe ty' splitAppTy_maybe ty = repSplitAppTy_maybe ty ------------- repSplitAppTy_maybe :: Type -> Maybe (Type,Type) -- ^ Does the AppTy split as in 'splitAppTy_maybe', but assumes that -- any Core view stuff is already done repSplitAppTy_maybe (FunTy ty1 ty2) = Just (TyConApp funTyCon [ty1], ty2) repSplitAppTy_maybe (AppTy ty1 ty2) = Just (ty1, ty2) repSplitAppTy_maybe (TyConApp tc tys) | isDecomposableTyCon tc || tys `lengthExceeds` tyConArity tc , Just (tys', ty') <- snocView tys = Just (TyConApp tc tys', ty') -- Never create unsaturated type family apps! repSplitAppTy_maybe _other = Nothing ------------- splitAppTy :: Type -> (Type, Type) -- ^ Attempts to take a type application apart, as in 'splitAppTy_maybe', -- and panics if this is not possible splitAppTy ty = case splitAppTy_maybe ty of Just pr -> pr Nothing -> panic "splitAppTy" ------------- splitAppTys :: Type -> (Type, [Type]) -- ^ Recursively splits a type as far as is possible, leaving a residual -- type being applied to and the type arguments applied to it. Never fails, -- even if that means returning an empty list of type applications. splitAppTys ty = split ty ty [] where split orig_ty ty args | Just ty' <- coreView ty = split orig_ty ty' args split _ (AppTy ty arg) args = split ty ty (arg:args) split _ (TyConApp tc tc_args) args = let -- keep type families saturated n | isDecomposableTyCon tc = 0 | otherwise = tyConArity tc (tc_args1, tc_args2) = splitAt n tc_args in (TyConApp tc tc_args1, tc_args2 ++ args) split _ (FunTy ty1 ty2) args = ASSERT( null args ) (TyConApp funTyCon [], [ty1,ty2]) split orig_ty _ args = (orig_ty, args) {- LitTy ~~~~~ -} mkNumLitTy :: Integer -> Type mkNumLitTy n = LitTy (NumTyLit n) -- | Is this a numeric literal. We also look through type synonyms. 
-- | Is this a numeric literal type?  We also look through type synonyms.
isNumLitTy :: Type -> Maybe Integer
isNumLitTy ty | Just ty1 <- tcView ty = isNumLitTy ty1
isNumLitTy (LitTy (NumTyLit n)) = Just n
isNumLitTy _ = Nothing

-- | Make a symbol (type-level string) literal type.
mkStrLitTy :: FastString -> Type
mkStrLitTy s = LitTy (StrTyLit s)

-- | Is this a symbol literal. We also look through type synonyms.
isStrLitTy :: Type -> Maybe FastString
isStrLitTy ty | Just ty1 <- tcView ty = isStrLitTy ty1
isStrLitTy (LitTy (StrTyLit s)) = Just s
isStrLitTy _ = Nothing

{-
---------------------------------------------------------------------
                                FunTy
                                ~~~~~
-}

mkFunTy :: Type -> Type -> Type
-- ^ Creates a function type from the given argument and result type
mkFunTy arg res = FunTy arg res

-- | Make nested arrow types: @mkFunTys [t1,t2] t3 == t1 -> t2 -> t3@
mkFunTys :: [Type] -> Type -> Type
mkFunTys tys ty = foldr mkFunTy ty tys

isFunTy :: Type -> Bool
isFunTy ty = isJust (splitFunTy_maybe ty)

splitFunTy :: Type -> (Type, Type)
-- ^ Attempts to extract the argument and result types from a type, and
-- panics if that is not possible. See also 'splitFunTy_maybe'
splitFunTy ty | Just ty' <- coreView ty = splitFunTy ty'
splitFunTy (FunTy arg res) = (arg, res)
splitFunTy other           = pprPanic "splitFunTy" (ppr other)

splitFunTy_maybe :: Type -> Maybe (Type, Type)
-- ^ Attempts to extract the argument and result types from a type
splitFunTy_maybe ty | Just ty' <- coreView ty = splitFunTy_maybe ty'
splitFunTy_maybe (FunTy arg res) = Just (arg, res)
splitFunTy_maybe _               = Nothing

-- | Split off all the argument types of a (possibly nested) function
-- type, returning them together with the final result type.
splitFunTys :: Type -> ([Type], Type)
splitFunTys ty = split [] ty ty
  where
    split args orig_ty ty | Just ty' <- coreView ty = split args orig_ty ty'
    split args _       (FunTy arg res) = split (arg:args) res res
    split args orig_ty _               = (reverse args, orig_ty)

splitFunTysN :: Int -> Type -> ([Type], Type)
-- ^ Split off exactly the given number argument types, and panics if that is not possible
splitFunTysN 0 ty = ([], ty)
splitFunTysN n ty = ASSERT2( isFunTy ty, int n <+> ppr ty )
                    case splitFunTy ty of { (arg, res) ->
                    case splitFunTysN (n-1) res of { (args, res) ->
                    (arg:args, res) }}

-- | Splits off argument types from the given type and associating
-- them with the things in the input list from left to right. The
-- final result type is returned, along with the resulting pairs of
-- objects and types, albeit with the list of pairs in reverse order.
-- Panics if there are not enough argument types for the input list.
zipFunTys :: Outputable a => [a] -> Type -> ([(a, Type)], Type)
zipFunTys orig_xs orig_ty = split [] orig_xs orig_ty orig_ty
  where
    split acc []     nty _   = (reverse acc, nty)
    split acc xs     nty ty
          | Just ty' <- coreView ty = split acc xs nty ty'
    split acc (x:xs) _   (FunTy arg res) = split ((x,arg):acc) xs res res
    split _   _      _   _   = pprPanic "zipFunTys" (ppr orig_xs <+> ppr orig_ty)

funResultTy :: Type -> Type
-- ^ Extract the function result type and panic if that is not possible
funResultTy ty | Just ty' <- coreView ty = funResultTy ty'
funResultTy (FunTy _arg res) = res
funResultTy ty               = pprPanic "funResultTy" (ppr ty)

funArgTy :: Type -> Type
-- ^ Extract the function argument type and panic if that is not possible
funArgTy ty | Just ty' <- coreView ty = funArgTy ty'
funArgTy (FunTy arg _res) = arg
funArgTy ty               = pprPanic "funArgTy" (ppr ty)

{-
---------------------------------------------------------------------
                                TyConApp
                                ~~~~~~~~
-}

-- | A key function: builds a 'TyConApp' or 'FunTy' as appropriate to
-- its arguments. Applies its arguments to the constructor from left to right.
mkTyConApp :: TyCon -> [Type] -> Type
mkTyConApp tycon tys
  | isFunTyCon tycon, [ty1,ty2] <- tys
  = FunTy ty1 ty2
  | otherwise
  = TyConApp tycon tys

-- splitTyConApp "looks through" synonyms, because they don't
-- mean a distinct type, but all other type-constructor applications
-- including functions are returned as Just ..

-- | The same as @fst . splitTyConApp@
tyConAppTyCon_maybe :: Type -> Maybe TyCon
tyConAppTyCon_maybe ty | Just ty' <- coreView ty = tyConAppTyCon_maybe ty'
tyConAppTyCon_maybe (TyConApp tc _) = Just tc
tyConAppTyCon_maybe (FunTy {})      = Just funTyCon
tyConAppTyCon_maybe _               = Nothing

tyConAppTyCon :: Type -> TyCon
tyConAppTyCon ty = tyConAppTyCon_maybe ty `orElse` pprPanic "tyConAppTyCon" (ppr ty)

-- | The same as @snd . splitTyConApp@
tyConAppArgs_maybe :: Type -> Maybe [Type]
tyConAppArgs_maybe ty | Just ty' <- coreView ty = tyConAppArgs_maybe ty'
tyConAppArgs_maybe (TyConApp _ tys) = Just tys
tyConAppArgs_maybe (FunTy arg res)  = Just [arg,res]
tyConAppArgs_maybe _                = Nothing

tyConAppArgs :: Type -> [Type]
tyConAppArgs ty = tyConAppArgs_maybe ty `orElse` pprPanic "tyConAppArgs" (ppr ty)

tyConAppArgN :: Int -> Type -> Type
-- Extract the Nth (zero-indexed) argument of a TyConApp;
-- panics if the type is not an application or has too few arguments
tyConAppArgN n ty
  = case tyConAppArgs_maybe ty of
      Just tys -> ASSERT2( n < length tys, ppr n <+> ppr tys ) tys !! n
      Nothing  -> pprPanic "tyConAppArgN" (ppr n <+> ppr ty)

-- | Attempts to tease a type apart into a type constructor and the application
-- of a number of arguments to that constructor. Panics if that is not possible.
-- See also 'splitTyConApp_maybe'
splitTyConApp :: Type -> (TyCon, [Type])
splitTyConApp ty = case splitTyConApp_maybe ty of
                   Just stuff -> stuff
                   Nothing    -> pprPanic "splitTyConApp" (ppr ty)

-- | Attempts to tease a type apart into a type constructor and the application
-- of a number of arguments to that constructor
splitTyConApp_maybe :: Type -> Maybe (TyCon, [Type])
splitTyConApp_maybe ty | Just ty' <- coreView ty = splitTyConApp_maybe ty'
splitTyConApp_maybe (TyConApp tc tys) = Just (tc, tys)
splitTyConApp_maybe (FunTy arg res)   = Just (funTyCon, [arg,res])
splitTyConApp_maybe _                 = Nothing

-- | What is the role assigned to the next parameter of this type? Usually,
-- this will be 'Nominal', but if the type is a 'TyConApp', we may be able to
-- do better. The type does *not* have to be well-kinded when applied for this
-- to work!
nextRole :: Type -> Role
nextRole ty
  | Just (tc, tys) <- splitTyConApp_maybe ty
  , let num_tys = length tys
  , num_tys < tyConArity tc
  = tyConRoles tc `getNth` num_tys

  | otherwise
  = Nominal

newTyConInstRhs :: TyCon -> [Type] -> Type
-- ^ Unwrap one 'layer' of newtype on a type constructor and its
-- arguments, using an eta-reduced version of the @newtype@ if possible.
-- This requires tys to have at least @newTyConInstArity tycon@ elements.
newTyConInstRhs tycon tys
    = ASSERT2( tvs `leLength` tys, ppr tycon $$ ppr tys $$ ppr tvs )
      applyTysX tvs rhs tys
  where
    (tvs, rhs) = newTyConEtadRhs tycon

{-
---------------------------------------------------------------------
                                SynTy
                                ~~~~~

Notes on type synonyms
~~~~~~~~~~~~~~~~~~~~~~
The various "split" functions (splitFunTy, splitRhoTy, splitForAllTy) try
to return type synonyms wherever possible. Thus

        type Foo a = a -> a

we want
        splitFunTys (a -> Foo a) = ([a], Foo a)
not                                ([a], a -> a)

The reason is that we then get better (shorter) type signatures in
interfaces.  Notably this plays a role in tcTySigs in TcBinds.lhs.


                Representation types
                ~~~~~~~~~~~~~~~~~~~~

Note [Nullary unboxed tuple]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We represent the nullary unboxed tuple as the unary (but void) type
Void#.  The reason for this is that the ReprArity is never
less than the Arity (as it would otherwise be for a function type like
(# #) -> Int).

As a result, ReprArity is always strictly positive if Arity is. This
is important because it allows us to distinguish at runtime between a
thunk and a function that takes a nullary unboxed tuple as an argument!
-}

-- A 'UnaryType' is a type that is not an unboxed tuple; see 'RepType'
type UnaryType = Type

data RepType = UbxTupleRep [UnaryType] -- INVARIANT: never an empty list (see Note [Nullary unboxed tuple])
             | UnaryRep UnaryType

flattenRepType :: RepType -> [UnaryType]
flattenRepType (UbxTupleRep tys) = tys
flattenRepType (UnaryRep ty)     = [ty]

-- | Looks through:
--
--      1. For-alls
--      2. Synonyms
--      3. Predicates
--      4. All newtypes, including recursive ones, but not newtype families
--
-- It's useful in the back end of the compiler.
repType :: Type -> RepType
repType ty
  = go initRecTc ty
  where
    go :: RecTcChecker -> Type -> RepType
    go rec_nts ty                       -- Expand predicates and synonyms
      | Just ty' <- coreView ty
      = go rec_nts ty'

    go rec_nts (ForAllTy _ ty)          -- Drop foralls
      = go rec_nts ty

    go rec_nts (TyConApp tc tys)        -- Expand newtypes
      | isNewTyCon tc
      , tys `lengthAtLeast` tyConArity tc
      , Just rec_nts' <- checkRecTc rec_nts tc   -- See Note [Expanding newtypes] in TyCon
      = go rec_nts' (newTyConInstRhs tc tys)

      | isUnboxedTupleTyCon tc
      = if null tys
        then UnaryRep voidPrimTy     -- See Note [Nullary unboxed tuple]
        else UbxTupleRep (concatMap (flattenRepType . go rec_nts) tys)

    go _ ty = UnaryRep ty

-- | All type constructors occurring in the type; looking through type
-- synonyms, but not newtypes.
-- When it finds a Class, it returns the class TyCon.
tyConsOfType :: Type -> NameEnv TyCon
tyConsOfType ty
  = go ty
  where
    go :: Type -> NameEnv TyCon  -- The NameEnv does duplicate elim
    go ty | Just ty' <- tcView ty = go ty'
    go (TyVarTy {})    = emptyNameEnv
    go (LitTy {})      = emptyNameEnv
    go (TyConApp tc tys) = go_tc tc tys
    go (AppTy a b)     = go a `plusNameEnv` go b
    go (FunTy a b)     = go a `plusNameEnv` go b
    go (ForAllTy _ ty) = go ty

    go_tc tc tys = extendNameEnv (go_s tys) (tyConName tc) tc
    go_s tys = foldr (plusNameEnv . go) emptyNameEnv tys

-- ToDo: this could be moved to the code generator, using splitTyConApp instead
-- of inspecting the type directly.
-- | Discovers the primitive representation of a more abstract 'UnaryType'
typePrimRep :: UnaryType -> PrimRep
typePrimRep ty
  = case repType ty of
      UbxTupleRep _ -> pprPanic "typePrimRep: UbxTupleRep" (ppr ty)
      UnaryRep rep -> case rep of
        TyConApp tc _ -> tyConPrimRep tc
        FunTy _ _     -> PtrRep
        AppTy _ _     -> PtrRep      -- See Note [AppTy rep]
        TyVarTy _     -> PtrRep
        _             -> pprPanic "typePrimRep: UnaryRep" (ppr ty)

typeRepArity :: Arity -> Type -> RepArity
typeRepArity 0 _ = 0
typeRepArity n ty = case repType ty of
  UnaryRep (FunTy ty1 ty2) -> length (flattenRepType (repType ty1)) + typeRepArity (n - 1) ty2
  _ -> pprPanic "typeRepArity: arity greater than type can handle" (ppr (n, ty))

isVoidTy :: Type -> Bool
-- True if the type has zero width
isVoidTy ty = case repType ty of
                UnaryRep (TyConApp tc _) -> isVoidRep (tyConPrimRep tc)
                _                        -> False

{-
Note [AppTy rep]
~~~~~~~~~~~~~~~~
Types of the form 'f a' must be of kind *, not #, so we are guaranteed
that they are represented by pointers.  The reason is that f must have
kind (kk -> kk) and kk cannot be unlifted; see Note [The kind invariant]
in TypeRep.

---------------------------------------------------------------------
                                ForAllTy
                                ~~~~~~~~
-}

mkForAllTy :: TyVar -> Type -> Type
mkForAllTy tyvar ty
  = ForAllTy tyvar ty

-- | Wraps foralls over the type using the provided 'TyVar's from left to right
mkForAllTys :: [TyVar] -> Type -> Type
mkForAllTys tyvars ty = foldr ForAllTy ty tyvars

mkPiKinds :: [TyVar] -> Kind -> Kind
-- mkPiKinds [k1, k2, (a:k1 -> *)] k2
-- returns forall k1 k2. (k1 -> *) -> k2
mkPiKinds [] res = res
mkPiKinds (tv:tvs) res
  | isKindVar tv = ForAllTy tv          (mkPiKinds tvs res)
  | otherwise    = FunTy (tyVarKind tv) (mkPiKinds tvs res)

mkPiType  :: Var -> Type -> Type
-- ^ Makes a @(->)@ type or a forall type, depending
-- on whether it is given a type variable or a term variable.
mkPiTypes :: [Var] -> Type -> Type
-- ^ 'mkPiType' for multiple type or value arguments

mkPiType v ty
   | isId v    = mkFunTy (varType v) ty
   | otherwise = mkForAllTy v ty

mkPiTypes vs ty = foldr mkPiType ty vs

isForAllTy :: Type -> Bool
isForAllTy (ForAllTy _ _) = True
isForAllTy _              = False

-- | Attempts to take a forall type apart, returning the bound type variable
-- and the remainder of the type
splitForAllTy_maybe :: Type -> Maybe (TyVar, Type)
splitForAllTy_maybe ty = splitFAT_m ty
  where
    splitFAT_m ty | Just ty' <- coreView ty = splitFAT_m ty'
    splitFAT_m (ForAllTy tyvar ty) = Just(tyvar, ty)
    splitFAT_m _                   = Nothing

-- | Attempts to take a forall type apart, returning all the immediate such bound
-- type variables and the remainder of the type. Always succeeds, even if that means
-- returning an empty list of 'TyVar's
splitForAllTys :: Type -> ([TyVar], Type)
splitForAllTys ty = split ty ty []
   where
     split orig_ty ty tvs | Just ty' <- coreView ty = split orig_ty ty' tvs
     split _       (ForAllTy tv ty) tvs = split ty ty (tv:tvs)
     split orig_ty _                tvs = (reverse tvs, orig_ty)

-- | Equivalent to @snd . splitForAllTys@
dropForAlls :: Type -> Type
dropForAlls ty = snd (splitForAllTys ty)

{-
-- (mkPiType now in CoreUtils)

applyTy, applyTys
~~~~~~~~~~~~~~~~~
-}

-- | Instantiate a forall type with one or more type arguments.
-- Used when we have a polymorphic function applied to type args:
--
-- > f t1 t2
--
-- We use @applyTys type-of-f [t1,t2]@ to compute the type of the expression.
-- Panics if no application is possible.
applyTy :: Type -> KindOrType -> Type
applyTy ty arg | Just ty' <- coreView ty = applyTy ty' arg
applyTy (ForAllTy tv ty) arg = substTyWith [tv] [arg] ty
applyTy _                _   = panic "applyTy"

applyTys :: Type -> [KindOrType] -> Type
-- ^ This function is interesting because:
--
--      1. The function may have more for-alls than there are args
--
--      2. Less obviously, it may have fewer for-alls
--
-- For case 2. think of:
--
-- > applyTys (forall a.a) [forall b.b, Int]
--
-- This really can happen, but only (I think) in situations involving
-- undefined.  For example:
--       undefined :: forall a. a
-- Term: undefined @(forall b. b->b) @Int
-- This term should have type (Int -> Int), but notice that
-- there are more type args than foralls in 'undefined's type.

-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism] in coreSyn/CoreLint.lhs
applyTys ty args = applyTysD empty ty args

applyTysD :: SDoc -> Type -> [Type] -> Type     -- Debug version
applyTysD _   orig_fun_ty []      = orig_fun_ty
applyTysD doc orig_fun_ty arg_tys
  | n_tvs == n_args     -- The vastly common case
  = substTyWith tvs arg_tys rho_ty
  | n_tvs > n_args      -- Too many for-alls
  = substTyWith (take n_args tvs) arg_tys
                (mkForAllTys (drop n_args tvs) rho_ty)
  | otherwise           -- Too many type args
  = ASSERT2( n_tvs > 0, doc $$ ppr orig_fun_ty $$ ppr arg_tys )  -- Zero case gives infinite loop!
    applyTysD doc (substTyWith tvs (take n_tvs arg_tys) rho_ty)
                  (drop n_tvs arg_tys)
  where
    (tvs, rho_ty) = splitForAllTys orig_fun_ty
    n_tvs  = length tvs
    n_args = length arg_tys

applyTysX :: [TyVar] -> Type -> [Type] -> Type
-- applyTysX beta-reduces (/\tvs. body_ty) arg_tys
applyTysX tvs body_ty arg_tys
  = ASSERT2( length arg_tys >= n_tvs, ppr tvs $$ ppr body_ty $$ ppr arg_tys )
    mkAppTys (substTyWith tvs (take n_tvs arg_tys) body_ty)
             (drop n_tvs arg_tys)
  where
    n_tvs = length tvs

{-
************************************************************************
*                                                                      *
                         Pred
*                                                                      *
************************************************************************

Predicates on PredType
-}

isPredTy :: Type -> Bool
-- NB: isPredTy is used when printing types, which can happen in debug printing
--     during type checking of not-fully-zonked types.
--     So it's not cool to say
--     isConstraintKind (typeKind ty)
--     because absent zonking the type might
--     be ill-kinded, and typeKind crashes
--     Hence the rather tiresome story here
isPredTy ty = go ty []
  where
    go :: Type -> [KindOrType] -> Bool
    go (AppTy ty1 ty2)   args = go ty1 (ty2 : args)
    go (TyConApp tc tys) args = go_k (tyConKind tc) (tys ++ args)
    go (TyVarTy tv)      args = go_k (tyVarKind tv) args
    go _                 _    = False

    go_k :: Kind -> [KindOrType] -> Bool
    -- True <=> kind is k1 -> .. -> kn -> Constraint
    go_k k [] = isConstraintKind k
    go_k (FunTy _ k1)     (_ :args) = go_k k1 args
    go_k (ForAllTy kv k1) (k2:args) = go_k (substKiWith [kv] [k2] k1) args
    go_k _ _ = False                  -- Typeable * Int :: Constraint

isClassPred, isEqPred, isIPPred :: PredType -> Bool
isClassPred ty = case tyConAppTyCon_maybe ty of
    Just tyCon | isClassTyCon tyCon -> True
    _                               -> False
isEqPred ty = case tyConAppTyCon_maybe ty of
    Just tyCon -> tyCon `hasKey` eqTyConKey
    _          -> False

isIPPred ty = case tyConAppTyCon_maybe ty of
    Just tc -> isIPTyCon tc
    _       -> False

isIPTyCon :: TyCon -> Bool
isIPTyCon tc = tc `hasKey` ipClassNameKey

isIPClass :: Class -> Bool
isIPClass cls = cls `hasKey` ipClassNameKey
  -- Class and its corresponding TyCon have the same Unique

isIPPred_maybe :: Type -> Maybe (FastString, Type)
isIPPred_maybe ty =
  do (tc,[t1,t2]) <- splitTyConApp_maybe ty
     guard (isIPTyCon tc)
     x <- isStrLitTy t1
     return (x,t2)

{-
Make PredTypes

--------------------- Equality types ---------------------------------
-}

-- | Creates a type equality predicate
mkEqPred :: Type -> Type -> PredType
mkEqPred ty1 ty2
  = WARN( not (k `eqKind` typeKind ty2), ppr ty1 $$ ppr ty2 $$ ppr k $$ ppr (typeKind ty2) )
    TyConApp eqTyCon [k, ty1, ty2]
  where
    k = typeKind ty1

mkCoerciblePred :: Type -> Type -> PredType
mkCoerciblePred ty1 ty2
  = WARN( not (k `eqKind` typeKind ty2), ppr ty1 $$ ppr ty2 $$ ppr k $$ ppr (typeKind ty2) )
    TyConApp coercibleTyCon [k, ty1, ty2]
  where
    k = typeKind ty1

mkPrimEqPred :: Type -> Type -> Type
mkPrimEqPred ty1 ty2
  = WARN( not (k `eqKind` typeKind ty2), ppr ty1 $$ ppr ty2 )
    TyConApp eqPrimTyCon [k, ty1, ty2]
  where
    k = typeKind ty1

mkReprPrimEqPred :: Type -> Type -> Type
mkReprPrimEqPred ty1 ty2
  = WARN( not (k `eqKind` typeKind ty2), ppr ty1 $$ ppr ty2 )
    TyConApp eqReprPrimTyCon [k, ty1, ty2]
  where
    k = typeKind ty1

-- --------------------- Dictionary types ---------------------------------

mkClassPred :: Class -> [Type] -> PredType
mkClassPred clas tys = TyConApp (classTyCon clas) tys

isDictTy :: Type -> Bool
isDictTy = isClassPred

isDictLikeTy :: Type -> Bool
-- Note [Dictionary-like types]
isDictLikeTy ty | Just ty' <- coreView ty = isDictLikeTy ty'
isDictLikeTy ty = case splitTyConApp_maybe ty of
        Just (tc, tys) | isClassTyCon tc -> True
                       | isTupleTyCon tc -> all isDictLikeTy tys
        _other                           -> False

{-
Note [Dictionary-like types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Being "dictionary-like" means either a dictionary type or a tuple thereof.
In GHC 6.10 we build implication constraints which construct such tuples,
and if we land up with a binding
    t :: (C [a], Eq [a])
    t = blah
then we want to treat t as cheap under "-fdicts-cheap" for example.
(Implication constraints are normally inlined, but sadly not if the
occurrence is itself inside an INLINE function!  Until we revise the
handling of implication constraints, that is.)  This turned out to
be important in getting good arities in DPH code.  Example:

    class C a
    class D a where { foo :: a -> a }
    instance C a => D (Maybe a) where { foo x = x }

    bar :: (C a, C b) => a -> b -> (Maybe a, Maybe b)
    {-# INLINE bar #-}
    bar x y = (foo (Just x), foo (Just y))

Then 'bar' should jolly well have arity 4 (two dicts, two args), but
we ended up with something like
   bar = __inline_me__ (\d1,d2. let t :: (D (Maybe a), D (Maybe b)) = ...
                                in \x,y. <blah>)

This is all a bit ad-hoc; eg it relies on knowing that implication
constraints build tuples.


Decomposing PredType
-}

-- | A choice of equality relation.
-- This is separate from the type 'Role'
-- because 'Phantom' does not define a (non-trivial) equality relation.
data EqRel = NomEq | ReprEq
  deriving (Eq, Ord)

instance Outputable EqRel where
  ppr NomEq  = text "nominal equality"
  ppr ReprEq = text "representational equality"

-- | The 'Role' corresponding to an 'EqRel'
eqRelRole :: EqRel -> Role
eqRelRole NomEq  = Nominal
eqRelRole ReprEq = Representational

data PredTree = ClassPred Class [Type]
              | EqPred EqRel Type Type
              | TuplePred [PredType]
              | IrredPred PredType

classifyPredType :: PredType -> PredTree
classifyPredType ev_ty = case splitTyConApp_maybe ev_ty of
    Just (tc, tys) | tc `hasKey` coercibleTyConKey
                   , let [_, ty1, ty2] = tys
                   -> EqPred ReprEq ty1 ty2
    Just (tc, tys) | tc `hasKey` eqTyConKey
                   , let [_, ty1, ty2] = tys
                   -> EqPred NomEq ty1 ty2
    -- NB: Coercible is also a class, so this check must come *after*
    -- the Coercible check
    Just (tc, tys) | Just clas <- tyConClass_maybe tc
                   -> ClassPred clas tys
    Just (tc, tys) | isTupleTyCon tc
                   -> TuplePred tys
    _ -> IrredPred ev_ty

getClassPredTys :: PredType -> (Class, [Type])
getClassPredTys ty = case getClassPredTys_maybe ty of
        Just (clas, tys) -> (clas, tys)
        Nothing          -> pprPanic "getClassPredTys" (ppr ty)

getClassPredTys_maybe :: PredType -> Maybe (Class, [Type])
getClassPredTys_maybe ty = case splitTyConApp_maybe ty of
        Just (tc, tys) | Just clas <- tyConClass_maybe tc -> Just (clas, tys)
        _ -> Nothing

getEqPredTys :: PredType -> (Type, Type)
getEqPredTys ty
  = case splitTyConApp_maybe ty of
      Just (tc, (_ : ty1 : ty2 : tys)) ->
        ASSERT( null tys && (tc `hasKey` eqTyConKey || tc `hasKey` coercibleTyConKey) )
        (ty1, ty2)
      _ -> pprPanic "getEqPredTys" (ppr ty)

getEqPredTys_maybe :: PredType -> Maybe (Role, Type, Type)
getEqPredTys_maybe ty
  = case splitTyConApp_maybe ty of
      Just (tc, [_, ty1, ty2])
        | tc `hasKey` eqTyConKey        -> Just (Nominal, ty1, ty2)
        | tc `hasKey` coercibleTyConKey -> Just (Representational, ty1, ty2)
      _ -> Nothing

getEqPredRole :: PredType -> Role
getEqPredRole ty
  = case splitTyConApp_maybe ty of
      Just (tc, [_, _, _])
        | tc `hasKey` eqTyConKey        -> Nominal
        | tc `hasKey` coercibleTyConKey -> Representational
      _ -> pprPanic "getEqPredRole" (ppr ty)

-- | Get the equality relation relevant for a pred type.
predTypeEqRel :: PredType -> EqRel
predTypeEqRel ty
  | Just (tc, _) <- splitTyConApp_maybe ty
  , tc `hasKey` coercibleTyConKey
  = ReprEq
  | otherwise
  = NomEq

{-
%************************************************************************
%*                                                                      *
                   Size
*                                                                      *
************************************************************************
-}

typeSize :: Type -> Int
typeSize (LitTy {})      = 1
typeSize (TyVarTy {})    = 1
typeSize (AppTy t1 t2)   = typeSize t1 + typeSize t2
typeSize (FunTy t1 t2)   = typeSize t1 + typeSize t2
typeSize (ForAllTy _ t)  = 1 + typeSize t
typeSize (TyConApp _ ts) = 1 + sum (map typeSize ts)

{-
************************************************************************
*                                                                      *
\subsection{Type families}
*                                                                      *
************************************************************************
-}

mkFamilyTyConApp :: TyCon -> [Type] -> Type
-- ^ Given a family instance TyCon and its arg types, return the
-- corresponding family type.  E.g:
--
-- > data family T a
-- > data instance T (Maybe b) = MkT b
--
-- Where the instance tycon is :RTL, so:
--
-- > mkFamilyTyConApp :RTL Int  =  T (Maybe Int)
mkFamilyTyConApp tc tys
  | Just (fam_tc, fam_tys) <- tyConFamInst_maybe tc
  , let tvs = tyConTyVars tc
        fam_subst = ASSERT2( length tvs == length tys, ppr tc <+> ppr tys )
                    zipTopTvSubst tvs tys
  = mkTyConApp fam_tc (substTys fam_subst fam_tys)
  | otherwise
  = mkTyConApp tc tys

-- | Get the type on the LHS of a coercion induced by a type/data
-- family instance.
coAxNthLHS :: CoAxiom br -> Int -> Type
coAxNthLHS ax ind =
  mkTyConApp (coAxiomTyCon ax) (coAxBranchLHS (coAxiomNthBranch ax ind))

-- | Pretty prints a 'TyCon', using the family instance in case of a
-- representation tycon.  For example:
--
-- > data T [a] = ...
--
-- In that case we want to print @T [a]@, where @T@ is the family 'TyCon'
pprSourceTyCon :: TyCon -> SDoc
pprSourceTyCon tycon
  | Just (fam_tc, tys) <- tyConFamInst_maybe tycon
  = ppr $ fam_tc `TyConApp` tys        -- can't be FunTyCon
  | otherwise
  = ppr tycon

{-
************************************************************************
*                                                                      *
\subsection{Liftedness}
*                                                                      *
************************************************************************
-}

-- | See "Type#type_classification" for what an unlifted type is
isUnLiftedType :: Type -> Bool
        -- isUnLiftedType returns True for forall'd unlifted types:
        --      x :: forall a. Int#
        -- I found bindings like these were getting floated to the top level.
        -- They are pretty bogus types, mind you.  It would be better never to
        -- construct them

isUnLiftedType ty | Just ty' <- coreView ty = isUnLiftedType ty'
isUnLiftedType (ForAllTy _ ty) = isUnLiftedType ty
isUnLiftedType (TyConApp tc _) = isUnLiftedTyCon tc
isUnLiftedType _               = False

isUnboxedTupleType :: Type -> Bool
isUnboxedTupleType ty = case tyConAppTyCon_maybe ty of
                           Just tc -> isUnboxedTupleTyCon tc
                           _       -> False

-- | See "Type#type_classification" for what an algebraic type is.
-- Should only be applied to /types/, as opposed to e.g. partially
-- saturated type constructors
isAlgType :: Type -> Bool
isAlgType ty
  = case splitTyConApp_maybe ty of
      Just (tc, ty_args) -> ASSERT( ty_args `lengthIs` tyConArity tc )
                            isAlgTyCon tc
      _other             -> False

-- | See "Type#type_classification" for what an algebraic type is.
-- Should only be applied to /types/, as opposed to e.g. partially
-- saturated type constructors. Closed type constructors are those
-- with a fixed right hand side, as opposed to e.g. associated types
isClosedAlgType :: Type -> Bool
isClosedAlgType ty
  = case splitTyConApp_maybe ty of
      Just (tc, ty_args) | isAlgTyCon tc && not (isFamilyTyCon tc)
             -> ASSERT2( ty_args `lengthIs` tyConArity tc, ppr ty ) True
      _other -> False

-- | Computes whether an argument (or let right hand side) should
-- be computed strictly or lazily, based only on its type.
-- Currently, it's just 'isUnLiftedType'.
isStrictType :: Type -> Bool
isStrictType = isUnLiftedType

isPrimitiveType :: Type -> Bool
-- ^ Returns true of types that are opaque to Haskell.
isPrimitiveType ty = case splitTyConApp_maybe ty of
                        Just (tc, ty_args) -> ASSERT( ty_args `lengthIs` tyConArity tc )
                                              isPrimTyCon tc
                        _                  -> False

{-
************************************************************************
*                                                                      *
\subsection{Sequencing on types}
*                                                                      *
************************************************************************
-}

-- | Force evaluation of the spine of a 'Type'
seqType :: Type -> ()
seqType (LitTy n)         = n `seq` ()
seqType (TyVarTy tv)      = tv `seq` ()
seqType (AppTy t1 t2)     = seqType t1 `seq` seqType t2
seqType (FunTy t1 t2)     = seqType t1 `seq` seqType t2
seqType (TyConApp tc tys) = tc `seq` seqTypes tys
seqType (ForAllTy tv ty)  = seqType (tyVarKind tv) `seq` seqType ty

seqTypes :: [Type] -> ()
seqTypes []       = ()
seqTypes (ty:tys) = seqType ty `seq` seqTypes tys

{-
************************************************************************
*                                                                      *
                Comparison for types
        (We don't use instances so that we know where it happens)
*                                                                      *
************************************************************************
-}

eqKind :: Kind -> Kind -> Bool
-- Watch out for horrible hack: See Note [Comparison with OpenTypeKind]
eqKind = eqType

eqType :: Type -> Type -> Bool
-- ^ Type equality on source types. Does not look through @newtypes@ or
-- 'PredType's, but it does look through type synonyms.
-- Watch out for horrible hack: See Note [Comparison with OpenTypeKind]
eqType t1 t2 = isEqual $ cmpType t1 t2

instance Eq Type where
  (==) = eqType

eqTypeX :: RnEnv2 -> Type -> Type -> Bool
eqTypeX env t1 t2 = isEqual $ cmpTypeX env t1 t2

eqTypes :: [Type] -> [Type] -> Bool
eqTypes tys1 tys2 = isEqual $ cmpTypes tys1 tys2

eqPred :: PredType -> PredType -> Bool
eqPred = eqType

eqPredX :: RnEnv2 -> PredType -> PredType -> Bool
eqPredX env p1 p2 = isEqual $ cmpTypeX env p1 p2

eqTyVarBndrs :: RnEnv2 -> [TyVar] -> [TyVar] -> Maybe RnEnv2
-- Check that the tyvar lists are the same length
-- and have matching kinds; if so, extend the RnEnv2
-- Returns Nothing if they don't match
eqTyVarBndrs env [] []
  = Just env
eqTyVarBndrs env (tv1:tvs1) (tv2:tvs2)
  | eqTypeX env (tyVarKind tv1) (tyVarKind tv2)
  = eqTyVarBndrs (rnBndr2 env tv1 tv2) tvs1 tvs2
eqTyVarBndrs _ _ _= Nothing

-- Now here comes the real worker

cmpType :: Type -> Type -> Ordering
-- Watch out for horrible hack: See Note [Comparison with OpenTypeKind]
cmpType t1 t2 = cmpTypeX rn_env t1 t2
  where
    rn_env = mkRnEnv2 (mkInScopeSet (tyVarsOfType t1 `unionVarSet` tyVarsOfType t2))

cmpTypes :: [Type] -> [Type] -> Ordering
cmpTypes ts1 ts2 = cmpTypesX rn_env ts1 ts2
  where
    rn_env = mkRnEnv2 (mkInScopeSet (tyVarsOfTypes ts1 `unionVarSet` tyVarsOfTypes ts2))

cmpPred :: PredType -> PredType -> Ordering
cmpPred p1 p2 = cmpTypeX rn_env p1 p2
  where
    rn_env = mkRnEnv2 (mkInScopeSet (tyVarsOfType p1 `unionVarSet` tyVarsOfType p2))

cmpTypeX :: RnEnv2 -> Type -> Type -> Ordering  -- Main workhorse
cmpTypeX env t1 t2 | Just t1' <- coreView t1 = cmpTypeX env t1' t2
                   | Just t2' <- coreView t2 = cmpTypeX env t1 t2'
-- We expand predicate types, because in Core-land we have
-- lots of definitions like
--      fOrdBool :: Ord Bool
--      fOrdBool = D:Ord .. .. ..
-- So the RHS has a data type

cmpTypeX env (TyVarTy tv1)       (TyVarTy tv2)       = rnOccL env tv1 `compare` rnOccR env tv2
cmpTypeX env (ForAllTy tv1 t1)   (ForAllTy tv2 t2)   = cmpTypeX env (tyVarKind tv1) (tyVarKind tv2)
                                                       `thenCmp` cmpTypeX (rnBndr2 env tv1 tv2) t1 t2
cmpTypeX env (AppTy s1 t1)       (AppTy s2 t2)       = cmpTypeX env s1 s2 `thenCmp` cmpTypeX env t1 t2
cmpTypeX env (FunTy s1 t1)       (FunTy s2 t2)       = cmpTypeX env s1 s2 `thenCmp` cmpTypeX env t1 t2
cmpTypeX env (TyConApp tc1 tys1) (TyConApp tc2 tys2) = (tc1 `cmpTc` tc2) `thenCmp` cmpTypesX env tys1 tys2
cmpTypeX _   (LitTy l1)          (LitTy l2)          = compare l1 l2

    -- Deal with the rest: TyVarTy < AppTy < FunTy < LitTy < TyConApp < ForAllTy < PredTy
cmpTypeX _ (AppTy _ _)    (TyVarTy _)    = GT

cmpTypeX _ (FunTy _ _)    (TyVarTy _)    = GT
cmpTypeX _ (FunTy _ _)    (AppTy _ _)    = GT

cmpTypeX _ (LitTy _)      (TyVarTy _)    = GT
cmpTypeX _ (LitTy _)      (AppTy _ _)    = GT
cmpTypeX _ (LitTy _)      (FunTy _ _)    = GT

cmpTypeX _ (TyConApp _ _) (TyVarTy _)    = GT
cmpTypeX _ (TyConApp _ _) (AppTy _ _)    = GT
cmpTypeX _ (TyConApp _ _) (FunTy _ _)    = GT
cmpTypeX _ (TyConApp _ _) (LitTy _)      = GT

cmpTypeX _ (ForAllTy _ _) (TyVarTy _)    = GT
cmpTypeX _ (ForAllTy _ _) (AppTy _ _)    = GT
cmpTypeX _ (ForAllTy _ _) (FunTy _ _)    = GT
cmpTypeX _ (ForAllTy _ _) (LitTy _)      = GT
cmpTypeX _ (ForAllTy _ _) (TyConApp _ _) = GT

cmpTypeX _ _              _              = LT

-------------
cmpTypesX :: RnEnv2 -> [Type] -> [Type] -> Ordering
cmpTypesX _   []        []        = EQ
cmpTypesX env (t1:tys1) (t2:tys2) = cmpTypeX env t1 t2 `thenCmp` cmpTypesX env tys1 tys2
cmpTypesX _   []        _         = LT
cmpTypesX _   _         []        = GT

-------------
cmpTc :: TyCon -> TyCon -> Ordering
-- Here we treat * and Constraint as equal
-- See Note [Kind Constraint and kind *] in Kinds.lhs
--
-- Also we treat OpenTypeKind as equal to either * or #
-- See Note [Comparison with OpenTypeKind]
cmpTc tc1 tc2
  | u1 == openTypeKindTyConKey, isSubOpenTypeKindKey u2 = EQ
  | u2 == openTypeKindTyConKey, isSubOpenTypeKindKey u1 = EQ
  | otherwise = nu1 `compare` nu2
  where
    u1  = tyConUnique tc1
    nu1 = if u1==constraintKindTyConKey then liftedTypeKindTyConKey else u1
    u2  = tyConUnique tc2
    nu2 = if u2==constraintKindTyConKey then liftedTypeKindTyConKey else u2

{-
Note [Comparison with OpenTypeKind]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In PrimOpWrappers we have things like
   PrimOpWrappers.mkWeak# = /\ a b c. Prim.mkWeak# a b c
where
   Prim.mkWeak# :: forall (a:Open) b c. a -> b -> c
                                     -> State# RealWorld -> (# State# RealWorld, Weak# b #)
Now, eta reduction will turn the definition into
     PrimOpWrappers.mkWeak# = Prim.mkWeak#
which is kind-of OK, but now the types aren't really equal.  So HACK HACK
we pretend (in Core) that Open is equal to * or #.  I hate this.

Note [cmpTypeX]
~~~~~~~~~~~~~~~
When we compare foralls, we should look at the kinds. But if we do so,
we get a corelint error like the following (in
libraries/ghc-prim/GHC/PrimopWrappers.hs):

    Binder's type: forall (o_abY :: *).
                   o_abY
                   -> GHC.Prim.State# GHC.Prim.RealWorld
                   -> GHC.Prim.State# GHC.Prim.RealWorld
    Rhs type: forall (a_12 :: ?).
              a_12
              -> GHC.Prim.State# GHC.Prim.RealWorld
              -> GHC.Prim.State# GHC.Prim.RealWorld

This is why we don't look at the kind. Maybe we should look if the
kinds are compatible.

-- cmpTypeX env (ForAllTy tv1 t1)   (ForAllTy tv2 t2)
--   = cmpTypeX env (tyVarKind tv1) (tyVarKind tv2) `thenCmp`
--     cmpTypeX (rnBndr2 env tv1 tv2) t1 t2

************************************************************************
*                                                                      *
                Type substitutions
*                                                                      *
************************************************************************
-}

emptyTvSubstEnv :: TvSubstEnv
emptyTvSubstEnv = emptyVarEnv

composeTvSubst :: InScopeSet -> TvSubstEnv -> TvSubstEnv -> TvSubstEnv
-- ^ @(compose env1 env2)(x)@ is @env1(env2(x))@; i.e. apply @env2@ then @env1@.
-- It assumes that both are idempotent.
-- Typically, @env1@ is the refinement to a base substitution @env2@
composeTvSubst in_scope env1 env2
  = env1 `plusVarEnv` mapVarEnv (substTy subst1) env2
        -- First apply env1 to the range of env2
        -- Then combine the two, making sure that env1 loses if
        -- both bind the same variable; that's why env1 is the
        -- *left* argument to plusVarEnv, because the right arg wins
  where
    subst1 = TvSubst in_scope env1

emptyTvSubst :: TvSubst
emptyTvSubst = TvSubst emptyInScopeSet emptyTvSubstEnv

isEmptyTvSubst :: TvSubst -> Bool
         -- See Note [Extending the TvSubstEnv] in TypeRep
isEmptyTvSubst (TvSubst _ tenv) = isEmptyVarEnv tenv

mkTvSubst :: InScopeSet -> TvSubstEnv -> TvSubst
mkTvSubst = TvSubst

getTvSubstEnv :: TvSubst -> TvSubstEnv
getTvSubstEnv (TvSubst _ env) = env

getTvInScope :: TvSubst -> InScopeSet
getTvInScope (TvSubst in_scope _) = in_scope

isInScope :: Var -> TvSubst -> Bool
isInScope v (TvSubst in_scope _) = v `elemInScopeSet` in_scope

notElemTvSubst :: CoVar -> TvSubst -> Bool
notElemTvSubst v (TvSubst _ tenv) = not (v `elemVarEnv` tenv)

setTvSubstEnv :: TvSubst -> TvSubstEnv -> TvSubst
setTvSubstEnv (TvSubst in_scope _) tenv = TvSubst in_scope tenv

-- | Remove all substitution pairs, keeping only the in-scope set
zapTvSubstEnv :: TvSubst -> TvSubst
zapTvSubstEnv (TvSubst in_scope _) = TvSubst in_scope emptyVarEnv

extendTvInScope :: TvSubst -> Var -> TvSubst
extendTvInScope (TvSubst in_scope tenv) var = TvSubst (extendInScopeSet in_scope var) tenv

extendTvInScopeList :: TvSubst -> [Var] -> TvSubst
extendTvInScopeList (TvSubst in_scope tenv) vars = TvSubst (extendInScopeSetList in_scope vars) tenv

extendTvSubst :: TvSubst -> TyVar -> Type -> TvSubst
extendTvSubst (TvSubst in_scope tenv) tv ty = TvSubst in_scope (extendVarEnv tenv tv ty)

extendTvSubstList :: TvSubst -> [TyVar] -> [Type] -> TvSubst
extendTvSubstList (TvSubst in_scope tenv) tvs tys
  = TvSubst in_scope (extendVarEnvList tenv (tvs `zip` tys))

unionTvSubst :: TvSubst -> TvSubst -> TvSubst
-- Works when the ranges are disjoint
unionTvSubst (TvSubst in_scope1 tenv1) (TvSubst in_scope2 tenv2)
  = ASSERT( not (tenv1 `intersectsVarEnv` tenv2) )
    TvSubst (in_scope1 `unionInScope` in_scope2)
            (tenv1     `plusVarEnv`   tenv2)

-- mkOpenTvSubst and zipOpenTvSubst generate the in-scope set from
-- the types given; but it's just a thunk so with a bit of luck
-- it'll never be evaluated

-- Note [Generating the in-scope set for a substitution]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- If we want to substitute [a -> ty1, b -> ty2] I used to
-- think it was enough to generate an in-scope set that includes
-- fv(ty1,ty2).  But that's not enough; we really should also take the
-- free vars of the type we are substituting into!  Example:
--      (forall b. (a,b,x)) [a -> List b]
-- Then if we use the in-scope set {b}, there is a danger we will rename
-- the forall'd variable to 'x' by mistake, getting this:
--      (forall x. (List b, x, x)
-- Urk!  This means looking at all the calls to mkOpenTvSubst....

-- | Generates the in-scope set for the 'TvSubst' from the types in the incoming
-- environment, hence "open"
mkOpenTvSubst :: TvSubstEnv -> TvSubst
mkOpenTvSubst tenv = TvSubst (mkInScopeSet (tyVarsOfTypes (varEnvElts tenv))) tenv

-- | Generates the in-scope set for the 'TvSubst' from the types in the incoming
-- environment, hence "open"
zipOpenTvSubst :: [TyVar] -> [Type] -> TvSubst
zipOpenTvSubst tyvars tys
  | debugIsOn && (length tyvars /= length tys)
  = pprTrace "zipOpenTvSubst" (ppr tyvars $$ ppr tys) emptyTvSubst
  | otherwise
  = TvSubst (mkInScopeSet (tyVarsOfTypes tys)) (zipTyEnv tyvars tys)

-- | Called when doing top-level substitutions. Here we expect that the
-- free vars of the range of the substitution will be empty.
mkTopTvSubst :: [(TyVar, Type)] -> TvSubst mkTopTvSubst prs = TvSubst emptyInScopeSet (mkVarEnv prs) zipTopTvSubst :: [TyVar] -> [Type] -> TvSubst zipTopTvSubst tyvars tys | debugIsOn && (length tyvars /= length tys) = pprTrace "zipTopTvSubst" (ppr tyvars $$ ppr tys) emptyTvSubst | otherwise = TvSubst emptyInScopeSet (zipTyEnv tyvars tys) zipTyEnv :: [TyVar] -> [Type] -> TvSubstEnv zipTyEnv tyvars tys | debugIsOn && (length tyvars /= length tys) = pprTrace "zipTyEnv" (ppr tyvars $$ ppr tys) emptyVarEnv | otherwise = zip_ty_env tyvars tys emptyVarEnv -- Later substitutions in the list over-ride earlier ones, -- but there should be no loops zip_ty_env :: [TyVar] -> [Type] -> TvSubstEnv -> TvSubstEnv zip_ty_env [] [] env = env zip_ty_env (tv:tvs) (ty:tys) env = zip_ty_env tvs tys (extendVarEnv env tv ty) -- There used to be a special case for when -- ty == TyVarTy tv -- (a not-uncommon case) in which case the substitution was dropped. -- But the type-tidier changes the print-name of a type variable without -- changing the unique, and that led to a bug. Why? Pre-tidying, we had -- a type {Foo t}, where Foo is a one-method class. So Foo is really a newtype. -- And it happened that t was the type variable of the class. Post-tiding, -- it got turned into {Foo t2}. The ext-core printer expanded this using -- sourceTypeRep, but that said "Oh, t == t2" because they have the same unique, -- and so generated a rep type mentioning t not t2. 
-- -- Simplest fix is to nuke the "optimisation" zip_ty_env tvs tys env = pprTrace "Var/Type length mismatch: " (ppr tvs $$ ppr tys) env -- zip_ty_env _ _ env = env instance Outputable TvSubst where ppr (TvSubst ins tenv) = brackets $ sep[ ptext (sLit "TvSubst"), nest 2 (ptext (sLit "In scope:") <+> ppr ins), nest 2 (ptext (sLit "Type env:") <+> ppr tenv) ] {- ************************************************************************ * * Performing type or kind substitutions * * ************************************************************************ -} -- | Type substitution making use of an 'TvSubst' that -- is assumed to be open, see 'zipOpenTvSubst' substTyWith :: [TyVar] -> [Type] -> Type -> Type substTyWith tvs tys = ASSERT( length tvs == length tys ) substTy (zipOpenTvSubst tvs tys) substKiWith :: [KindVar] -> [Kind] -> Kind -> Kind substKiWith = substTyWith -- | Type substitution making use of an 'TvSubst' that -- is assumed to be open, see 'zipOpenTvSubst' substTysWith :: [TyVar] -> [Type] -> [Type] -> [Type] substTysWith tvs tys = ASSERT( length tvs == length tys ) substTys (zipOpenTvSubst tvs tys) substKisWith :: [KindVar] -> [Kind] -> [Kind] -> [Kind] substKisWith = substTysWith -- | Substitute within a 'Type' substTy :: TvSubst -> Type -> Type substTy subst ty | isEmptyTvSubst subst = ty | otherwise = subst_ty subst ty -- | Substitute within several 'Type's substTys :: TvSubst -> [Type] -> [Type] substTys subst tys | isEmptyTvSubst subst = tys | otherwise = map (subst_ty subst) tys -- | Substitute within a 'ThetaType' substTheta :: TvSubst -> ThetaType -> ThetaType substTheta subst theta | isEmptyTvSubst subst = theta | otherwise = map (substTy subst) theta -- | Remove any nested binders mentioning the 'TyVar's in the 'TyVarSet' deShadowTy :: TyVarSet -> Type -> Type deShadowTy tvs ty = subst_ty (mkTvSubst in_scope emptyTvSubstEnv) ty where in_scope = mkInScopeSet tvs subst_ty :: TvSubst -> Type -> Type -- subst_ty is the main workhorse for type 
substitution -- -- Note that the in_scope set is poked only if we hit a forall -- so it may often never be fully computed subst_ty subst ty = go ty where go (LitTy n) = n `seq` LitTy n go (TyVarTy tv) = substTyVar subst tv go (TyConApp tc tys) = let args = map go tys in args `seqList` TyConApp tc args go (FunTy arg res) = (FunTy $! (go arg)) $! (go res) go (AppTy fun arg) = mkAppTy (go fun) $! (go arg) -- The mkAppTy smart constructor is important -- we might be replacing (a Int), represented with App -- by [Int], represented with TyConApp go (ForAllTy tv ty) = case substTyVarBndr subst tv of (subst', tv') -> ForAllTy tv' $! (subst_ty subst' ty) substTyVar :: TvSubst -> TyVar -> Type substTyVar (TvSubst _ tenv) tv | Just ty <- lookupVarEnv tenv tv = ty -- See Note [Apply Once] | otherwise = ASSERT( isTyVar tv ) TyVarTy tv -- in TypeRep -- We do not require that the tyvar is in scope -- Reason: we do quite a bit of (substTyWith [tv] [ty] tau) -- and it's a nuisance to bring all the free vars of tau into -- scope --- and then force that thunk at every tyvar -- Instead we have an ASSERT in substTyVarBndr to check for capture substTyVars :: TvSubst -> [TyVar] -> [Type] substTyVars subst tvs = map (substTyVar subst) tvs lookupTyVar :: TvSubst -> TyVar -> Maybe Type -- See Note [Extending the TvSubst] in TypeRep lookupTyVar (TvSubst _ tenv) tv = lookupVarEnv tenv tv substTyVarBndr :: TvSubst -> TyVar -> (TvSubst, TyVar) substTyVarBndr subst@(TvSubst in_scope tenv) old_var = ASSERT2( _no_capture, ppr old_var $$ ppr subst ) (TvSubst (in_scope `extendInScopeSet` new_var) new_env, new_var) where new_env | no_change = delVarEnv tenv old_var | otherwise = extendVarEnv tenv old_var (TyVarTy new_var) _no_capture = not (new_var `elemVarSet` tyVarsOfTypes (varEnvElts tenv)) -- Assertion check that we are not capturing something in the substitution old_ki = tyVarKind old_var no_kind_change = isEmptyVarSet (tyVarsOfType old_ki) -- verify that kind is closed no_change = 
no_kind_change && (new_var == old_var) -- no_change means that the new_var is identical in -- all respects to the old_var (same unique, same kind) -- See Note [Extending the TvSubst] in TypeRep -- -- In that case we don't need to extend the substitution -- to map old to new. But instead we must zap any -- current substitution for the variable. For example: -- (\x.e) with id_subst = [x |-> e'] -- Here we must simply zap the substitution for x new_var | no_kind_change = uniqAway in_scope old_var | otherwise = uniqAway in_scope $ updateTyVarKind (substTy subst) old_var -- The uniqAway part makes sure the new variable is not already in scope cloneTyVarBndr :: TvSubst -> TyVar -> Unique -> (TvSubst, TyVar) cloneTyVarBndr (TvSubst in_scope tv_env) tv uniq = (TvSubst (extendInScopeSet in_scope tv') (extendVarEnv tv_env tv (mkTyVarTy tv')), tv') where tv' = setVarUnique tv uniq -- Simply set the unique; the kind -- has no type variables to worry about {- ---------------------------------------------------- -- Kind Stuff Kinds ~~~~~ For the description of subkinding in GHC, see http://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/TypeType#Kinds -} type MetaKindVar = TyVar -- invariant: MetaKindVar will always be a -- TcTyVar with details MetaTv (TauTv ...) ... 
-- meta kind var constructors and functions are in TcType type SimpleKind = Kind {- ************************************************************************ * * The kind of a type * * ************************************************************************ -} typeKind :: Type -> Kind typeKind orig_ty = go orig_ty where go ty@(TyConApp tc tys) | isPromotedTyCon tc = ASSERT( tyConArity tc == length tys ) superKind | otherwise = kindAppResult (ptext (sLit "typeKind 1") <+> ppr ty $$ ppr orig_ty) (tyConKind tc) tys go ty@(AppTy fun arg) = kindAppResult (ptext (sLit "typeKind 2") <+> ppr ty $$ ppr orig_ty) (go fun) [arg] go (LitTy l) = typeLiteralKind l go (ForAllTy _ ty) = go ty go (TyVarTy tyvar) = tyVarKind tyvar go _ty@(FunTy _arg res) -- Hack alert. The kind of (Int -> Int#) is liftedTypeKind (*), -- not unliftedTypeKind (#) -- The only things that can be after a function arrow are -- (a) types (of kind openTypeKind or its sub-kinds) -- (b) kinds (of super-kind TY) (e.g. * -> (* -> *)) | isSuperKind k = k | otherwise = ASSERT2( isSubOpenTypeKind k, ppr _ty $$ ppr k ) liftedTypeKind where k = go res typeLiteralKind :: TyLit -> Kind typeLiteralKind l = case l of NumTyLit _ -> typeNatKind StrTyLit _ -> typeSymbolKind {- Kind inference ~~~~~~~~~~~~~~ During kind inference, a kind variable unifies only with a "simple kind", sk sk ::= * | sk1 -> sk2 For example data T a = MkT a (T Int#) fails. We give T the kind (k -> *), and the kind variable k won't unify with # (the kind of Int#). Type inference ~~~~~~~~~~~~~~ When creating a fresh internal type variable, we give it a kind to express constraints on it. E.g. in (\x->e) we make up a fresh type variable for x, with kind ??. During unification we only bind an internal type variable to a type whose kind is lower in the sub-kind hierarchy than the kind of the tyvar. When unifying two internal type variables, we collect their kind constraints by finding the GLB of the two. 
Since the partial order is a tree, they only have a glb if one is a sub-kind of the other. In that case, we bind the less-informative one to the more informative one. Neat, eh? -}
forked-upstream-packages-for-ghcjs/ghc
compiler/types/Type.hs
bsd-3-clause
66,016
0
14
17,989
13,143
6,909
6,234
-1
-1
{-# LANGUAGE PolyKinds, DataKinds, TypeOperators, TypeFamilies, GADTs, PartialTypeSignatures #-} module T13035 where newtype MyAttr a b = MyAttr { _unMyAttr :: MyFun (a b) } type MyRec a b = Rec (MyAttr a) b type family MyFun (a :: k1) :: k2 data GY (a :: k1) (b :: k2) (c :: k1 -> k3) (d :: k1) data GNone (a :: k1) type family GYTF a where GYTF (GY a b _ a) = b GYTF (GY _ _ c d) = MyFun (c d) type instance MyFun (GY a b c d) = GYTF (GY a b c d) type family GNoneTF (a :: k1) :: k2 where type instance MyFun (GNone a) = GNoneTF a type (a :: k1) =: (b :: k2) = a `GY` b type (a :: j1 -> j2) $ (b :: j1) = a b infixr 0 $ infixr 9 =: data FConst (a :: *) (b :: Fields) data FApply (a :: * -> * -> *) b c (d :: Fields) data FMap (a :: * -> *) b (d :: Fields) type instance MyFun (FConst a b) = a type instance MyFun (FApply b c d a) = b (MyFun (c a)) (MyFun (d a)) type instance MyFun (FMap b c a) = b (MyFun (c a)) data Fields = Name | Author | Image | Description | Ingredients | Instructions | CookTime | PrepTime | TotalTime | Yield | Nutrition | Tags | Url | Section | Items | Subsections | Calories | Carbohydrates | Cholesterol | Fat | Fiber | Protien | SaturatedFat | Sodium | Sugar | TransFat | UnsaturatedFat | ServingSize data Rec :: (u -> *) -> [u] -> * where RNil :: Rec f '[] (:&) :: !(f r) -> !(Rec f rs) -> Rec f (r ': rs) data family Sing (a :: k) data instance Sing (z_a6bn :: Fields) = z_a6bn ~ Name => SName | z_a6bn ~ Author => SAuthor | z_a6bn ~ Image => SImage | z_a6bn ~ Description => SDescription | z_a6bn ~ Ingredients => SIngredients | z_a6bn ~ Instructions => SInstructions | z_a6bn ~ CookTime => SCookTime | z_a6bn ~ PrepTime => SPrepTime | z_a6bn ~ TotalTime => STotalTime | z_a6bn ~ Yield => SYield | z_a6bn ~ Nutrition => SNutrition | z_a6bn ~ Tags => STags | z_a6bn ~ Url => SUrl | z_a6bn ~ Section => SSection | z_a6bn ~ Items => SItems | z_a6bn ~ Subsections => SSubsections | z_a6bn ~ Calories => SCalories | z_a6bn ~ Carbohydrates => SCarbohydrates | z_a6bn ~ 
Cholesterol => SCholesterol | z_a6bn ~ Fat => SFat | z_a6bn ~ Fiber => SFiber | z_a6bn ~ Protien => SProtien | z_a6bn ~ SaturatedFat => SSaturatedFat | z_a6bn ~ Sodium => SSodium | z_a6bn ~ Sugar => SSugar | z_a6bn ~ TransFat => STransFat | z_a6bn ~ UnsaturatedFat => SUnsaturatedFat | z_a6bn ~ ServingSize => SServingSize (=::) :: sing f -> MyFun (a f) -> MyAttr a f _ =:: x = MyAttr x type NutritionT = Calories =: Maybe Int $ Carbohydrates =: Maybe Int $ Cholesterol =: Maybe Int $ Fat =: Maybe Int $ Fiber =: Maybe Int $ Protien =: Maybe Int $ SaturatedFat =: Maybe Int $ Sodium =: Maybe Int $ Sugar =: Maybe Int $ TransFat =: Maybe Int $ UnsaturatedFat =: Maybe Int $ ServingSize =: String $ GNone type NutritionRec = MyRec NutritionT ['Calories, 'Carbohydrates, 'Cholesterol, 'Fat, 'Fiber, 'Protien, 'SaturatedFat, 'Sodium, 'Sugar, 'TransFat, 'UnsaturatedFat, 'ServingSize] type RecipeT = Name =: String $ Author =: String $ Image =: String $ Description =: String $ CookTime =: Maybe Int $ PrepTime =: Maybe Int $ TotalTime =: Maybe Int $ Yield =: String $ Nutrition =: NutritionRec $ Tags =: [String] $ Url =: String $ GNone type RecipeFormatter = FApply (->) (FConst [String]) (FMap IO RecipeT) g :: MyRec RecipeFormatter _ --'[ 'Author ] Uncomment to prevent loop g = SAuthor =:: (\a -> return "Hi") :& RNil
ezyang/ghc
testsuite/tests/perf/compiler/T13035.hs
bsd-3-clause
4,275
0
28
1,627
1,432
784
648
-1
-1
{-# LANGUAGE CPP, MagicHash #-} {-# OPTIONS_GHC -funbox-strict-fields #-} -- -- (c) The University of Glasgow 2002-2006 -- -- | ByteCodeInstrs: Bytecode instruction definitions module ByteCodeInstr ( BCInstr(..), ProtoBCO(..), bciStackUse, ) where #include "HsVersions.h" #include "../includes/MachDeps.h" import GhcPrelude import ByteCodeTypes import GHCi.RemoteTypes import GHCi.FFI (C_ffi_cif) import StgCmmLayout ( ArgRep(..) ) import PprCore import Outputable import FastString import Name import Unique import Id import CoreSyn import Literal import DataCon import VarSet import PrimOp import SMRep import Data.Word import GHC.Stack.CCS (CostCentre) -- ---------------------------------------------------------------------------- -- Bytecode instructions data ProtoBCO a = ProtoBCO { protoBCOName :: a, -- name, in some sense protoBCOInstrs :: [BCInstr], -- instrs -- arity and GC info protoBCOBitmap :: [StgWord], protoBCOBitmapSize :: Word16, protoBCOArity :: Int, -- what the BCO came from protoBCOExpr :: Either [AnnAlt Id DVarSet] (AnnExpr Id DVarSet), -- malloc'd pointers protoBCOFFIs :: [FFIInfo] } type LocalLabel = Word16 data BCInstr -- Messing with the stack = STKCHECK Word -- Push locals (existing bits of the stack) | PUSH_L !Word16{-offset-} | PUSH_LL !Word16 !Word16{-2 offsets-} | PUSH_LLL !Word16 !Word16 !Word16{-3 offsets-} -- Push the specified local as a 8, 16, 32 bit value onto the stack. (i.e., -- the stack will grow by 8, 16 or 32 bits) | PUSH8 !Word16 | PUSH16 !Word16 | PUSH32 !Word16 -- Push the specifiec local as a 8, 16, 32 bit value onto the stack, but the -- value will take the whole word on the stack (i.e., the stack will gorw by -- a word) -- This is useful when extracting a packed constructor field for further use. 
-- Currently we expect all values on the stack to take full words, except for -- the ones used for PACK (i.e., actually constracting new data types, in -- which case we use PUSH{8,16,32}) | PUSH8_W !Word16 | PUSH16_W !Word16 | PUSH32_W !Word16 -- Push a ptr (these all map to PUSH_G really) | PUSH_G Name | PUSH_PRIMOP PrimOp | PUSH_BCO (ProtoBCO Name) -- Push an alt continuation | PUSH_ALTS (ProtoBCO Name) | PUSH_ALTS_UNLIFTED (ProtoBCO Name) ArgRep -- Pushing 8, 16 and 32 bits of padding (for constructors). | PUSH_PAD8 | PUSH_PAD16 | PUSH_PAD32 -- Pushing literals | PUSH_UBX8 Literal | PUSH_UBX16 Literal | PUSH_UBX32 Literal | PUSH_UBX Literal Word16 -- push this int/float/double/addr, on the stack. Word16 -- is # of words to copy from literal pool. Eitherness reflects -- the difficulty of dealing with MachAddr here, mostly due to -- the excessive (and unnecessary) restrictions imposed by the -- designers of the new Foreign library. In particular it is -- quite impossible to convert an Addr to any other integral -- type, and it appears impossible to get hold of the bits of -- an addr, even though we need to assemble BCOs. 
-- various kinds of application | PUSH_APPLY_N | PUSH_APPLY_V | PUSH_APPLY_F | PUSH_APPLY_D | PUSH_APPLY_L | PUSH_APPLY_P | PUSH_APPLY_PP | PUSH_APPLY_PPP | PUSH_APPLY_PPPP | PUSH_APPLY_PPPPP | PUSH_APPLY_PPPPPP | SLIDE Word16{-this many-} Word16{-down by this much-} -- To do with the heap | ALLOC_AP !Word16 -- make an AP with this many payload words | ALLOC_AP_NOUPD !Word16 -- make an AP_NOUPD with this many payload words | ALLOC_PAP !Word16 !Word16 -- make a PAP with this arity / payload words | MKAP !Word16{-ptr to AP is this far down stack-} !Word16{-number of words-} | MKPAP !Word16{-ptr to PAP is this far down stack-} !Word16{-number of words-} | UNPACK !Word16 -- unpack N words from t.o.s Constr | PACK DataCon !Word16 -- after assembly, the DataCon is an index into the -- itbl array -- For doing case trees | LABEL LocalLabel | TESTLT_I Int LocalLabel | TESTEQ_I Int LocalLabel | TESTLT_W Word LocalLabel | TESTEQ_W Word LocalLabel | TESTLT_F Float LocalLabel | TESTEQ_F Float LocalLabel | TESTLT_D Double LocalLabel | TESTEQ_D Double LocalLabel -- The Word16 value is a constructor number and therefore -- stored in the insn stream rather than as an offset into -- the literal pool. | TESTLT_P Word16 LocalLabel | TESTEQ_P Word16 LocalLabel | CASEFAIL | JMP LocalLabel -- For doing calls to C (via glue code generated by libffi) | CCALL Word16 -- stack frame size (RemotePtr C_ffi_cif) -- addr of the glue code Word16 -- flags. -- -- 0x1: call is interruptible -- 0x2: call is unsafe -- -- (XXX: inefficient, but I don't know -- what the alignment constraints are.) 
-- For doing magic ByteArray passing to foreign calls | SWIZZLE Word16 -- to the ptr N words down the stack, Word16 -- add M (interpreted as a signed 16-bit entity) -- To Infinity And Beyond | ENTER | RETURN -- return a lifted value | RETURN_UBX ArgRep -- return an unlifted value, here's its rep -- Breakpoints | BRK_FUN Word16 Unique (RemotePtr CostCentre) -- ----------------------------------------------------------------------------- -- Printing bytecode instructions instance Outputable a => Outputable (ProtoBCO a) where ppr (ProtoBCO name instrs bitmap bsize arity origin ffis) = (text "ProtoBCO" <+> ppr name <> char '#' <> int arity <+> text (show ffis) <> colon) $$ nest 3 (case origin of Left alts -> vcat (zipWith (<+>) (char '{' : repeat (char ';')) (map (pprCoreAltShort.deAnnAlt) alts)) <+> char '}' Right rhs -> pprCoreExprShort (deAnnotate rhs)) $$ nest 3 (text "bitmap: " <+> text (show bsize) <+> ppr bitmap) $$ nest 3 (vcat (map ppr instrs)) -- Print enough of the Core expression to enable the reader to find -- the expression in the -ddump-prep output. That is, we need to -- include at least a binder. pprCoreExprShort :: CoreExpr -> SDoc pprCoreExprShort expr@(Lam _ _) = let (bndrs, _) = collectBinders expr in char '\\' <+> sep (map (pprBndr LambdaBind) bndrs) <+> arrow <+> text "..." pprCoreExprShort (Case _expr var _ty _alts) = text "case of" <+> ppr var pprCoreExprShort (Let (NonRec x _) _) = text "let" <+> ppr x <+> ptext (sLit ("= ... in ...")) pprCoreExprShort (Let (Rec bs) _) = text "let {" <+> ppr (fst (head bs)) <+> ptext (sLit ("= ...; ... 
} in ...")) pprCoreExprShort (Tick t e) = ppr t <+> pprCoreExprShort e pprCoreExprShort (Cast e _) = pprCoreExprShort e <+> text "`cast` T" pprCoreExprShort e = pprCoreExpr e pprCoreAltShort :: CoreAlt -> SDoc pprCoreAltShort (con, args, expr) = ppr con <+> sep (map ppr args) <+> text "->" <+> pprCoreExprShort expr instance Outputable BCInstr where ppr (STKCHECK n) = text "STKCHECK" <+> ppr n ppr (PUSH_L offset) = text "PUSH_L " <+> ppr offset ppr (PUSH_LL o1 o2) = text "PUSH_LL " <+> ppr o1 <+> ppr o2 ppr (PUSH_LLL o1 o2 o3) = text "PUSH_LLL" <+> ppr o1 <+> ppr o2 <+> ppr o3 ppr (PUSH8 offset) = text "PUSH8 " <+> ppr offset ppr (PUSH16 offset) = text "PUSH16 " <+> ppr offset ppr (PUSH32 offset) = text "PUSH32 " <+> ppr offset ppr (PUSH8_W offset) = text "PUSH8_W " <+> ppr offset ppr (PUSH16_W offset) = text "PUSH16_W " <+> ppr offset ppr (PUSH32_W offset) = text "PUSH32_W " <+> ppr offset ppr (PUSH_G nm) = text "PUSH_G " <+> ppr nm ppr (PUSH_PRIMOP op) = text "PUSH_G " <+> text "GHC.PrimopWrappers." 
<> ppr op ppr (PUSH_BCO bco) = hang (text "PUSH_BCO") 2 (ppr bco) ppr (PUSH_ALTS bco) = hang (text "PUSH_ALTS") 2 (ppr bco) ppr (PUSH_ALTS_UNLIFTED bco pk) = hang (text "PUSH_ALTS_UNLIFTED" <+> ppr pk) 2 (ppr bco) ppr PUSH_PAD8 = text "PUSH_PAD8" ppr PUSH_PAD16 = text "PUSH_PAD16" ppr PUSH_PAD32 = text "PUSH_PAD32" ppr (PUSH_UBX8 lit) = text "PUSH_UBX8" <+> ppr lit ppr (PUSH_UBX16 lit) = text "PUSH_UBX16" <+> ppr lit ppr (PUSH_UBX32 lit) = text "PUSH_UBX32" <+> ppr lit ppr (PUSH_UBX lit nw) = text "PUSH_UBX" <+> parens (ppr nw) <+> ppr lit ppr PUSH_APPLY_N = text "PUSH_APPLY_N" ppr PUSH_APPLY_V = text "PUSH_APPLY_V" ppr PUSH_APPLY_F = text "PUSH_APPLY_F" ppr PUSH_APPLY_D = text "PUSH_APPLY_D" ppr PUSH_APPLY_L = text "PUSH_APPLY_L" ppr PUSH_APPLY_P = text "PUSH_APPLY_P" ppr PUSH_APPLY_PP = text "PUSH_APPLY_PP" ppr PUSH_APPLY_PPP = text "PUSH_APPLY_PPP" ppr PUSH_APPLY_PPPP = text "PUSH_APPLY_PPPP" ppr PUSH_APPLY_PPPPP = text "PUSH_APPLY_PPPPP" ppr PUSH_APPLY_PPPPPP = text "PUSH_APPLY_PPPPPP" ppr (SLIDE n d) = text "SLIDE " <+> ppr n <+> ppr d ppr (ALLOC_AP sz) = text "ALLOC_AP " <+> ppr sz ppr (ALLOC_AP_NOUPD sz) = text "ALLOC_AP_NOUPD " <+> ppr sz ppr (ALLOC_PAP arity sz) = text "ALLOC_PAP " <+> ppr arity <+> ppr sz ppr (MKAP offset sz) = text "MKAP " <+> ppr sz <+> text "words," <+> ppr offset <+> text "stkoff" ppr (MKPAP offset sz) = text "MKPAP " <+> ppr sz <+> text "words," <+> ppr offset <+> text "stkoff" ppr (UNPACK sz) = text "UNPACK " <+> ppr sz ppr (PACK dcon sz) = text "PACK " <+> ppr dcon <+> ppr sz ppr (LABEL lab) = text "__" <> ppr lab <> colon ppr (TESTLT_I i lab) = text "TESTLT_I" <+> int i <+> text "__" <> ppr lab ppr (TESTEQ_I i lab) = text "TESTEQ_I" <+> int i <+> text "__" <> ppr lab ppr (TESTLT_W i lab) = text "TESTLT_W" <+> int (fromIntegral i) <+> text "__" <> ppr lab ppr (TESTEQ_W i lab) = text "TESTEQ_W" <+> int (fromIntegral i) <+> text "__" <> ppr lab ppr (TESTLT_F f lab) = text "TESTLT_F" <+> float f <+> text "__" <> ppr lab ppr (TESTEQ_F 
f lab) = text "TESTEQ_F" <+> float f <+> text "__" <> ppr lab ppr (TESTLT_D d lab) = text "TESTLT_D" <+> double d <+> text "__" <> ppr lab ppr (TESTEQ_D d lab) = text "TESTEQ_D" <+> double d <+> text "__" <> ppr lab ppr (TESTLT_P i lab) = text "TESTLT_P" <+> ppr i <+> text "__" <> ppr lab ppr (TESTEQ_P i lab) = text "TESTEQ_P" <+> ppr i <+> text "__" <> ppr lab ppr CASEFAIL = text "CASEFAIL" ppr (JMP lab) = text "JMP" <+> ppr lab ppr (CCALL off marshall_addr flags) = text "CCALL " <+> ppr off <+> text "marshall code at" <+> text (show marshall_addr) <+> (case flags of 0x1 -> text "(interruptible)" 0x2 -> text "(unsafe)" _ -> empty) ppr (SWIZZLE stkoff n) = text "SWIZZLE " <+> text "stkoff" <+> ppr stkoff <+> text "by" <+> ppr n ppr ENTER = text "ENTER" ppr RETURN = text "RETURN" ppr (RETURN_UBX pk) = text "RETURN_UBX " <+> ppr pk ppr (BRK_FUN index uniq _cc) = text "BRK_FUN" <+> ppr index <+> ppr uniq <+> text "<cc>" -- ----------------------------------------------------------------------------- -- The stack use, in words, of each bytecode insn. These _must_ be -- correct, or overestimates of reality, to be safe. -- NOTE: we aggregate the stack use from case alternatives too, so that -- we can do a single stack check at the beginning of a function only. -- This could all be made more accurate by keeping track of a proper -- stack high water mark, but it doesn't seem worth the hassle. 
protoBCOStackUse :: ProtoBCO a -> Word protoBCOStackUse bco = sum (map bciStackUse (protoBCOInstrs bco)) bciStackUse :: BCInstr -> Word bciStackUse STKCHECK{} = 0 bciStackUse PUSH_L{} = 1 bciStackUse PUSH_LL{} = 2 bciStackUse PUSH_LLL{} = 3 bciStackUse PUSH8{} = 1 -- overapproximation bciStackUse PUSH16{} = 1 -- overapproximation bciStackUse PUSH32{} = 1 -- overapproximation on 64bit arch bciStackUse PUSH8_W{} = 1 -- takes exactly 1 word bciStackUse PUSH16_W{} = 1 -- takes exactly 1 word bciStackUse PUSH32_W{} = 1 -- takes exactly 1 word bciStackUse PUSH_G{} = 1 bciStackUse PUSH_PRIMOP{} = 1 bciStackUse PUSH_BCO{} = 1 bciStackUse (PUSH_ALTS bco) = 2 + protoBCOStackUse bco bciStackUse (PUSH_ALTS_UNLIFTED bco _) = 2 + protoBCOStackUse bco bciStackUse (PUSH_PAD8) = 1 -- overapproximation bciStackUse (PUSH_PAD16) = 1 -- overapproximation bciStackUse (PUSH_PAD32) = 1 -- overapproximation on 64bit arch bciStackUse (PUSH_UBX8 _) = 1 -- overapproximation bciStackUse (PUSH_UBX16 _) = 1 -- overapproximation bciStackUse (PUSH_UBX32 _) = 1 -- overapproximation on 64bit arch bciStackUse (PUSH_UBX _ nw) = fromIntegral nw bciStackUse PUSH_APPLY_N{} = 1 bciStackUse PUSH_APPLY_V{} = 1 bciStackUse PUSH_APPLY_F{} = 1 bciStackUse PUSH_APPLY_D{} = 1 bciStackUse PUSH_APPLY_L{} = 1 bciStackUse PUSH_APPLY_P{} = 1 bciStackUse PUSH_APPLY_PP{} = 1 bciStackUse PUSH_APPLY_PPP{} = 1 bciStackUse PUSH_APPLY_PPPP{} = 1 bciStackUse PUSH_APPLY_PPPPP{} = 1 bciStackUse PUSH_APPLY_PPPPPP{} = 1 bciStackUse ALLOC_AP{} = 1 bciStackUse ALLOC_AP_NOUPD{} = 1 bciStackUse ALLOC_PAP{} = 1 bciStackUse (UNPACK sz) = fromIntegral sz bciStackUse LABEL{} = 0 bciStackUse TESTLT_I{} = 0 bciStackUse TESTEQ_I{} = 0 bciStackUse TESTLT_W{} = 0 bciStackUse TESTEQ_W{} = 0 bciStackUse TESTLT_F{} = 0 bciStackUse TESTEQ_F{} = 0 bciStackUse TESTLT_D{} = 0 bciStackUse TESTEQ_D{} = 0 bciStackUse TESTLT_P{} = 0 bciStackUse TESTEQ_P{} = 0 bciStackUse CASEFAIL{} = 0 bciStackUse JMP{} = 0 bciStackUse ENTER{} = 0 bciStackUse RETURN{} 
= 0 bciStackUse RETURN_UBX{} = 1 bciStackUse CCALL{} = 0 bciStackUse SWIZZLE{} = 0 bciStackUse BRK_FUN{} = 0 -- These insns actually reduce stack use, but we need the high-tide level, -- so can't use this info. Not that it matters much. bciStackUse SLIDE{} = 0 bciStackUse MKAP{} = 0 bciStackUse MKPAP{} = 0 bciStackUse PACK{} = 1 -- worst case is PACK 0 words
ezyang/ghc
compiler/ghci/ByteCodeInstr.hs
bsd-3-clause
15,696
0
22
4,982
3,725
1,876
1,849
299
1
{-# htermination print :: Show a => a -> IO () #-}
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/full_haskell/Prelude_print_1.hs
mit
51
0
2
12
3
2
1
1
0
{-# htermination (enumFromToChar :: Char -> Char -> (List Char)) #-} import qualified Prelude data MyBool = MyTrue | MyFalse data List a = Cons a (List a) | Nil data Char = Char MyInt ; data MyInt = Pos Nat | Neg Nat ; data Nat = Succ Nat | Zero ; data Ordering = LT | EQ | GT ; data WHNF a = WHNF a ; flip :: (a -> c -> b) -> c -> a -> b; flip f x y = f y x; primCmpNat :: Nat -> Nat -> Ordering; primCmpNat Zero Zero = EQ; primCmpNat Zero (Succ y) = LT; primCmpNat (Succ x) Zero = GT; primCmpNat (Succ x) (Succ y) = primCmpNat x y; primCmpInt :: MyInt -> MyInt -> Ordering; primCmpInt (Pos Zero) (Pos Zero) = EQ; primCmpInt (Pos Zero) (Neg Zero) = EQ; primCmpInt (Neg Zero) (Pos Zero) = EQ; primCmpInt (Neg Zero) (Neg Zero) = EQ; primCmpInt (Pos x) (Pos y) = primCmpNat x y; primCmpInt (Pos x) (Neg y) = GT; primCmpInt (Neg x) (Pos y) = LT; primCmpInt (Neg x) (Neg y) = primCmpNat y x; compareMyInt :: MyInt -> MyInt -> Ordering compareMyInt = primCmpInt; esEsOrdering :: Ordering -> Ordering -> MyBool esEsOrdering LT LT = MyTrue; esEsOrdering LT EQ = MyFalse; esEsOrdering LT GT = MyFalse; esEsOrdering EQ LT = MyFalse; esEsOrdering EQ EQ = MyTrue; esEsOrdering EQ GT = MyFalse; esEsOrdering GT LT = MyFalse; esEsOrdering GT EQ = MyFalse; esEsOrdering GT GT = MyTrue; not :: MyBool -> MyBool; not MyTrue = MyFalse; not MyFalse = MyTrue; fsEsOrdering :: Ordering -> Ordering -> MyBool fsEsOrdering x y = not (esEsOrdering x y); ltEsMyInt :: MyInt -> MyInt -> MyBool ltEsMyInt x y = fsEsOrdering (compareMyInt x y) GT; enforceWHNF :: WHNF b -> a -> a; enforceWHNF (WHNF x) y = y; seq :: b -> a -> a; seq x y = enforceWHNF (WHNF x) y; dsEm :: (b -> a) -> b -> a; dsEm f x = seq x (f x); fromIntMyInt :: MyInt -> MyInt fromIntMyInt x = x; primMinusNat :: Nat -> Nat -> MyInt; primMinusNat Zero Zero = Pos Zero; primMinusNat Zero (Succ y) = Neg (Succ y); primMinusNat (Succ x) Zero = Pos (Succ x); primMinusNat (Succ x) (Succ y) = primMinusNat x y; primPlusNat :: Nat -> Nat -> Nat; primPlusNat Zero 
Zero = Zero; primPlusNat Zero (Succ y) = Succ y; primPlusNat (Succ x) Zero = Succ x; primPlusNat (Succ x) (Succ y) = Succ (Succ (primPlusNat x y)); primPlusInt :: MyInt -> MyInt -> MyInt; primPlusInt (Pos x) (Neg y) = primMinusNat x y; primPlusInt (Neg x) (Pos y) = primMinusNat y x; primPlusInt (Neg x) (Neg y) = Neg (primPlusNat x y); primPlusInt (Pos x) (Pos y) = Pos (primPlusNat x y); psMyInt :: MyInt -> MyInt -> MyInt psMyInt = primPlusInt; numericEnumFrom n = Cons n (dsEm numericEnumFrom (psMyInt n (fromIntMyInt (Pos (Succ Zero))))); otherwise :: MyBool; otherwise = MyTrue; takeWhile0 p x xs MyTrue = Nil; takeWhile1 p x xs MyTrue = Cons x (takeWhile p xs); takeWhile1 p x xs MyFalse = takeWhile0 p x xs otherwise; takeWhile2 p (Cons x xs) = takeWhile1 p x xs (p x); takeWhile3 p Nil = Nil; takeWhile3 vx vy = takeWhile2 vx vy; takeWhile :: (a -> MyBool) -> (List a) -> (List a); takeWhile p Nil = takeWhile3 p Nil; takeWhile p (Cons x xs) = takeWhile2 p (Cons x xs); numericEnumFromTo n m = takeWhile (flip ltEsMyInt m) (numericEnumFrom n); enumFromToMyInt :: MyInt -> MyInt -> (List MyInt) enumFromToMyInt = numericEnumFromTo; primCharToInt :: Char -> MyInt; primCharToInt (Char x) = x; fromEnumChar :: Char -> MyInt fromEnumChar = primCharToInt; map :: (b -> a) -> (List b) -> (List a); map f Nil = Nil; map f (Cons x xs) = Cons (f x) (map f xs); primIntToChar :: MyInt -> Char; primIntToChar x = Char x; toEnumChar :: MyInt -> Char toEnumChar = primIntToChar; enumFromToChar :: Char -> Char -> (List Char) enumFromToChar x y = map toEnumChar (enumFromToMyInt (fromEnumChar x) (fromEnumChar y));
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/basic_haskell/enumFromTo_4.hs
mit
3,753
0
15
830
1,712
908
804
96
1
-- | Settings are centralized, as much as possible, into this file. This -- includes database connection settings, static file locations, etc. -- In addition, you can configure a number of different aspects of Yesod -- by overriding methods in the Yesod typeclass. That instance is -- declared in the Foundation.hs file. module Settings where import Prelude import Text.Shakespeare.Text (st) import Language.Haskell.TH.Syntax import Yesod.Default.Config import Yesod.Default.Util import Data.Text (Text) import Data.Yaml import Control.Applicative import Settings.Development import Data.Default (def) import Text.Hamlet #if DEVELOPMENT import Database.Persist.Sqlite (SqliteConf) #else import Database.Persist.Postgresql (PostgresConf) #endif -- | Which Persistent backend this site is using. #if DEVELOPMENT type PersistConf = SqliteConf #else type PersistConf = PostgresConf #endif -- Static setting below. Changing these requires a recompile -- | The location of static files on your system. This is a file system -- path. The default value works properly with your scaffolded site. staticDir :: FilePath staticDir = "static" -- | The base URL for your static files. As you can see by the default -- value, this can simply be "static" appended to your application root. -- A powerful optimization can be serving static files from a separate -- domain name. This allows you to use a web server optimized for static -- files, more easily set expires and cache values, and avoid possibly -- costly transference of cookies on static files. For more information, -- please see: -- http://code.google.com/speed/page-speed/docs/request.html#ServeFromCookielessDomain -- -- If you change the resource pattern for StaticR in Foundation.hs, you will -- have to make a corresponding change here. 
-- -- To see how this value is used, see urlRenderOverride in Foundation.hs staticRoot :: AppConfig DefaultEnv x -> Text staticRoot conf = [st|#{appRoot conf}/static|] -- | Settings for 'widgetFile', such as which template languages to support and -- default Hamlet settings. -- -- For more information on modifying behavior, see: -- -- https://github.com/yesodweb/yesod/wiki/Overriding-widgetFile widgetFileSettings :: WidgetFileSettings widgetFileSettings = def { wfsHamletSettings = defaultHamletSettings { hamletNewlines = AlwaysNewlines } } -- The rest of this file contains settings which rarely need changing by a -- user. widgetFile :: String -> Q Exp widgetFile = (if development then widgetFileReload else widgetFileNoReload) widgetFileSettings data Extra = Extra { extraCopyright :: Text , extraAnalytics :: Maybe Text -- ^ Google Analytics } deriving Show parseExtra :: DefaultEnv -> Object -> Parser Extra parseExtra _ o = Extra <$> o .: "copyright" <*> o .:? "analytics"
TimeAttack/time-attack-server
Settings.hs
mit
2,878
0
9
502
293
188
105
-1
-1
{-# LANGUAGE BangPatterns #-} {-# LANGUAGE CPP #-} {-# LANGUAGE Rank2Types #-} -- | Buffers for 'Builder's. This is a partial copy of blaze-builder-0.3.3.4's -- "Blaze.ByteString.Builder.Internal.Buffer" module, which was removed in -- blaze-builder-0.4. -- -- If you are using blaze-builder 0.3.*, this module just re-exports from -- "Blaze.ByteString.Builder.Internal.Buffer". -- -- Since 0.1.10.0 -- module Data.Streaming.ByteString.Builder.Buffer ( -- * Buffers Buffer (..) -- ** Status information , freeSize , sliceSize , bufferSize -- ** Creation and modification , allocBuffer , reuseBuffer , nextSlice , updateEndOfSlice -- ** Conversion to bytestings , unsafeFreezeBuffer , unsafeFreezeNonEmptyBuffer -- * Buffer allocation strategies , BufferAllocStrategy , allNewBuffersStrategy , reuseBufferStrategy , defaultStrategy ) where import Data.ByteString.Lazy.Internal (defaultChunkSize) import qualified Data.ByteString as S import qualified Data.ByteString.Internal as S import Foreign (Word8, ForeignPtr, Ptr, plusPtr, minusPtr) import Foreign.ForeignPtr.Unsafe (unsafeForeignPtrToPtr) ------------------------------------------------------------------------------ -- Buffers ------------------------------------------------------------------------------ -- | A buffer @Buffer fpbuf p0 op ope@ describes a buffer with the underlying -- byte array @fpbuf..ope@, the currently written slice @p0..op@ and the free -- space @op..ope@. -- -- Since 0.1.10.0 -- data Buffer = Buffer {-# UNPACK #-} !(ForeignPtr Word8) -- underlying pinned array {-# UNPACK #-} !(Ptr Word8) -- beginning of slice {-# UNPACK #-} !(Ptr Word8) -- next free byte {-# UNPACK #-} !(Ptr Word8) -- first byte after buffer -- | The size of the free space of the buffer. -- -- Since 0.1.10.0 -- freeSize :: Buffer -> Int freeSize (Buffer _ _ op ope) = ope `minusPtr` op -- | The size of the written slice in the buffer. 
-- -- Since 0.1.10.0 -- sliceSize :: Buffer -> Int sliceSize (Buffer _ p0 op _) = op `minusPtr` p0 -- | The size of the whole byte array underlying the buffer. -- -- Since 0.1.10.0 -- bufferSize :: Buffer -> Int bufferSize (Buffer fpbuf _ _ ope) = ope `minusPtr` unsafeForeignPtrToPtr fpbuf -- | @allocBuffer size@ allocates a new buffer of size @size@. -- -- Since 0.1.10.0 -- {-# INLINE allocBuffer #-} allocBuffer :: Int -> IO Buffer allocBuffer size = do fpbuf <- S.mallocByteString size let !pbuf = unsafeForeignPtrToPtr fpbuf return $! Buffer fpbuf pbuf pbuf (pbuf `plusPtr` size) -- | Resets the beginning of the next slice and the next free byte such that -- the whole buffer can be filled again. -- -- Since 0.1.10.0 -- {-# INLINE reuseBuffer #-} reuseBuffer :: Buffer -> Buffer reuseBuffer (Buffer fpbuf _ _ ope) = Buffer fpbuf p0 p0 ope where p0 = unsafeForeignPtrToPtr fpbuf -- | Convert the buffer to a bytestring. This operation is unsafe in the sense -- that created bytestring shares the underlying byte array with the buffer. -- Hence, depending on the later use of this buffer (e.g., if it gets reset and -- filled again) referential transparency may be lost. -- -- Since 0.1.10.0 -- {-# INLINE unsafeFreezeBuffer #-} unsafeFreezeBuffer :: Buffer -> S.ByteString unsafeFreezeBuffer (Buffer fpbuf p0 op _) = S.PS fpbuf (p0 `minusPtr` unsafeForeignPtrToPtr fpbuf) (op `minusPtr` p0) -- | Convert a buffer to a non-empty bytestring. See 'unsafeFreezeBuffer' for -- the explanation of why this operation may be unsafe. -- -- Since 0.1.10.0 -- {-# INLINE unsafeFreezeNonEmptyBuffer #-} unsafeFreezeNonEmptyBuffer :: Buffer -> Maybe S.ByteString unsafeFreezeNonEmptyBuffer buf | sliceSize buf <= 0 = Nothing | otherwise = Just $ unsafeFreezeBuffer buf -- | Update the end of slice pointer. 
-- -- Since 0.1.10.0 -- {-# INLINE updateEndOfSlice #-} updateEndOfSlice :: Buffer -- Old buffer -> Ptr Word8 -- New end of slice -> Buffer -- Updated buffer updateEndOfSlice (Buffer fpbuf p0 _ ope) op' = Buffer fpbuf p0 op' ope -- | Move the beginning of the slice to the next free byte such that the -- remaining free space of the buffer can be filled further. This operation -- is safe and can be used to fill the remaining part of the buffer after a -- direct insertion of a bytestring or a flush. -- -- Since 0.1.10.0 -- {-# INLINE nextSlice #-} nextSlice :: Int -> Buffer -> Maybe Buffer nextSlice minSize (Buffer fpbuf _ op ope) | ope `minusPtr` op <= minSize = Nothing | otherwise = Just (Buffer fpbuf op op ope) ------------------------------------------------------------------------------ -- Buffer allocation strategies ------------------------------------------------------------------------------ -- | A buffer allocation strategy @(buf0, nextBuf)@ specifies the initial -- buffer to use and how to compute a new buffer @nextBuf minSize buf@ with at -- least size @minSize@ from a filled buffer @buf@. The double nesting of the -- @IO@ monad helps to ensure that the reference to the filled buffer @buf@ is -- lost as soon as possible, but the new buffer doesn't have to be allocated -- too early. -- -- Since 0.1.10.0 -- type BufferAllocStrategy = (IO Buffer, Int -> Buffer -> IO (IO Buffer)) -- | The simplest buffer allocation strategy: whenever a buffer is requested, -- allocate a new one that is big enough for the next build step to execute. -- -- NOTE that this allocation strategy may spill quite some memory upon direct -- insertion of a bytestring by the builder. Thats no problem for garbage -- collection, but it may lead to unreasonably high memory consumption in -- special circumstances. -- -- Since 0.1.10.0 -- allNewBuffersStrategy :: Int -- Minimal buffer size. 
-> BufferAllocStrategy allNewBuffersStrategy bufSize = ( allocBuffer bufSize , \reqSize _ -> return (allocBuffer (max reqSize bufSize)) ) -- | An unsafe, but possibly more efficient buffer allocation strategy: -- reuse the buffer, if it is big enough for the next build step to execute. -- -- Since 0.1.10.0 -- reuseBufferStrategy :: IO Buffer -> BufferAllocStrategy reuseBufferStrategy buf0 = (buf0, tryReuseBuffer) where tryReuseBuffer reqSize buf | bufferSize buf >= reqSize = return $ return (reuseBuffer buf) | otherwise = return $ allocBuffer reqSize defaultStrategy :: BufferAllocStrategy defaultStrategy = allNewBuffersStrategy defaultChunkSize
fpco/streaming-commons
Data/Streaming/ByteString/Builder/Buffer.hs
mit
6,645
0
11
1,357
914
535
379
79
1
{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE TypeSynonymInstances #-} module Oden.Infer ( TypeError(..), Subst(..), TypeBinding(..), TypingEnvironment, inferExpr, inferDefinition, inferPackage, constraintsExpr ) where import Control.Arrow (left) import Control.Monad.Except import Control.Monad.RWS hiding ((<>)) import qualified Data.Map as Map import qualified Data.Set as Set import qualified Oden.Core as Core import Oden.Core.Operator import qualified Oden.Core.Untyped as Untyped import Oden.Environment as Environment hiding (map) import Oden.Identifier import Oden.Infer.Environment import Oden.Infer.Substitution import Oden.Infer.Subsumption import Oden.Infer.Unification import Oden.Metadata import Oden.Predefined import Oden.QualifiedName (QualifiedName(..), nameInUniverse) import Oden.SourceInfo import Oden.Type.Polymorphic import Oden.Type.Signature ------------------------------------------------------------------------------- -- Classes ------------------------------------------------------------------------------- -- | Inference monad. type Infer a = (RWST TypingEnvironment -- Typing environment [Constraint] -- Generated constraints InferState -- Inference state (Except -- Inference errors TypeError) a) -- Result -- | Inference state. data InferState = InferState { count :: Int } -- | Initial inference state. 
initInfer :: InferState initInfer = InferState { count = 0 } instance FTV TypeBinding where ftv (Package _ _ e) = ftv e ftv (Local _ _ d) = ftv d ftv (Type _ _ _ fs) = ftv fs ftv (QuantifiedType _ _ t) = ftv t instance Substitutable TypeBinding where apply _ (Package si n e) = Package si n e apply s (Local si n d) = Local si n (apply s d) apply s (Type si n bs t) = Type si n bs (apply s t) apply s (QuantifiedType si n t) = QuantifiedType si n (apply s t) instance FTV TypingEnvironment where ftv (Environment env) = ftv $ Map.elems env instance Substitutable TypingEnvironment where apply s (Environment env) = Environment $ Map.map (apply s) env data TypeError = UnificationError UnificationError | PackageNotInScope SourceInfo Identifier | NotInScope SourceInfo Identifier | MemberNotInPackage SourceInfo Identifier Identifier | ArgumentCountMismatch (Core.Expr Type) [Type] [Type] | TypeSignatureSubsumptionError Identifier SubsumptionError | InvalidPackageReference SourceInfo Identifier | ValueUsedAsType SourceInfo Identifier | TypeIsNotAnExpression SourceInfo Identifier deriving (Show, Eq) -- | Run the inference monad. runInfer :: TypingEnvironment -> Infer a -> Either TypeError (a, [Constraint]) runInfer env m = runExcept $ evalRWST m env initInfer -- | Solve for the top-level type of an expression in a given typing -- environment. inferExpr :: TypingEnvironment -> Untyped.Expr -> Either TypeError Core.CanonicalExpr inferExpr env ex = do (te, cs) <- runInfer env (infer ex) subst <- left UnificationError $ runSolve cs return $ closeOver (apply subst te) -- | Return the internal constraints used in solving for the type of an -- expression. 
constraintsExpr :: TypingEnvironment -> Untyped.Expr -> Either TypeError ([Constraint], Subst, Core.Expr Type, Scheme) constraintsExpr env ex = do (te, cs) <- runInfer env (infer ex) subst <- left UnificationError $ runSolve cs let (sc, te') = closeOver $ apply subst te return (cs, subst, te', sc) -- | Canonicalize and return the polymorphic top-level type. closeOver :: Core.Expr Type -> Core.CanonicalExpr closeOver = normalize . generalize empty -- | Unify two types. uni :: SourceInfo -> Type -> Type -> Infer () uni si t1 t2 = tell [(si, t1, t2)] -- | Extend the typing environment. inEnv :: (Identifier, TypeBinding) -> Infer a -> Infer a inEnv (x, sc) = local (`extend` (x, sc)) lookupTypeIn :: TypingEnvironment -> Metadata SourceInfo -> Identifier -> Infer Type lookupTypeIn env (Metadata si) identifier = case Environment.lookup identifier env of Nothing -> throwError $ NotInScope si identifier Just Package{} -> throwError $ InvalidPackageReference si identifier Just Local{} -> throwError $ ValueUsedAsType si identifier Just (Type _ _ _ t) -> return t Just (QuantifiedType _ _ t) -> return t -- | Lookup a type in the environment. lookupType :: Metadata SourceInfo -> Identifier -> Infer Type lookupType si identifier = do env <- ask lookupTypeIn env si identifier lookupValueIn :: TypingEnvironment -> Metadata SourceInfo -> Identifier -> Infer Type lookupValueIn env (Metadata si) identifier = do type' <- case Environment.lookup identifier env of Nothing -> throwError $ NotInScope si identifier Just Package{} -> throwError $ InvalidPackageReference si identifier Just (Local _ _ sc) -> instantiate sc Just Type{} -> throwError (TypeIsNotAnExpression si identifier) Just QuantifiedType{} -> throwError (TypeIsNotAnExpression si identifier) return $ setSourceInfo si type' -- | Lookup type of a value in the environment. 
lookupValue :: Metadata SourceInfo -> Identifier -> Infer Type lookupValue si identifier = do env <- ask lookupValueIn env si identifier letters :: [String] letters = [1..] >>= flip replicateM ['a'..'z'] -- | Create a new type variable with a unique name. fresh :: Metadata SourceInfo -> Infer Type fresh si = do s <- get put s{count = count s + 1} return $ TVar si (TV ("_t" ++ show (count s))) -- | Create a type based on scheme but with all fresh type variables. instantiate :: Scheme -> Infer Type instantiate (Forall _ as t) = do as' <- mapM (fresh . Metadata . getSourceInfo) as let s = Subst $ Map.fromList $ zip (map getBindingVar as) as' return $ apply s t -- | Given a typed expression, return a canonical expression with the free -- type variables (not present in the environment) declared as type variable -- bindings for the expression. generalize :: TypingEnvironment -> Core.Expr Type -> Core.CanonicalExpr generalize env expr = (Forall (Metadata $ getSourceInfo expr) bindings (Core.typeOf expr), expr) where bindings = map (TVarBinding $ Metadata Missing) (Set.toList $ ftv expr `Set.difference` ftv env) universeType :: Metadata SourceInfo -> String -> Type universeType si n = TCon si (nameInUniverse n) wrapForeign :: Metadata SourceInfo -> Core.Expr Type -> Type -> Infer (Core.Expr Type) wrapForeign si expr t = case t of TForeignFn _ _ [] returnTypes -> do let wrappedReturnType = wrapReturnTypes returnTypes innerExpr = Core.ForeignFnApplication si expr [] wrappedReturnType return $ Core.NoArgFn si innerExpr (TNoArgFn si wrappedReturnType) TForeignFn _ _ parameterTypes returnTypes -> do let generatedNames = map (Identifier . ("_g" ++) . show) [(0 :: Int)..] 
namesAndTypes = zip generatedNames parameterTypes wrappedReturnType = wrapReturnTypes returnTypes innerExpr = Core.ForeignFnApplication si expr (map (uncurry $ Core.Symbol si) namesAndTypes) wrappedReturnType fst <$> foldM wrapInFn (innerExpr, wrappedReturnType) (reverse namesAndTypes) _ -> return expr where wrapInFn (expr', returnType) (name, type') = let fnType = TFn si type' returnType in return (Core.Fn si (Core.NameBinding si name) expr' fnType, fnType) wrapReturnTypes [] = TCon si (nameInUniverse "unit") wrapReturnTypes [returnType] = returnType wrapReturnTypes (ft:st:ts) = TTuple si ft st ts -- | The heart of the inferencer. Takes an untyped expression and returns the -- inferred typed expression. Constraints are collected in the 'Infer' monad -- and substitutions are made before the inference is complete, so the -- expressions returned from 'infer' are not the final results. infer :: Untyped.Expr -> Infer (Core.Expr Type) infer expr = case expr of Untyped.Literal si Untyped.Unit -> return (Core.Literal si Core.Unit (TCon si (nameInUniverse "unit"))) Untyped.Literal si (Untyped.Int n) -> return (Core.Literal si (Core.Int n) (TCon si (nameInUniverse "int"))) Untyped.Literal si (Untyped.Bool b) -> return (Core.Literal si (Core.Bool b) (TCon si (nameInUniverse "bool"))) Untyped.Literal si (Untyped.String s) -> return (Core.Literal si (Core.String s) (TCon si (nameInUniverse "string"))) Untyped.Subscript si s i -> do st <- infer s it <- infer i tv <- fresh si uni (getSourceInfo st) (Core.typeOf st) (TSlice (Metadata $ getSourceInfo s) tv) uni (getSourceInfo it) (Core.typeOf it) (TCon si (nameInUniverse "int")) return (Core.Subscript si st it tv) Untyped.Subslice si s range -> case range of (Untyped.Range lowerExpr upperExpr) -> do st <- infer s lowerExprTyped <- infer lowerExpr upperExprTyped <- infer upperExpr tv <- fresh si uni (getSourceInfo st) (Core.typeOf st) (TSlice (Metadata $ getSourceInfo s) tv) uni (getSourceInfo upperExprTyped) (Core.typeOf 
lowerExprTyped) (universeType si "int") uni (getSourceInfo upperExprTyped) (Core.typeOf upperExprTyped) (universeType si "int") return (Core.Subslice si st (Core.Range lowerExprTyped upperExprTyped) (TSlice si tv)) (Untyped.RangeTo upperExpr) -> inferUnboundedRange Core.RangeTo upperExpr (Untyped.RangeFrom lowerExpr) -> inferUnboundedRange Core.RangeFrom lowerExpr where inferUnboundedRange f boundExpr = do st <- infer s boundExprTyped <- infer boundExpr tv <- fresh si uni (getSourceInfo st) (Core.typeOf st) (TSlice (Metadata $ getSourceInfo s) tv) uni (getSourceInfo boundExprTyped) (Core.typeOf boundExprTyped) (universeType si "int") return (Core.Subslice si st (f boundExprTyped) (TSlice si tv)) Untyped.UnaryOp si o e -> do rt <- case o of Positive -> return (universeType si "int") Negative -> return (universeType si "int") Not -> return (universeType si "bool") te <- infer e uni (getSourceInfo te) (Core.typeOf te) rt return (Core.UnaryOp si o te rt) Untyped.BinaryOp si o e1 e2 -> do (ot, rt) <- case o of Add -> return (universeType si "int", universeType si "int") Subtract -> return (universeType si "int", universeType si "int") Multiply -> return (universeType si "int", universeType si "int") Divide -> return (universeType si "int", universeType si "int") Equals -> do tv <- fresh si return (tv, universeType si "bool") Concat -> return (universeType si "string", universeType si "string") LessThan -> return (universeType si "int", universeType si "bool") GreaterThan -> return (universeType si "int", universeType si "bool") LessThanEqual -> return (universeType si "int", universeType si "bool") GreaterThanEqual -> return (universeType si "int", universeType si "bool") And -> return (universeType si "bool", universeType si "bool") Or -> return (universeType si "bool", universeType si "bool") te1 <- infer e1 te2 <- infer e2 uni (getSourceInfo te1) (Core.typeOf te1) ot uni (getSourceInfo te2) (Core.typeOf te2) ot return (Core.BinaryOp si o te1 te2 rt) Untyped.Symbol si 
x -> do t <- lookupValue si x wrapForeign si (Core.Symbol si x t) t Untyped.MemberAccess si expr'@(Untyped.Symbol symbolSourceInfo name) memberName -> do env <- ask case Environment.lookup name env of Just (Package _ _ pkgEnv) -> do valueType <- lookupValueIn pkgEnv si memberName wrapForeign si (Core.PackageMemberAccess si name memberName valueType) valueType Just _ -> inferRecordFieldAccess si expr' memberName Nothing -> throwError $ NotInScope (unwrap symbolSourceInfo) name Untyped.MemberAccess si expr' fieldName -> inferRecordFieldAccess si expr' fieldName Untyped.Fn si (Untyped.NameBinding bsi a) b -> do tv <- fresh bsi tb <- inEnv (a, Local bsi a (Forall bsi [] tv)) (infer b) return (Core.Fn si (Core.NameBinding bsi a) tb (TFn si tv (Core.typeOf tb))) Untyped.NoArgFn si f -> do tf <- infer f return (Core.NoArgFn si tf (TNoArgFn si (Core.typeOf tf))) Untyped.Application si f ps -> do tf <- infer f case Core.typeOf tf of -- No-arg functions t | null ps -> do tv <- fresh si uni (getSourceInfo tf) t (TNoArgFn (Metadata $ getSourceInfo tf) tv) return (Core.NoArgApplication si tf tv) -- Everything else, i.e. 
functions with a single argument and one return value _ -> foldM app tf ps where app :: Core.Expr Type -> Untyped.Expr -> Infer (Core.Expr Type) app tf' p = do tv <- fresh si tp <- infer p uni (getSourceInfo tf) (Core.typeOf tf') (TFn (Metadata $ getSourceInfo tf) (Core.typeOf tp) tv) return (Core.Application si tf' tp tv) Untyped.Let si (Untyped.NameBinding bsi n) e b -> do te <- infer e tb <- inEnv (n, Local bsi n (Forall si [] (Core.typeOf te))) (infer b) return (Core.Let si (Core.NameBinding bsi n) te tb (Core.typeOf tb)) Untyped.If si cond tr fl -> do tcond <- infer cond ttr <- infer tr tfl <- infer fl uni (getSourceInfo tcond) (Core.typeOf tcond) (universeType si "bool") uni (getSourceInfo ttr) (Core.typeOf ttr) (Core.typeOf tfl) return (Core.If si tcond ttr tfl (Core.typeOf ttr)) Untyped.Tuple si f s r -> do tf <- infer f ts <- infer s tr <- mapM infer r let t = TTuple si (Core.typeOf tf) (Core.typeOf ts) (map Core.typeOf tr) return (Core.Tuple si tf ts tr t) Untyped.Slice si es -> do tv <- fresh si tes <- mapM infer es mapM_ (uni (unwrap si) tv . 
Core.typeOf) tes return (Core.Slice si tes (TSlice si tv)) Untyped.Block si es -> do tv <- fresh si tes <- mapM infer es case tes of [] -> uni (unwrap si) tv (universeType si "unit") _ -> uni (unwrap si) tv (Core.typeOf (last tes)) return (Core.Block si tes tv) Untyped.RecordInitializer si fields -> do (fieldInitializers, row) <- foldM unifyFields ([], REmpty si) fields return (Core.RecordInitializer si (TRecord si row) fieldInitializers) where unifyFields (typedFields, row) (Untyped.FieldInitializer fsi label expr') = do typedExpr <- infer expr' return (Core.FieldInitializer fsi label typedExpr : typedFields, RExtension fsi label (Core.typeOf typedExpr) row) where inferRecordFieldAccess si expr' label = do fieldType <- fresh si recordExtType <- fresh si typedExpr <- infer expr' uni (getSourceInfo typedExpr) (TRecord (Metadata $ getSourceInfo typedExpr) (RExtension si label fieldType recordExtType)) (Core.typeOf typedExpr) return (Core.RecordFieldAccess si typedExpr label fieldType) -- | Tries to resolve a user-supplied type expression to an actual type. resolveType :: SignatureExpr SourceInfo -> Infer Type resolveType (TSUnit si) = return (universeType (Metadata si) "unit") resolveType (TSSymbol si i) = lookupType (Metadata si) i resolveType (TSApp _si _e1 _e2) = error "Type constructor application not implemented yet." 
resolveType (TSFn si de re) = TFn (Metadata si) <$> resolveType de <*> resolveType re resolveType (TSNoArgFn si e) = TNoArgFn (Metadata si) <$> resolveType e resolveType (TSTuple si fe se re) = TTuple (Metadata si) <$> resolveType fe <*> resolveType se <*> mapM resolveType re resolveType (TSSlice si e) = TSlice (Metadata si) <$> resolveType e resolveType (TSRowEmpty si) = return $ REmpty (Metadata si) resolveType (TSRowExtension si label type' row) = RExtension (Metadata si) label <$> resolveType type' <*> resolveType row resolveType (TSRecord si r) = TRecord (Metadata si) <$> resolveType r -- | Tries to resolve a user-supplied type signature to an actual type scheme. resolveTypeSignature :: TypeSignature SourceInfo -> Infer (Scheme, TypingEnvironment) resolveTypeSignature (TypeSignature si bindings expr) = do env <- ask envWithBindings <- foldM extendWithBinding env bindings t <- local (const envWithBindings) (resolveType expr) return (Forall (Metadata si) (map toVarBinding bindings) t, envWithBindings) where extendWithBinding env' (SignatureVarBinding si' v) = do --return $ env' `extend` (v, QuantifiedType (Metadata si') v (TVar (Metadata si') (TV varName))) tv <- fresh (Metadata si') return $ env' `extend` (v, QuantifiedType (Metadata si') v tv) toVarBinding (SignatureVarBinding si' (Identifier v)) = TVarBinding (Metadata si') (TV v) -- | Indicates if the canonical expression should be generated by closing over -- the the free type variables in the inferred expression. This is done when -- top type signatures are missing. type ShouldCloseOver = Bool -- | Infer the untyped definition in the Infer monad, returning a typed -- version. Resolves type signatures of optionally type-annotated definitions. 
inferDef :: Untyped.Definition -> Infer (Core.Definition, ShouldCloseOver) inferDef (Untyped.Definition si name signature expr) = do env <- ask case signature of Nothing -> do tv <- fresh si let recScheme = Forall si [] tv let recursiveEnv = env `extend` (name, Local si name recScheme) te <- local (const recursiveEnv) (infer expr) return (Core.Definition si name (recScheme, te), True) Just ts -> do (recScheme@(Forall _ _ recType), envWithBindings) <- resolveTypeSignature ts let recursiveEnv = envWithBindings `extend` (name, Local si name recScheme) te <- local (const recursiveEnv) (infer expr) uni (getSourceInfo te) (Core.typeOf te) recType case recScheme `subsumedBy` te of Left e -> throwError $ TypeSignatureSubsumptionError name e Right canonical -> return (Core.Definition si name canonical, False) inferDef (Untyped.TypeDefinition si name params typeExpr) = do type' <- resolveType typeExpr return (Core.TypeDefinition si name (map convertParams params) type', False) where convertParams (Untyped.NameBinding bsi bn) = Core.NameBinding bsi bn -- | Infer a top-level definitition, returning a typed version and the typing -- environment extended with the definitions name and type. inferDefinition :: TypingEnvironment -> Untyped.Definition -> Either TypeError (TypingEnvironment, Core.Definition) inferDefinition env def = do -- Infer the definition. 
((def', shouldCloseOver), cs) <- runInfer env (inferDef def) case def' of Core.Definition si name (_, te) | shouldCloseOver -> do subst <- left UnificationError $ runSolve cs let canonical'@(sc, _) = closeOver (apply subst te) env' = env `extend` (name, Local si name sc) return (env', Core.Definition si name canonical') Core.Definition si name canonical -> do subst <- left UnificationError $ runSolve cs let canonical'@(sc, _) = normalize (apply subst canonical) env' = env `extend` (name, Local si name sc) return (env', Core.Definition si name canonical') Core.TypeDefinition si name@(FQN _ localName) params type' -> return (env `extend` (localName, Type si name params type'), def') Core.ForeignDefinition _ name _ -> error ("unexpected foreign definition: " ++ asString name) -- | Infer the package, returning a package with typed definitions along with -- the extended typing environment. inferPackage :: Untyped.Package [Core.ImportedPackage] -> Either TypeError Core.Package inferPackage (Untyped.Package (Untyped.PackageDeclaration psi name) imports defs) = do let env = fromPackage universe `merge` fromPackages imports inferred <- snd <$> foldM iter (env, []) defs return (Core.Package (Core.PackageDeclaration psi name) imports inferred) where iter (e, inferred) def = do (e', def') <- inferDefinition e def return (e', inferred ++ [def']) -- | Swaps all type variables names for generated ones based on 'letters' to -- get a nice sequence. normalize :: (Scheme, Core.Expr Type) -> (Scheme, Core.Expr Type) normalize (Forall si _ exprType, te) = (Forall si newBindings (apply subst exprType), apply subst te) where -- Pairs of existing type variables in the type and new type variables -- values to substitute with, a sequence based on 'letters'. substPairs = zip (Set.toList $ ftv exprType) (map TV letters) -- The substitution based on the pairs. 
subst = Subst (Map.fromList (map wrapTvar substPairs)) wrapTvar (tv1, tv2) = (tv1, TVar (Metadata Missing) tv2) -- The new set of type variables bindings for the canonical expression. newBindings = map (TVarBinding (Metadata Missing) . snd) substPairs
AlbinTheander/oden
src/Oden/Infer.hs
mit
21,806
0
22
5,342
6,983
3,430
3,553
381
39
-- | Minimal "Hello, World" program.
module Main where

-- | Program entry point: print a fixed greeting to stdout.
-- An explicit signature avoids type-defaulting warnings under -Wall.
main :: IO ()
main = putStrLn "Hello, World"
CSE3320/30-languages-in-30-days
Day-08-Haskell/hello.hs
mit
50
0
5
9
12
7
5
2
1
{-# LANGUAGE FlexibleInstances #-}

-- | CIS 194 homework 5: a tiny arithmetic expression language with
-- several alternative interpretations of the same abstract syntax.
module Calc where

import ExprT
import Parser
import qualified Data.Map as M

-- | Evaluate a concrete 'ExprT' syntax tree to an 'Integer'.
eval :: ExprT -> Integer
eval (Lit n)   = n
eval (Mul x y) = eval x * eval y
eval (Add x y) = eval x + eval y

-------------------------------------

-- | Parse a string and evaluate it; 'Nothing' on a parse failure.
evalStr :: String -> Maybe Integer
evalStr = fmap eval . parseExp Lit Add Mul

-------------------------------------

-- | Abstraction over the different interpretations of an expression.
class Expr a where
  lit :: Integer -> a
  mul :: a -> a -> a
  add :: a -> a -> a

instance Expr ExprT where
  lit = Lit
  mul = Mul
  add = Add

--------------------------------------

instance Expr Integer where
  lit = id
  mul = (*)
  add = (+)

-- | Boolean semantics: positive literals are 'True',
-- addition is disjunction, multiplication is conjunction.
instance Expr Bool where
  lit n = n > 0
  mul = (&&)
  add = (||)

newtype MinMax = MinMax Integer deriving (Eq, Show)

-- | Lattice semantics: multiplication takes the minimum,
-- addition takes the maximum.
instance Expr MinMax where
  lit = MinMax
  mul (MinMax x) (MinMax y) = MinMax (min x y)
  add (MinMax x) (MinMax y) = MinMax (max x y)

newtype Mod7 = Mod7 Integer deriving (Eq, Show)

-- | Arithmetic modulo 7. Note: literals are deliberately not reduced
-- mod 7, matching the original definition.
instance Expr Mod7 where
  lit = Mod7
  mul (Mod7 x) (Mod7 y) = Mod7 ((x * y) `mod` 7)
  add (Mod7 x) (Mod7 y) = Mod7 ((x + y) `mod` 7)

-- | Polymorphic test expression; instantiate at any 'Expr' type.
testExp :: Expr a => Maybe a
testExp = parseExp lit add mul "(3*-4) + 55"

testInteger :: Maybe Integer
testInteger = testExp

testBool :: Maybe Bool
testBool = testExp

testMM :: Maybe MinMax
testMM = testExp

testSat :: Maybe Mod7
testSat = testExp

-----------------------------------------------------------------------------

-- | Expressions that may contain named variables.
class HasVars a where
  var :: String -> a

data VarExprT = VLit Integer
              | VAdd VarExprT VarExprT
              | VMul VarExprT VarExprT
              | Var String
  deriving (Show, Eq)

instance Expr VarExprT where
  lit = VLit
  mul = VMul
  add = VAdd

instance HasVars VarExprT where
  var = Var

-- | A variable is a lookup in the supplied environment.
instance HasVars (M.Map String Integer -> Maybe Integer) where
  var = M.lookup

-- | Interpret an expression as a function from a variable environment to
-- a result; any unbound variable makes the whole result 'Nothing'.
-- Both operations are written applicatively for consistency.
instance Expr (M.Map String Integer -> Maybe Integer) where
  lit i _   = Just i
  mul f g m = (*) <$> f m <*> g m
  add f g m = (+) <$> f m <*> g m

-- | Evaluate an expression-with-variables against a binding list.
-- (The expression parameter is named @expr@ to avoid shadowing
-- Prelude's 'exp'.)
withVars :: [(String, Integer)]
         -> (M.Map String Integer -> Maybe Integer)
         -> Maybe Integer
withVars vs expr = expr (M.fromList vs)
flocknroll/haskell_cis194
H5/Calc.hs
mit
2,230
0
10
645
865
458
407
69
1
-- file: ch04/FixLines.hs
--
-- Normalise the line endings of a text file: read an input file, split
-- it on any of \n, \r or \r\n, and write the result back using the
-- platform's native line terminator.
import System.Environment (getArgs)

-- | Apply a @String -> String@ transformation to the contents of the
-- first file, writing the result to the second.
interactWith :: (String -> String) -> FilePath -> FilePath -> IO ()
interactWith function inputFile outputFile = do
  input <- readFile inputFile
  writeFile outputFile (function input)

-- | Entry point: expects exactly two arguments, the input and output paths.
main :: IO ()
main = mainWith myFunction
  where mainWith function = do
          args <- getArgs
          case args of
            [input, output] -> interactWith function input output
            _ -> putStrLn "error: exactly two arguments needed"

        -- replace "id" with the name of our function, below
        myFunction = fixLines

-- | Split a string on any (system independent) newline, i.e. \n, \r or \r\n
splitLines :: String -> [String]
splitLines [] = []
splitLines cs =
  let (pre, suf) = break isLineTerminator cs
  in pre : case suf of
             ('\r':'\n':rest) -> splitLines rest
             ('\r':rest)      -> splitLines rest
             ('\n':rest)      -> splitLines rest
             _                -> []

-- | True for the carriage-return and line-feed characters.
isLineTerminator :: Char -> Bool
isLineTerminator c = c == '\r' || c == '\n'

-- | This function splits input using your improved splitLines, then
-- re-attaches the pieces in the platform's native format.
fixLines :: String -> String
fixLines input = unlines (splitLines input)
supermitch/learn-haskell
real-world-haskell/ch04/FixLines.hs
mit
1,171
0
12
341
282
142
140
23
4
{-# LANGUAGE OverloadedStrings #-}

-- | Decide whether a NetHack corpse is safe for the bot to eat.
module Bot.NetHack.EdibleCorpses
  ( isEdible )
  where

import qualified Data.Text as T

-- | True if a corpse with the given (possibly decorated) name is safe to
-- eat. The comparison is case-insensitive and tolerates surrounding
-- whitespace and a leading "partially eaten " marker.
isEdible :: T.Text -> Bool
isEdible name = isEdible' (stripPartiallyEaten $ T.strip $ T.toLower name)

-- | Drop a leading "partially eaten " marker, if present.
--
-- Uses 'T.stripPrefix' rather than the previous @isInfixOf@ test followed
-- by @T.drop 16@, which would have chopped the first 16 characters off a
-- name that merely *contained* the marker somewhere other than the front.
stripPartiallyEaten :: T.Text -> T.Text
stripPartiallyEaten txt =
  case T.stripPrefix "partially eaten " txt of
    Just rest -> rest
    Nothing   -> txt

-- | Membership test against the fixed whitelist of edible monsters.
-- (Replaces a ~120-equation pattern chain with a single table, so new
-- monsters can be added in one place.)
isEdible' :: T.Text -> Bool
isEdible' name = name `elem` edibleCorpses

-- | All corpse names the bot considers safe to eat.
edibleCorpses :: [T.Text]
edibleCorpses =
  [ "newt", "lichen", "jackal", "coyote", "owlbear", "floating eye"
  , "giant ant", "fire ant"
  , "black unicorn", "white unicorn", "gray unicorn"
  , "pony", "horse", "warhorse", "leocrotta"
  , "quivering blob", "gelatinous cube", "pyrolisk"
  , "fox", "wolf", "warg", "rothe"
  , "winter wolf", "winter wolf cub", "hell hound", "hell hound pup"
  , "panther", "jaguar", "tiger"
  , "gargoyle", "winged gargoyle"
  , "mind flayer", "master mind flayer"
  , "imp", "goblin", "hobgoblin"
  , "orc", "hill orc", "mordor orc", "uruk-hai", "orc shaman", "orc-captain"
  , "rock piercer", "iron piercer", "glass piercer"
  , "mumak", "wumpus", "titanothere", "baluchitherium"
  , "cave spider"
  , "black naga hatchling", "blue naga hatchling", "golden naga hatchling"
  , "guardian naga hatchling", "red naga hatchling"
  , "black naga", "blue naga", "golden naga", "guardian naga", "red naga"
  , "ape", "yeti", "mastodon", "monkey", "sasquatch"
  , "sewer rat", "giant rat", "rock mole", "woodchuck"
  , "baby long worm", "baby purple worm", "long worm", "purple worm"
  , "zruty", "raven", "shrieker", "red mold", "brown mold"
  , "stone giant", "hill giant", "fire giant", "frost giant", "storm giant"
  , "ettin", "minotaur", "jabberwock"
  , "ogre", "ogre lord", "ogre king"
  , "gray ooze", "rust monster", "disenchanter", "garter snake"
  , "troll", "ice troll", "rock troll", "water troll", "olog-hai"
  , "umber hulk", "wraith", "carnivorous ape", "flesh golem"
  , "gecko", "iguana", "baby crocodile", "crocodile"
  , "baby gray dragon", "baby silver dragon", "baby red dragon"
  , "baby white dragon", "baby orange dragon", "baby black dragon"
  , "baby blue dragon", "baby green dragon", "baby yellow dragon"
  , "gray dragon", "silver dragon", "red dragon", "white dragon"
  , "orange dragon", "black dragon", "blue dragon", "green dragon"
  , "yellow dragon"
  ]
Noeda/adeonbot
bot/src/Bot/NetHack/EdibleCorpses.hs
mit
4,173
0
9
670
1,115
563
552
137
2
{-# LANGUAGE NoImplicitPrelude #-} module Rx.Observable.Timeout where import Prelude.Compat import Control.Exception (toException) import Control.Monad (void, when) import Tiempo (TimeInterval, seconds) import Rx.Disposable (dispose, newBooleanDisposable, newSingleAssignmentDisposable, setDisposable, toDisposable) import Rx.Scheduler (Async, Scheduler, newThread, scheduleTimed) import Rx.Observable.Types -------------------------------------------------------------------------------- data TimeoutOptions a = TimeoutOptions { _timeoutInterval :: TimeInterval , _timeoutStartAfterFirst :: Bool , _completeOnTimeout :: Bool , _resetTimeoutWhen :: a -> Bool , _timeoutScheduler :: Scheduler Async } timeoutScheduler :: Scheduler Async -> TimeoutOptions a -> TimeoutOptions a timeoutScheduler scheduler opts = opts { _timeoutScheduler = scheduler } timeoutDelay :: TimeInterval -> TimeoutOptions a -> TimeoutOptions a timeoutDelay interval opts = opts { _timeoutInterval = interval } startAfterFirst :: TimeoutOptions a -> TimeoutOptions a startAfterFirst opts = opts { _timeoutStartAfterFirst = True } resetTimeoutWhen :: (a -> Bool) -> TimeoutOptions a -> TimeoutOptions a resetTimeoutWhen resetQuery opts = opts { _resetTimeoutWhen = resetQuery } completeOnTimeout :: TimeoutOptions a -> TimeoutOptions a completeOnTimeout opts = opts { _completeOnTimeout = True } -------------------------------------------------------------------------------- timeoutWith :: (TimeoutOptions a -> TimeoutOptions a) -> Observable Async a -> Observable Async a timeoutWith modFn source = Observable $ \observer -> do sourceDisposable <- newSingleAssignmentDisposable timerDisposable <- newBooleanDisposable subscription <- main sourceDisposable timerDisposable observer setDisposable sourceDisposable subscription return $ toDisposable sourceDisposable `mappend` toDisposable timerDisposable where defOpts = TimeoutOptions { _timeoutInterval = seconds 0 , _timeoutStartAfterFirst = False , _completeOnTimeout = 
False , _resetTimeoutWhen = const True , _timeoutScheduler = newThread } opts = modFn defOpts scheduler = _timeoutScheduler opts interval = _timeoutInterval opts shouldResetTimeout = _resetTimeoutWhen opts main sourceDisposable timerDisposable observer = do resetTimeout subscribe source onNext_ onError_ onCompleted_ where onTimeout = if _completeOnTimeout opts then onCompleted observer else onError observer $ toException TimeoutError resetTimeout = do timer <- scheduleTimed scheduler interval $ do onTimeout void $ dispose sourceDisposable -- This will automatically dispose the previous timer setDisposable timerDisposable timer onNext_ v = do onNext observer v when (shouldResetTimeout v) resetTimeout onError_ = onError observer onCompleted_ = onCompleted observer timeout :: TimeInterval -> Observable Async a -> Observable Async a timeout interval = timeoutWith (timeoutDelay interval)
roman/Haskell-Reactive-Extensions
rx-core/src/Rx/Observable/Timeout.hs
mit
3,332
0
16
775
700
365
335
79
2
{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE OverloadedStrings #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module GhostLang.Types ( Declaration (..) , Label , ModuleSegment , Weight , GhostModule (..) , ModuleDecl (..) , ImportDecl (..) , TimeUnit (..) , Payload (..) , Pace (..) , Method (..) , Content (..) , Value (..) , Program (..) , Pattern (..) , Procedure (..) , Operation (..) ) where import Data.List (intercalate) import Data.Serialize (Serialize (..), getByteString, putByteString) import Data.Text (Text) import Data.Text.Encoding (encodeUtf8, decodeUtf8) import GHC.Generics (Generic) import GHC.Int (Int64) import Text.Parsec.Pos ( SourcePos , sourceName , sourceLine , sourceColumn , newPos ) import qualified Data.Text as T type Label = Text type ModuleSegment = FilePath type Weight = Int64 -- | Type class to help work with declarations. class Declaration a where expName :: a -> ModuleSegment srcPos :: a -> SourcePos srcPos = error "Not implemented" -- | Description of a ghost-lang module. This is the representation -- produced of the parser when reading a gl-file. One or several ghost -- modules are linked to a program. data GhostModule a where GhostModule :: ModuleDecl -> [ImportDecl] -> [Pattern a] -> [Procedure a] -> GhostModule a deriving (Eq, Generic, Show) -- | A module declaration, specifying the full path name of the -- module. data ModuleDecl = ModuleDecl !SourcePos ![ModuleSegment] deriving (Generic, Show) -- | An import declaration, specifying the full path name of the -- imported module. newtype ImportDecl = ImportDecl [ModuleSegment] deriving (Eq, Generic, Show) -- | A unit for describing size of time. data TimeUnit = USec Value | MSec Value | Sec Value deriving (Eq, Generic, Show) -- | A unit for describing payload size. data Payload = B Value | KB Value | MB Value | GB Value deriving (Eq, Generic, Show) -- | A unit for describing network pace. 
data Pace = Bps Value | Kbps Value | Mbps Value | Gbps Value deriving (Eq, Generic, Show) -- | Http method. data Method = GET | POST | PUT deriving (Eq, Generic, Show) -- | Content flags. High level descriptions for what become http -- headers flags on both client and server sides. data Content = Audio | Html | Image | M2M | Script | Video deriving (Eq, Generic, Show) -- | Representation of numeric values in ghost-lang. data Value = Literal !Int64 -- ^ A constant, literal, value. | Uniform !Int64 !Int64 -- ^ A randomly - uniform - distributed value. | Gaussian !Int64 !Int64 -- ^ A randomly - normal - distributed value. | Stored !Label -- ^ An indirect value, looked up from store by -- name. After lookup is become one of the above. deriving (Eq, Generic, Show) -- | A representation of a linked ghost-lang program. A program has a -- set of patterns. data Program a where Program :: ![Pattern a] -> Program a deriving (Eq, Generic, Show) -- | Pattern is a top level procedure carrying a statistical weight -- and a set of operations. data Pattern a where Pattern :: !SourcePos -> !Label -> !Weight -> ![Operation a] -> Pattern a deriving (Generic, Show) -- | Procedure is a ghost-lang construct for making reusable building -- blocks. A procedure is carrying a set of parameter names and a set -- of operations. data Procedure a where Procedure :: !Label -> ![Label] -> ![Operation a] -> Procedure a deriving (Eq, Generic, Show) -- | Operation is the lowest level entity that can be controlled in a -- ghost-lang program. data Operation a where Invoke :: !a -> Operation a -- ^ Invoke is the operation of invoking a simple element of the -- ghost language. E.g. invoking a http get. Loop :: !Value -> ![Operation a] -> Operation a -- ^ Loop the set of operations the number of times specified by -- the value. Concurrently :: ![Operation a] -> Operation a -- ^ Concurrently execute the operations. The Concurrently command -- is ready when the last concurrent operation is done. 
Call :: !(Procedure a) -> ![Value] -> Operation a -- ^ Call is the operation of calling a procedure. The procedure -- is given its arguments in a local context of the Interpreter -- monad. Unresolved :: !SourcePos -> !Label -> ![Value] -> Operation a -- ^ Unresolved procedure. deriving (Generic, Show) -- | Expand the list of module segments to a module name. expandModuleName :: [ModuleSegment] -> FilePath expandModuleName = intercalate "." -- | Declaration instances. instance Declaration ModuleDecl where expName (ModuleDecl _ xs) = expandModuleName xs srcPos (ModuleDecl pos _) = pos instance Declaration ImportDecl where expName (ImportDecl xs) = expandModuleName xs -- | Tailor made Eq instance for ModuleDecl. instance Eq ModuleDecl where (ModuleDecl _ x) == (ModuleDecl _ x') = x == x' -- | Taillor made Eq instance for Pattern. instance Eq a => Eq (Pattern a) where (Pattern _ x y z) == (Pattern _ x' y' z') = x == x' && y == y' && z == z' -- | Tailor made Eq instance for Operation. instance Eq a => Eq (Operation a) where (Invoke x) == (Invoke x') = x == x' (Loop x y) == (Loop x' y') = x == x' && y == y' (Concurrently x) == (Concurrently x') = x == x' (Call x y) == (Call x' y') = x == x' && y == y' (Unresolved _ x y) == (Unresolved _ x' y') = x == x' && y == y' _ == _ = False instance Serialize Value instance Serialize a => Serialize (Program a) instance Serialize a => Serialize (Pattern a) instance Serialize a => Serialize (Procedure a) instance Serialize a => Serialize (Operation a) -- | Serialize instance for Text as it's not included in the Cereal -- library. instance Serialize Text where put t = do put $ T.length t putByteString $ encodeUtf8 t get = do len <- get decodeUtf8 <$> getByteString len -- | Ditto SourcePos. instance Serialize SourcePos where put s = do put $ sourceName s put $ sourceLine s put $ sourceColumn s get = newPos <$> get <*> get <*> get
kosmoskatten/ghost-lang
ghost-lang/src/GhostLang/Types.hs
mit
6,592
0
12
1,817
1,548
844
704
168
1
import System.Posix.Eventfd import System.Environment import System.Exit import Control.Concurrent import Control.Monad main = do xs <- getArgs when (length xs < 1) $ do putStrLn "Usage, <num>..." exitFailure efd <- eventfdSem 0 [] -- TODO: use safe flags forkIO $ do forM_ xs $ \x -> do putStrLn $ "Child writing " ++ x ++ " to efd" eventfdUpMany efd (fromIntegral $ read x) threadDelay 2000000 eventfdDown efd putStrLn $ "parent read" eventfdDown efd putStrLn $ "parent read"
qnikst/eventfd
examples/sym.hs
mit
533
0
17
132
170
78
92
20
1
{-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} module Views.Pages.NotFound (notFoundView) where import BasicPrelude import Text.Blaze.Html5 (Html, (!)) import qualified Text.Blaze.Html5 as H import qualified Text.Blaze.Html5.Attributes as A import Routes (Route) import Views.Layout (layoutView) notFoundView :: Route -> Html notFoundView currentRoute = let pageTitle = "Not Found" in layoutView currentRoute pageTitle $ do H.div ! A.class_ "page-header" $ H.h1 "Not found" H.p $ "We couldn't find what you were looking for."
nicolashery/example-marvel-haskell
Views/Pages/NotFound.hs
mit
566
0
13
88
139
81
58
15
1
module FileAPI.Client ( fileAPIClient ) where import Servant.Client (client) import FileAPI.API (fileAPIProxy) fileAPIClient = client fileAPIProxy
houli/distributed-file-system
dfs-shared/src/FileAPI/Client.hs
mit
154
0
5
23
39
23
16
5
1
{-# LANGUAGE OverloadedStrings #-} module Ketchup.Routing ( Route , match , prefix , route , useHandler ) where import qualified Data.ByteString.Char8 as B import Ketchup.Httpd import Ketchup.Utils import Network type Route = Socket -> HTTPRequest -> (B.ByteString -> Maybe B.ByteString) -> IO () type Matcher = B.ByteString -> (Bool, Arguments) data Arguments = None | Parameters [(B.ByteString, B.ByteString)] deriving Show -- |Router function -- Takes a list of routes and iterates through them for every requeust route :: [(Matcher, Route)] -- ^ Routes -> Handler route [] handle _ = sendNotFound handle route (r:routes) handle request | isMatch = (snd r) handle request (get params) | otherwise = route routes handle request where (isMatch, params) = (fst r) (uri request) -- |Wrap a handler in a route -- Lets you use a handler (no parameters) as a route useHandler :: Handler -> Route useHandler handler hnd req _ = handler hnd req -- |Create a matchable template with parameters (:param) match :: B.ByteString -> Matcher match template url = (isMatch, Parameters params) where (isMatch, params) = parse urlparts temparts [] urlparts = B.split '/' url temparts = B.split '/' template parse :: [B.ByteString] -> [B.ByteString] -> [(B.ByteString, B.ByteString)] -> (Bool, [(B.ByteString, B.ByteString)]) parse [] [] params = (True, params) parse [""] [] params = (True, params) parse _ [] _ = (False, []) parse [] _ _ = (False, []) parse (u:url) (t:temp) params | B.length t < 1 = parse url temp params | B.length u < 1 = parse url (t:temp) params | B.head t == ':' = parse url temp ((B.tail t, u) : params) | u == t = parse url temp params | otherwise = (False, []) -- |Create a prefix matcher prefix :: B.ByteString -> Matcher prefix urlPrefix url = (B.isPrefixOf urlPrefix url, None) get :: Arguments -> B.ByteString -> Maybe B.ByteString get (Parameters params) x = lookup x params get None _ = Nothing
Hamcha/ketchup
Ketchup/Routing.hs
mit
2,207
0
11
616
758
405
353
49
1
{-# LANGUAGE ScopedTypeVariables, TemplateHaskell #-} module Sort_MergeSortBottomUp2 where import Test.QuickCheck import Test.QuickCheck.Poly import Test.QuickCheck.All import Data.List( sort, delete ) -------------------------------------------------------------------------------- msort :: Ord a => [a] -> [a] msort = merging . risers {- risers :: Ord a => [a] -> [[a]] risers [] = [] risers [x] = [[x]] risers (x:xs) = case risers xs of (y:ys):yss | x <= y -> (x:y:ys):yss yss -> [x]:yss -} risers :: Ord a => [a] -> [[a]] risers [] = [] risers [x] = [[x]] risers (x:y:xs) | x <= y = case risers (y:xs) of ys:yss -> (x:ys):yss | otherwise = [x] : risers (y:xs) merging :: Ord a => [[a]] -> [a] merging [] = [] merging [xs] = xs merging xss = merging (pairwise xss) pairwise (xs:ys:xss) = xs `merge` ys : pairwise xss pairwise xss = xss merge :: Ord a => [a] -> [a] -> [a] [] `merge` ys = ys xs `merge` [] = xs (x:xs) `merge` (y:ys) | x <= y = x : xs `merge` (y:ys) | otherwise = y : (x:xs) `merge` ys -------------------------------------------------------------------------------- ordered :: Ord a => [a] -> Bool ordered [] = True ordered [x] = True ordered (x:y:xs) = x <= y && ordered (y:xs) count :: Eq a => a -> [a] -> Integer count x [] = 0 count x (y:xs) | x == y = 1 + count x xs | otherwise = count x xs isPermutation :: Eq a => [a] -> [a] -> Bool [] `isPermutation` ys = null ys (x:xs) `isPermutation` ys = x `elem` ys && xs `isPermutation` delete x ys -------------------------------------------------------------------------------- prop_SortSorts (xs :: [OrdA]) = ordered (msort xs) prop_SortPermutes x (xs :: [OrdA]) = count x (msort xs) == count x xs prop_SortPermutes' (xs :: [OrdA]) = msort xs `isPermutation` xs prop_SortIsSort (xs :: [OrdA]) = msort xs == sort xs -------------------------------------------------------------------------------- return [] testAll = $(quickCheckAll) --------------------------------------------------------------------------------
koengit/induction-examples
Sort_MergeSortBottomUp2.hs
mit
2,197
0
11
532
846
449
397
48
1
mySqr :: [Integer] mySqr = [x^2 | x <- [1..5]] myCube :: [Integer] myCube = [y^3 | y <- [1..5]] main :: IO () main = do print (length [(x, y) | x <- mySqr, y <- myCube, x < 50 && y < 50])
candu/haskellbook
ch9/squareCube.hs
mit
191
0
14
48
135
73
62
7
1
{- This file is part of the Haskell package thetvdb. It is subject to the license terms in the LICENSE file found in the top-level directory of this distribution and at git://pmade.com/thetvdb/LICENSE. No part of themoviedb package, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the LICENSE file. -} module Network.API.TheTVDB.Types.Season (Season(..), SeasonID, SeasonNum) where import Network.API.TheTVDB.Types.API (UniqueID) import Network.API.TheTVDB.Types.Episode (Episode) type SeasonID = UniqueID type SeasonNum = UniqueID data Season = Season { seasonID :: SeasonID , seasonNumber :: SeasonNum , episodeList :: [Episode] } deriving (Eq, Show)
pjones/thetvdb
Network/API/TheTVDB/Types/Season.hs
mit
740
0
9
117
104
69
35
10
0
module Main where import GameInit import Linear.V2 import qualified Physics.Demo.IOWorld as IOWorld main :: IO () main = let window = V2 800 600 scale = V2 40 40 in runMain "physics test" window $ IOWorld.demoMain (fmap fromIntegral window) scale
ublubu/shapes
shapes-demo/src/Main.hs
mit
280
0
10
73
86
46
40
9
1
module Raindrops (convert) where convert :: Integer -> String convert n = if result == "" then show n else result where result = factorThree n ++ factorFive n ++ factorSeven n factorThree :: Integer -> String factorThree n = if n `mod` 3 == 0 then "Pling" else "" factorFive :: Integer -> String factorFive n = if n `mod` 5 == 0 then "Plang" else "" factorSeven :: Integer -> String factorSeven n = if n `mod` 7 == 0 then "Plong" else ""
xTVaser/Exercism-Solutions
haskell/raindrops/src/Raindrops.hs
mit
482
0
9
130
170
94
76
16
2
{- First we define the function prototype-} len :: [a] -> Integer {-Then we define the patterns for functions-} len [] = 0 len (x:xs) = 1 + len xs {- the main function -} main = print (len [1,2,3,4])
creativcoder/recurse
list_len.hs
mit
202
0
8
42
74
41
33
4
1
{-# LANGUAGE QuasiQuotes, TemplateHaskell, TypeFamilies, OverloadedStrings, ScopedTypeVariables #-} import Yesod.Core import Yesod.WebSockets import qualified Data.Text.Lazy as TL import Control.Monad (forever) import Control.Monad.Trans.Reader import Control.Concurrent (threadDelay) import Data.Time import Conduit import Data.Monoid ((<>)) import Control.Concurrent.STM.Lifted import Data.Text (Text) import UnliftIO.Exception (try, SomeException) data App = App (TChan Text) instance Yesod App mkYesod "App" [parseRoutes| / HomeR GET |] chatApp :: WebSocketsT Handler () chatApp = do sendTextData ("Welcome to the chat server, please enter your name." :: Text) name <- receiveData sendTextData $ "Welcome, " <> name App writeChan <- getYesod readChan <- atomically $ do writeTChan writeChan $ name <> " has joined the chat" dupTChan writeChan (e :: Either SomeException ()) <- try $ race_ (forever $ atomically (readTChan readChan) >>= sendTextData) (sourceWS $$ mapM_C (\msg -> atomically $ writeTChan writeChan $ name <> ": " <> msg)) atomically $ case e of Left _ -> writeTChan writeChan $ name <> " has left the chat" Right () -> return () getHomeR :: Handler Html getHomeR = do webSockets chatApp defaultLayout $ do [whamlet| <div #output> <form #form> <input #input autofocus> |] toWidget [lucius| \#output { width: 600px; height: 400px; border: 1px solid black; margin-bottom: 1em; p { margin: 0 0 0.5em 0; padding: 0 0 0.5em 0; border-bottom: 1px dashed #99aa99; } } \#input { width: 600px; display: block; } |] toWidget [julius| var url = document.URL, output = document.getElementById("output"), form = document.getElementById("form"), input = document.getElementById("input"), conn; url = url.replace("http:", "ws:").replace("https:", "wss:"); conn = new WebSocket(url); conn.onmessage = function(e) { var p = document.createElement("p"); p.appendChild(document.createTextNode(e.data)); output.appendChild(p); }; /* 
******************************************************************************************************* The following code demonstrates one way to prevent timeouts. The "if" test is added to prevent chat participants from getting the ping message “dcba” every twenty seconds. It also prevents participants from receiving any message ending with “dcba” sent by any chat participant. “ e.data.split("").reverse().join("").substring(0,4)” changes, for example, “user:abc123dcba” to “abcd321cba:resu” and grabs the first four characters; i.e., “abcd”. Messages are broadcast only if the last four characters are not “dcba”. Note that the variable "t" controls the length of the timeout period. t = 3 allows one minute of inactivity. t = 30 allows ten minutes, and t = 180 allows an hour. The value inserted below is 360 (2 hours). */ conn.onmessage = function(e) { var p = document.createElement("p"); p.appendChild(document.createTextNode(e.data)); if (e.data.split("").reverse().join("").substring(0,4) != "abcd") { output.appendChild(p); } }; var t = 360 setInterval (function () { t = t - 1; if (t > 0) { conn.send("dcba"); } }, 20000); /* ****************************************************************************************************** */ form.addEventListener("submit", function(e){ conn.send(input.value); input.value = ""; e.preventDefault(); }); |] main :: IO () main = do chan <- atomically newBroadcastTChan warp 3000 $ App chan
yesodweb/yesod
yesod-websockets/chat-with-timeout-control.hs
mit
4,337
0
19
1,377
445
231
214
43
2
module Y2017.M02.D16.Exercise where import Data.Probability {-- A Genotype is a paired value a trait dominance, so that for some trait, T, the genotype of a purely dominant trait is TT, mixed (showing dominant) is Tt and a purely recessive trait is tt. Note that a mixed trait Tt is always written thus (so Tt == tT, as it were). Declare a Genotype that takes Char values to represent the alleles. --} data Genotype = GenotypeDeclarationYouCreate deriving (Eq, Ord, Show) {-- Now, in conjugation, each parent gene donates either allele with equal likelihood to the child gene. So, for example, you have two parents, one with genotype Aa, the other with genotype AA, the child has one of the following four possible genes: Aa x AA = (AA, AA, Aa, Aa) or, simplifying: (AA, Aa) each option equally likely So, today's Haskell problem. Given a genotype, one from each parent, define the function, cross, that gives a probability distribution of the resulting genotype in the offspring. --} cross :: Genotype -> Genotype -> Prob Genotype cross a b = undefined -- What is the Genotype of a child whose parents are a = "Aa" b = "AA"? -- (as a probability distribution) parentsA, parentsB :: (Genotype, Genotype) parentsA = undefined -- Aa, AA -- What is the Genotype of a child whose parents are a = "Aa" b ="Aa"? parentsB = undefined -- Aa, Aa -- Okay, what is the Genotype of the child of the children of parents A and B? -- That is to say, the grandchild of parentsA and parentsB? -- Hm, how would you define that, as the parents are now the children who -- are probability distributions. How do we call cross not with plain genotypes, -- but with probability distribuions? -- What is the probability that this grandchild is purely recessive? (aa) -- What is the probability that this grandchild is purely dominant? 
(AA) -- Taking a leaf from the flipThree example from Data.Probability: -- Let's say the parents have two children each (total of four children) -- And let's say those children marry (no incest) and let's say those new -- parents have two children each. -- 1. How many grandchildren are there? -- What is the probability that 1 or more of the children is mixed (Aa)? -- What is the probability that 2 or more of the children are mixed? atLeast :: Eq a => Prob a -> a -> Int -> Int -> Rational atLeast distribution outcome k n = undefined -- where k is the number of outcomes and n is the total number of events or -- specifically: k is the number of Aa genotype offspring and n is all children -- You can also make a an Ord instance if that helps you -- Hint: the probability of x outcomes is what? (think: combinations), so -- the probability of at least x outcomes is x outcomes + x+1 outcomes + ... -- 2. Of course, organisms have more than just one genotype. Tomorrow we'll -- consider multiple genotypes.
geophf/1HaskellADay
exercises/HAD/Y2017/M02/D16/Exercise.hs
mit
2,856
0
9
545
157
99
58
11
1
factorial :: Integer -> Integer factorial 1 = 1 factorial n = n * factorial (n - 1) solve = sum . map (read . return) . show . factorial $ 100
jwtouron/haskell-play
ProjectEuler/Problem20.hs
mit
144
0
11
33
71
36
35
4
1
{-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE Strict #-} module SmallGL.RenderingMapTiles ( MapTile (..) , RenderMTilesProgram () , initMapTilesProgram , renderMapTiles , addMapTile , clearMapTiles , setMapTileOpacity ) where import JavaScript.WebGL import Numeric.DataFrame import Commons.NoReflex import SmallGL.Types import SmallGL.Shader data MapTile = MapTile { mtVertexBuffer :: !WebGLBuffer , mtTexture :: !WebGLTexture } data RenderMTilesProgram = RenderMTilesProgram { rmtRenderProg :: !RenderingProgram , rmtTexOpacity :: !Scf , rmtTiles :: ![MapTile] , rmtTexCoordsBuffer :: !WebGLBuffer } setMapTileOpacity :: Scf -> RenderMTilesProgram -> RenderMTilesProgram setMapTileOpacity o p = p { rmtTexOpacity = o} initMapTilesProgram :: WebGLRenderingContext -> Scf -> IO RenderMTilesProgram initMapTilesProgram gl rmtTexOpacity = do shader <- initShaders gl [(gl_VERTEX_SHADER, vertexMapShaderText) ,(gl_FRAGMENT_SHADER, fragmentMapShaderText) ] [(attrLocCoords, "aVertexPosition") ,(attrLocTexCoords,"aTextureCoord") ] let uProjLoc = unifLoc shader "uProjM" uViewLoc = unifLoc shader "uViewM" uCustomLoc3 = unifLoc shader "uSampler" uCustomLoc4 = unifLoc shader "uClippingDist" uCustomLoc5 = unifLoc shader "uTexOpacity" rmtRenderProg = RenderingProgram {..} rmtTiles = [] rmtTexCoordsBuffer <- createBuffer gl texCoordsIO <- unsafeArrayThaw texCoords bindBuffer gl gl_ARRAY_BUFFER $ Just rmtTexCoordsBuffer bufferData' gl gl_ARRAY_BUFFER texCoordsIO gl_STATIC_DRAW return RenderMTilesProgram {..} addMapTile :: WebGLRenderingContext -> DataFrame Float '[4,4] -- ^ four vertices -> TexImageSource -> RenderMTilesProgram -> IO RenderMTilesProgram addMapTile gl vertices tileImg rmtProg = do mtVertexBuffer <- createBuffer gl verticesIO <- unsafeArrayThaw vertices bindBuffer gl gl_ARRAY_BUFFER $ Just mtVertexBuffer bufferData' gl gl_ARRAY_BUFFER verticesIO gl_STATIC_DRAW mtTexture <- createTexture gl 
bindTexture gl gl_TEXTURE_2D $ Just mtTexture pixelStorei gl gl_UNPACK_FLIP_Y_WEBGL 1 texImage2DImg gl gl_TEXTURE_2D 0 gl_RGBA gl_RGBA gl_UNSIGNED_BYTE tileImg setTexParameters gl bindTexture gl gl_TEXTURE_2D Nothing return rmtProg { rmtTiles = MapTile {..} : rmtTiles rmtProg } -- | Clear tiles only. Keeps the program itself clearMapTiles :: WebGLRenderingContext -> RenderMTilesProgram -> IO RenderMTilesProgram clearMapTiles gl rmtp = do bindTexture gl gl_TEXTURE_2D Nothing bindBuffer gl gl_ARRAY_BUFFER Nothing forM_ (rmtTiles rmtp) $ \MapTile{..} -> deleteBuffer gl mtVertexBuffer >> deleteTexture gl mtTexture return rmtp { rmtTiles = [] } renderMapTiles :: WebGLRenderingContext -> ProjMatrix -> ViewMatrix -> RenderMTilesProgram -> IO () renderMapTiles _ _ _ rmtp | null (rmtTiles rmtp) = return () renderMapTiles gl uProjM uViewM RenderMTilesProgram {..} = do useProgram gl . programId $ shader rmtRenderProg enableCoordsBuf gl enableTexCoordsBuf gl activeTexture gl gl_TEXTURE0 -- supply shader with uniforms uniformMatrix4fv gl (uProjLoc rmtRenderProg) False (getProjM uProjM) uniformMatrix4fv gl (uViewLoc rmtRenderProg) False (getViewM uViewM) uniform1fv gl (uClippingDistLoc rmtRenderProg) (projMToClippingDist uProjM) uniform1i gl (uSamplerLoc rmtRenderProg) 0 uniform1fv gl (uTexOpacityLoc rmtRenderProg) rmtTexOpacity depthMask gl False bindBuffer gl gl_ARRAY_BUFFER (Just rmtTexCoordsBuffer) >> setTexCoordsBuf gl forM_ rmtTiles $ \MapTile{..} -> do bindTexture gl gl_TEXTURE_2D $ Just mtTexture bindBuffer gl gl_ARRAY_BUFFER (Just mtVertexBuffer) >> setCoordsBuf gl drawArrays gl gl_TRIANGLE_STRIP 0 4 bindTexture gl gl_TEXTURE_2D Nothing depthMask gl True disableCoordsBuf gl disableTexCoordsBuf gl fragmentMapShaderText :: JSString fragmentMapShaderText = [jsstring| precision mediump float; uniform sampler2D uSampler; uniform float uTexOpacity; varying vec2 vTextureCoord; varying vec3 vDist; void main(void) { mediump float fade = clamp(#{x} - dot(vDist,vDist), 0.0, 1.0) * 
uTexOpacity; gl_FragColor = texture2D(uSampler, vec2(vTextureCoord.s, vTextureCoord.t)) * fade; } |] where x = toJSString $ show fadeConst vertexMapShaderText :: JSString vertexMapShaderText = [jsstring| precision mediump float; attribute vec4 aVertexPosition; attribute vec2 aTextureCoord; uniform mat4 uViewM; uniform mat4 uProjM; uniform float uClippingDist; varying vec3 vDist; varying vec2 vTextureCoord; void main(void) { vec4 globalPos = uViewM * aVertexPosition; gl_Position = uProjM * globalPos; vDist = globalPos.xyz/(globalPos.w*uClippingDist*#{x}); vTextureCoord = aTextureCoord; } |] where x = toJSString . show $ 1 / sqrt fadeConst texCoords :: DataFrame GLushort '[2,4] texCoords = vec2 minBound minBound <::> vec2 maxBound minBound <+:> vec2 minBound maxBound <+:> vec2 maxBound maxBound
achirkin/qua-view
src/SmallGL/RenderingMapTiles.hs
mit
5,481
0
14
1,255
1,134
555
579
129
1
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances #-} {- | Module : ./RelationalScheme/Logic_Rel.hs Description : Instance of class Logic for Rel Copyright : (c) Dominik Luecke, Uni Bremen 2008 License : GPLv2 or higher, see LICENSE.txt Maintainer : [email protected] Stability : experimental Portability : non-portable (imports Logic.Logic) Instance of class Logic for RelationalSchemes -} module RelationalScheme.Logic_Rel where import Common.DocUtils import Common.Id import Data.Monoid import qualified Data.Set as Set import Logic.Logic import RelationalScheme.AS import RelationalScheme.Sign import RelationalScheme.ParseRS import RelationalScheme.ATC_RelationalScheme () import RelationalScheme.StaticAnalysis data RelScheme = RelScheme deriving (Show) instance Language RelScheme where description _ = "Simple logic for Relational Schemes" -- | Instance of Category for Rel instance Category Sign -- sign RSMorphism -- mor where dom = domain cod = codomain ide = idMor composeMorphisms = comp_rst_mor -- | Instance of Sentences for Rel instance Sentences RelScheme Sentence Sign RSMorphism RSSymbol where -- there is nothing to leave out simplify_sen RelScheme _ form = form print_named _ = printAnnoted pretty . 
fromLabelledSen map_sen RelScheme = map_rel instance Monoid RSRelationships where mempty = RSRelationships [] nullRange mappend (RSRelationships l1 r1) (RSRelationships l2 r2) = RSRelationships (l1 ++ l2) $ appRange r1 r2 instance Monoid RSTables where mempty = emptyRSSign mappend (RSTables s1) (RSTables s2) = RSTables $ Set.union s1 s2 instance Monoid RSScheme where mempty = RSScheme mempty mempty nullRange mappend (RSScheme l1 s1 r1) (RSScheme l2 s2 r2) = RSScheme (mappend l1 l2) (mappend s1 s2) $ appRange r1 r2 -- | Syntax of Rel instance Syntax RelScheme RSScheme RSSymbol () () where parse_basic_spec RelScheme = Just parseRSScheme parse_symb_items _ = Nothing parse_symb_map_items _ = Nothing -- | Instance of Logic for Relational Schemes instance Logic RelScheme () -- Sublogics RSScheme -- basic_spec Sentence -- sentence () -- symb_items () -- symb_map_items Sign -- sign RSMorphism -- morphism RSSymbol -- symbol RSRawSymbol -- raw_symbol () -- proof_tree where stability RelScheme = Experimental -- | Static Analysis for Rel instance StaticAnalysis RelScheme RSScheme -- basic_spec Sentence -- sentence () -- symb_items () -- symb_map_items Sign -- sign RSMorphism -- morphism RSSymbol -- symbol RSRawSymbol -- raw_symbol where basic_analysis RelScheme = Just basic_Rel_analysis empty_signature RelScheme = emptyRSSign is_subsig RelScheme = isRSSubsig subsig_inclusion RelScheme = rsInclusion signature_union RelScheme = uniteSig
gnn/Hets
RelationalScheme/Logic_Rel.hs
gpl-2.0
3,396
0
9
1,071
554
299
255
68
0
module Web.Stegosaurus.Exceptions where import Prelude data PropNotFound = PropNotFound Text deriving (Typeable) instance Show PropNotFound where show (PropNotFound p') = let p = unpack p' in "Property " ++ p ++ " was not found in the response" instance Exception PropNotFound data CouldNotDecode = CouldNotDecode deriving (Typeable) instance Show CouldNotDecode where show _ = "Could not decode JSON" instance Exception CouldNotDecode data BadDate = BadDate String deriving (Typeable) instance Show BadDate where show (BadDate d) = "Could not parse date: " ++ d instance Exception BadDate
glittershark/stegosaurus
Web/Stegosaurus/Exceptions.hs
gpl-3.0
618
0
10
114
163
84
79
15
0
{-# LANGUAGE RecordWildCards, OverloadedStrings #-} module Bot.URLSpec where import Bot.URL import Test.Hspec import Test.QuickCheck import Bot.Types import Data.ByteString hiding (any, concat) import qualified Data.Text as T import Data.Time.Clock import Control.Auto import Control.Auto.Blip.Internal newtype TestBotResponse = TestBotResponse {getTestBotResponse :: (BotResponse ByteString)} deriving (Eq, Show) newtype TestBlip = TestBlip {getBlip :: Blip [String]} deriving (Show) instance Eq TestBlip where (TestBlip (Blip f)) == (TestBlip (Blip s)) = f == s (TestBlip NoBlip) == (TestBlip NoBlip) = True (TestBlip (Blip _)) == (TestBlip NoBlip) = False (TestBlip NoBlip) == (TestBlip (Blip _)) = False instance Arbitrary TestBotResponse where arbitrary = let html = "text/html; charset=utf-8" html1 = "text/html" gif = "image/gif" jpeg = "image/jpeg" video = "video/mp4" in do contentType' <- elements [html, html1, gif , jpeg, video] return $ TestBotResponse $ BotResponse "" contentType' "" propHandleResponse :: TestBotResponse -> Bool propHandleResponse resp = let texty = T.unwords $ handleResponse $ getTestBotResponse resp contentType' = contentType $ getTestBotResponse resp in if (any (== contentType') ["text/html", "text/html; charset=utf-8"]) then (T.isInfixOf "Title: " texty) == True else (T.isInfixOf "Content-Type: " texty) == True main :: IO () main = hspec spec spec :: Spec spec = describe "URL tests: " $ do urlTests botTests botTests :: Spec botTests = do describe "Bot reacts to URLs" $ do it "Is NoBlips non URLs" $ do now <- getCurrentTime f <- streamAuto (emitJusts (const Nothing)) (["a"] :: [String]) s <- streamAuto (urlBot :: RoomBot IO) [InMessage "slug" "Rando message" "#slug-chan" now] (fmap TestBlip f) `shouldBe` (fmap TestBlip s) it "Non empty for URLs" $ do pendingWith "Can't mock a network call." 
urlTests :: Spec urlTests = do describe "Extracts URL details given a URL" $ do it "Has Title for type html and Content-Type for anything else" $ property $ forAll (arbitrary :: Gen TestBotResponse) propHandleResponse
urbanslug/nairobi-bot
tests/Bot/URLSpec.hs
gpl-3.0
2,258
0
18
497
681
356
325
57
2
{-# LANGUAGE CPP #-} ---------------------------------------------------------------------- -- | -- Module : Text.TeX.Lexer.TokenParser.Execution -- Copyright : 2015-2017 Mathias Schenner, -- 2015-2016 Language Science Press. -- License : GPL-3 -- -- Maintainer : [email protected] -- Stability : experimental -- Portability : GHC -- -- Expanding parsers for TeX tokens and execution of TeX primitives. ---------------------------------------------------------------------- module Text.TeX.Lexer.TokenParser.Execution ( -- * Expanding token parsers texLexer ) where #if MIN_VERSION_base(4,8,0) -- Prelude exports all required operators from Control.Applicative #else import Control.Applicative ((<*), (<*>), (*>), (<$), (<$>)) #endif import Control.Monad ((>=>)) import Data.Maybe (fromMaybe) import Data.Time (formatTime, defaultTimeLocale, ZonedTime(..), LocalTime(..), TimeOfDay(..)) import Text.TeX.Lexer.Catcode import Text.TeX.Lexer.Macro import Text.TeX.Lexer.Token import Text.TeX.Lexer.TokenParser.Basic import Text.TeX.Lexer.TokenParser.Core import Text.TeX.Lexer.TokenParser.Expansion import Text.TeX.Lexer.TokenParser.State ---------------------------------------- Expanding token parsers ---------- Main document parser -- | TeX Lexer: Convert TeX source document to a 'Token' stream. texLexer :: HandleTeXIO m => LexerT m [Token] texLexer = optional anyWhite *> tokens <* eof ---------- Multi-token parsers with expansion -- Parse many tokens using 'token'. tokens :: HandleTeXIO m => LexerT m [Token] tokens = concat <$> many token -- Parse a /logical unit/ of tokens. This is either a single 'Token' -- ('TeXChar', 'CtrlSeq', 'Param') or a group or a possibly empty list -- of tokens resulting from macro expansion. We try to parse as little -- structure as possible. Still we need to recognize groups because -- many lexer-level commands have group scope (e.g. @\\catcode@). 
token :: HandleTeXIO m => LexerT m [Token] token = anyWhite <|> group <|> ctrlseq <|> count 1 (param <|> someChar) -- Parse a balanced TeX group as a flat token list including delimiters. group :: HandleTeXIO m => LexerT m [Token] group = (fmap (++) . (:)) <$> bgroup <*> tokens <*> count 1 egroup -- Parse a control sequence and try to expand or execute it. -- If no definition is known, return the token unmodified. ctrlseq :: HandleTeXIO m => LexerT m [Token] ctrlseq = do t@(CtrlSeq name active) <- ctrlseqNoExpand st <- getState case lookupMacroCmd (name, active) st of Just m@MacroCmdUser{} -> [] <$ expand m Just (MacroCmdPrim p) | p `elem` primitiveConstants -> return [t] | otherwise -> executePrimitive p Just (MacroCmdChar ch cc) -> [] <$ prependTokens [TeXChar ch cc] Nothing -> return [t] ---------------------------------------- Execute TeX primitives -- | Meanings of primitives. -- -- These internal executable commands cannot be changed by the user. primitiveMeanings :: HandleTeXIO m => [(Primitive, LexerT m [Token])] primitiveMeanings = [ ("begingroup", [bgroupTok] <$ modifyState (pushGroup NativeGroup)) , ("endgroup", [egroupTok] <$ modifyState (popGroup NativeGroup)) , ("bgroup", [bgroupTok] <$ modifyState (pushGroup AnonymousGroup)) , ("egroup", [egroupTok] <$ modifyState (popGroup AnonymousGroup)) , ("(", return [mathTok]) , (")", return [mathTok]) , ("[", return (replicate 2 mathTok)) , ("]", return (replicate 2 mathTok)) , ("begin", beginEnvironment) , ("end", endEnvironment) , ("catcode", catcode) , ("def", def) , ("let", letmeaning) , ("iftrue", iftrue) , ("iffalse", iffalse) , ("char", count 1 chr) , ("number", numbertoks) , ("NewDocumentCommand", declareDocumentCommand MacroNew) , ("RenewDocumentCommand", declareDocumentCommand MacroRenew) , ("ProvideDocumentCommand", declareDocumentCommand MacroProvide) , ("DeclareDocumentCommand", declareDocumentCommand MacroDeclare) , ("NewDocumentEnvironment", declareDocumentEnvironment MacroNew) , 
("RenewDocumentEnvironment", declareDocumentEnvironment MacroRenew) , ("ProvideDocumentEnvironment", declareDocumentEnvironment MacroProvide) , ("DeclareDocumentEnvironment", declareDocumentEnvironment MacroDeclare) , ("IfBooleanTF", xparseif trueTok) , ("IfNoValueTF", xparseif noValueTok) , ("newcommand", newcommand MacroNew) , ("renewcommand", newcommand MacroRenew) , ("providecommand", newcommand MacroProvide) , ("DeclareRobustCommand", newcommand MacroDeclare) , ("newenvironment", newenvironment MacroNew) , ("renewenvironment", newenvironment MacroRenew) , ("input", readInputFile) , ("include", readInputFile) , ("year", fmtDate "%Y") , ("month", fmtDate "%-m") -- no padding , ("day", fmtDate "%-d") -- no padding , ("time", fmtTime) , ("meaning", meaning) , ("undefined", error "undefined control sequence") ] -- Primitive constants have no direct implementation, -- they are used as constants in restricted contexts. primitiveConstants :: [Primitive] primitiveConstants = ["else", "fi"] -- A list of primitives that start a new conditional. conditionalHeads :: [Primitive] conditionalHeads = ["iftrue", "iffalse"] -- | Execute a primitive command. executePrimitive :: HandleTeXIO m => Primitive -> LexerT m [Token] executePrimitive name = fromMaybe throwError $ lookup name primitiveMeanings where throwError = error $ "Call to undefined primitive: " ++ name -------------------- Handle builtin macros ---------- Builtin macros: file input readInputFile :: HandleTeXIO m => LexerT m [Token] readInputFile = [] <$ (filename >>= (handleReadFile >=> prependString)) ---------- Builtin macros: date and time -- Convert current datetime to a string representation, -- using 'formatTime'. fmtDate :: HandleTeXIO m => String -> LexerT m [Token] fmtDate fmt = let dateFormat = formatTime defaultTimeLocale fmt in (mkString . maybe "" dateFormat) <$> handleReadDate -- Calculate time of day in minutes since midnight -- for the TeX primitive @time@. 
fmtTime :: HandleTeXIO m => LexerT m [Token] fmtTime = let toMinutes tod = 60 * todHour tod + todMin tod getMinutes = show . toMinutes . localTimeOfDay . zonedTimeToLocalTime in (mkString . maybe "" getMinutes) <$> handleReadDate ---------- Builtin macros: meaning -- Assign new meaning to a control sequence. letmeaning :: Monad m => LexerT m [Token] letmeaning = do (CtrlSeq name active) <- ctrlseqNoExpand <?> "macro name" equals m <- meaningCtrlSeq <|> meaningChar let val = case m of MeaningChar ch cc -> MacroCmdChar ch cc MeaningMacro cmd -> cmd MeaningUndef -> MacroCmdPrim "undefined" let key = (name, active) modifyState (registerLocalMacroCmd (key, val)) return [] -- Show the current meaning of a control sequence or a character. meaning :: Monad m => LexerT m [Token] meaning = (mkQuote . showMeaning) <$> (meaningCtrlSeq <|> meaningChar) -- Parse a control sequence and return its meaning. meaningCtrlSeq :: Monad m => LexerT m Meaning meaningCtrlSeq = do (CtrlSeq name active) <- ctrlseqNoExpand st <- getState return (getMacroMeaning st (name, active)) -- Parse a single raw character (catcode-independent) and return its meaning. meaningChar :: Monad m => LexerT m Meaning meaningChar = getCharMeaning <$> getState <*> rawChar ---------- Builtin macros: numbers -- Parse a character by its number. We are treating these characters -- as Tokens with catcode 'Other'. chr :: Monad m => LexerT m Token chr = number >>= \chCode -> return (TeXChar (toEnum chCode) Other) -- Convert an internal integer to its string representation. numbertoks :: Monad m => LexerT m [Token] numbertoks = (map (`TeXChar` Other) . show) <$> number ---------- Builtin macros: conditionals -- Expand a conditional to its left branch. iftrue :: HandleTeXIO m => LexerT m [Token] iftrue = [] <$ conditionalPush True -- Expand a conditional to its right branch. 
iffalse :: HandleTeXIO m => LexerT m [Token] iffalse = [] <$ conditionalPush False -- Expand a conditional and push the resulting token list -- back into the stream (for group detection). conditionalPush :: HandleTeXIO m => Bool -> LexerT m () conditionalPush = conditional >=> prependTokens -- Parse a conditional and return its left or right branch, -- depending on the provided flag. -- -- The flag argument indicates whether the condition is true. -- Note: Conditional nesting is independent of grouping, -- so groups are not parsed here. conditional :: HandleTeXIO m => Bool -> LexerT m [Token] conditional b = do let (l,r) = (b, not b) -- branch expansion modes (leftToks, rightToks) <- condBranches (l,r) [] return $ if b then leftToks else rightToks -- Parse the two branches of a conditional. -- -- The flag arguments indicate whether to expand the branches. condBranches :: HandleTeXIO m => (Bool, Bool) -> [Token] -> LexerT m ([Token], [Token]) condBranches (expandLeft,expandRight) ltoks = do t <- tokenCond expandLeft s <- getState let means = meansPrimitive s case t of [CtrlSeq n a] | (n, a) `means` "fi" -> return (ltoks, []) | (n, a) `means` "else" -> (,) ltoks <$> condRightBranch expandRight [] | any ((n, a) `means`) conditionalHeads -> -- handle embedded conditional in dead branch condBranches (False, False) [] *> condBranches (expandLeft, expandRight) ltoks | otherwise -> condBranches (expandLeft, expandRight) (ltoks ++ t) _ -> condBranches (expandLeft, expandRight) (ltoks ++ t) -- Parse the second branch of a conditional. -- -- The flag argument indicates whether to expand the branch. 
condRightBranch :: HandleTeXIO m => Bool -> [Token] -> LexerT m [Token] condRightBranch expandMode toks = do t <- tokenCond expandMode s <- getState let means = meansPrimitive s case t of [CtrlSeq n a] | (n, a) `means` "fi" -> return toks | any ((n, a) `means`) conditionalHeads -> -- handle embedded conditional in dead branch condBranches (False, False) [] *> condRightBranch expandMode toks | otherwise -> condRightBranch expandMode (toks ++ t) _ -> condRightBranch expandMode (toks ++ t) -- Parse a token in a conditional. -- -- Note: Grouping characters are parsed literally. tokenCond :: HandleTeXIO m => Bool -> LexerT m [Token] tokenCond expandMode = parseUntilNonEmpty $ anyWhite <|> (if expandMode then ctrlseq else count 1 ctrlseqNoExpand) <|> count 1 (charcc Bgroup <|> charcc Egroup <|> param <|> someChar) -- Evaluate an xparse-style conditional. -- -- Note: In LaTeX3 this is defined via @\\ifx@. xparseif :: HandleTeXIO m => Token -> LexerT m [Token] xparseif t = do rs <- token <* skipOptSpace stripBraces <$> case stripBraces rs of [r] | r == t -> token <* skipOptSpace <* token _ -> token *> skipOptSpace *> token ---------- Builtin macros: catcodes -- Parse the body of a @catcode@ command, execute it (by changing the -- current catcode table) and remove catcode command from the token stream. catcode :: Monad m => LexerT m [Token] catcode = do chCode <- number equals ccNew <- number -- Note: @toEnum@ will fail if not in range 0-15. modifyState (addCatcode (toEnum chCode, toEnum ccNew)) return [] ---------- Builtin macros: TeX macro definitions -- Parse a macro definition, execute it (by updating the list of -- active macros) and remove the @def@ command from the token stream. 
def :: Monad m => LexerT m [Token] def = do (CtrlSeq name active) <- ctrlseqNoExpand <?> "macro name" context <- macroContextDefinition <?> "macro context definition" body <- grouped tokensNoExpand let key = (name, active) modifyState (registerLocalMacroCmd (key, MacroCmdUser key (def2xparse context) body)) return [] -- Parse a macro context definition. Similar to 'tokens', but -- must not contain 'Bgroup' (so do not include 'group' parser). macroContextDefinition :: Monad m => LexerT m [Token] macroContextDefinition = concat <$> many (anyWhite <|> count 1 (param <|> ctrlseqNoExpand <|> someChar)) -- Convert def-style macro context to an xparse argspec. -- -- The number of tokens mapped to a parameter depends on its successor. -- * single token if followed by another 'Param' or nil, or -- * list of tokens if followed by a literal token ('CtrlSeq' or 'TeXChar'). def2xparse :: [Token] -> ArgSpec def2xparse [] = [] def2xparse [Param _ _] = [Mandatory] def2xparse [t] = [LiteralToken t] def2xparse (Param _ _:ts@(Param _ _:_)) = Mandatory : def2xparse ts def2xparse (Param _ _:t2:ts) = Until [t2] : def2xparse ts def2xparse (t1:ts@(_:_)) = LiteralToken t1 : def2xparse ts ---------- Builtin macros: LaTeX3 (xparse) macro definitions -- Parse and register an xparse macro definition. declareDocumentCommand :: HandleTeXIO m => MacroDefinitionMode -> LexerT m [Token] declareDocumentCommand defMode = do (CtrlSeq name active) <- optGrouped ctrlseqNoExpand <?> "macro name" context <- argspec <?> "macro argspec" body <- grouped tokensNoExpand let key = (name, active) modifyState $ registerMacroCmd defMode (key, MacroCmdUser key context body) return [] -- Parse and register an xparse environment definition. 
declareDocumentEnvironment :: HandleTeXIO m => MacroDefinitionMode -> LexerT m [Token] declareDocumentEnvironment defMode = do name <- grouped tokensNoExpand <?> "environment name" context <- argspec <?> "environment argspec" startCode <- grouped tokensNoExpand <?> "environment start code" endCode <- grouped tokensNoExpand <?> "environment end code" modifyState $ registerMacroEnv defMode (name, MacroEnv name context startCode endCode) return [] -- Parse a full xparse-style argument specification. argspec :: HandleTeXIO m => LexerT m ArgSpec argspec = grouped (skipOptSpace *> many argtype) <* skipOptSpace -- Parse a single xparse-style argument type. -- -- Not implemented: 'v' (verbatim), '>' (argument processor). argtype :: HandleTeXIO m => LexerT m ArgType argtype = optional (char '+' Other) *> choice [ Mandatory <$ letter 'm' , Until <$> (letter 'u' *> (grouped tokens <|> count 1 singleToken)) , UntilCC Bgroup <$ letter 'l' , Delimited <$> (letter 'r' *> singleToken) <*> singleToken <*> return Nothing , Delimited <$> (letter 'R' *> singleToken) <*> singleToken <*> (Just <$> grouped tokens) , OptionalGroup (mkOther '[') (mkOther ']') Nothing <$ letter 'o' , OptionalGroup <$> (letter 'd' *> singleToken) <*> singleToken <*> return Nothing , OptionalGroup (mkOther '[') (mkOther ']') <$> (letter 'O' *> (Just <$> grouped tokens)) , OptionalGroup <$> (letter 'D' *> singleToken) <*> singleToken <*> (Just <$> grouped tokens) , OptionalGroupCC Bgroup Egroup Nothing <$ letter 'g' , OptionalGroupCC Bgroup Egroup <$> (letter 'G' *> (Just <$> grouped tokens)) , OptionalToken (mkOther '*') <$ letter 's' , OptionalToken <$> (letter 't' *> singleToken) ] <* skipOptSpace ---------- Builtin macros: LaTeX2e macro definitions -- Parse and register a LaTeX2e macro definition. 
newcommand :: Monad m => MacroDefinitionMode -> LexerT m [Token] newcommand defMode = do optional (char '*' Other) -- ignore 'long' property (CtrlSeq name active) <- optGrouped ctrlseqNoExpand <?> "macro name" context <- latexMacroContext body <- grouped tokensNoExpand <|> count 1 singleToken let key = (name, active) modifyState $ registerMacroCmd defMode (key, MacroCmdUser key context body) return [] -- Parse and register a LaTeX2e environment definition. newenvironment :: HandleTeXIO m => MacroDefinitionMode -> LexerT m [Token] newenvironment defMode = do name <- grouped tokens <?> "environment name" context <- latexMacroContext startCode <- grouped tokensNoExpand <?> "environment start code" endCode <- grouped tokensNoExpand <?> "environment end code" modifyState $ registerMacroEnv defMode (name, MacroEnv name context startCode endCode) return [] -- Parse a LaTeX2e macro context definition and -- convert it to an xparse-style ArgSpec. latexMacroContext :: Monad m => LexerT m ArgSpec latexMacroContext = do numArgs <- option 0 (bracketed singleDigit) let open = mkOther '[' close = mkOther ']' optArg <- optionMaybe (balanced open close) let context = case optArg of Just d -> OptionalGroup open close (Just d) : replicate (numArgs-1) Mandatory Nothing -> replicate numArgs Mandatory return context -------------------- Handle LaTeX environments (named groups) -- Start TeX group and try to expand user-defined environment definitions. beginEnvironment :: HandleTeXIO m => LexerT m [Token] beginEnvironment = do name <- envName -- Note: expansion must be enabled because we are expanding 'begin' st <- getState case lookupMacroEnv (stripBraces name) st of Nothing -> let grp = NamedGroup (stripBraces name) in modifyState (pushGroup grp) *> return (mkCtrlSeq "begin": name) Just envdef -> do (startCode, endCode) <- expandEnvironment envdef modifyState . 
pushGroup $ DefinedGroup (stripBraces name) startCode endCode prependTokens startCode return [] -- Close matching TeX group and inject end code for user-defined environments. endEnvironment :: HandleTeXIO m => LexerT m [Token] endEnvironment = do name <- envName let endEnv = mkCtrlSeq "end": name grp <- getGroup <$> getState case grp of (DefinedGroup name' _ endCode) -> if null endCode then do -- close group modifyState . popGroup $ DefinedGroup (stripBraces name) [] [] return [] else do -- inject end code modifyState . setGroup $ DefinedGroup name' [] [] prependTokens (endCode ++ endEnv) return [] _ -> -- stack group is not a defined group if getGroupName grp == stripBraces name then do modifyState (popGroup (NamedGroup (stripBraces name))) return endEnv else do -- There is a mismatch between the stream group and the stack group. -- This may happen if the start code and end code portions of a -- newenvironment definition span named groups themselves. -- We attempt to find the stream group as a defined group in the -- full lexer stack and inject its end code into the stream. let definedGroup = DefinedGroup (stripBraces name) [] [] endCode <- getGroupEndCode definedGroup <$> getState modifyState (clearGroup definedGroup) prependTokens (endCode ++ endEnv) return [] -- Parse the name of a LaTeX environment, including group delimiters. -- -- We currently allow arbitrary token lists. envName :: HandleTeXIO m => LexerT m [Token] envName = group
synsem/texhs
src/Text/TeX/Lexer/TokenParser/Execution.hs
gpl-3.0
19,177
0
19
3,937
4,692
2,447
2,245
330
4
-- |Vorple is a framework for serving a JSON application over HTTP via WAI. module Web.Vorple ( -- * Types Vorple() -- * Running a Vorple application , defaultOptions , vorple , vorpleIO , vorpleT -- * Options , Options(..) , LogLevel(..) , MonadOptions(..) -- * HTTP errors , HttpStatus(..) , throwStatus , catchStatus -- * Logging functions -- $logging , debug , info , warn , err , crit -- * Re-exported functions , ask , asks , get , put , modify , liftIO , deriveJSON ) where import Control.Monad.Error import Control.Monad.Identity import Control.Monad.Reader import Control.Monad.State import Data.Maybe import System.IO (stderr) import System.Random import qualified Data.ByteString.Lazy as BS import qualified Network.HTTP.Types as H import qualified Network.Wai as W import Web.Vorple.Cookie import Web.Vorple.Log import Web.Vorple.Text import Web.Vorple.Types import Web.Vorple.Util -- $logging -- Logging uses the 'MonadWriter' instance of 'Vorple'. Convenience functions -- for logging are provided which use Template Haskell. These functions accept -- a /format string/ in the style of @printf@, but with a different set of -- format codes. The Template Haskell function then splices in a function which -- takes one argument for each hole left by a format code in the format string. -- -- The format codes: -- -- * @%b@ accepts a 'ByteString' -- -- * @%s@ accepts a 'Text' -- -- * @%j@ accepts anything with a 'ToJSON' instance -- -- * @%p@ accepts anything with a 'Show' instance require :: (MonadError HttpStatus m) => Bool -> m () require c = when (not c) $ throwStatus H.status400 requireJust :: (MonadError HttpStatus m) => Maybe a -> m a requireJust = maybe (throwStatus H.status400) return randomKey :: Int -> IO [Word8] randomKey n = mapM (const $ getStdRandom random) [1 .. n] -- |Make an 'Application' from a request handler with any inner monad vorpleT :: forall a b e m s. (Monad m, FromJSON a, ToJSON b, FromJSON s, ToJSON s, Eq s) => (forall x. 
m x -> IO x) -- ^The runner for the inner monad -> Options -- ^Options -> e -- ^The initial environment -> s -- ^The default session state -> (a -> Vorple e s m b) -- ^The request handler -> W.Application -- ^The application vorpleT runner opts env emptySession handler req = liftIO $ do appKey <- maybe (randomKey 32) return $ optAppKey opts (result, log) <- flip (flip runInternal opts) env $ do when (W.requestMethod req /= H.methodPost) $ throwStatus H.status405 $(say "Got a POST") maybeInput <- liftIO $ decodeJSONSource $ W.requestBody req input <- maybe (throwStatus H.status400) return maybeInput $(say "Got JSON data") cookie <- getCookie appKey (W.requestHeaders req) (setCsrfKey, session) <- case cookie of Nothing -> do $(say "No cookie; generating CSRF key") csrfKey <- liftIO (randomKey 32) >>= return . encodeBase64 return (csrfKey, emptySession) Just cookie -> do $(say "Got cookie") session <- requireJust $ decodeJSON $ cAppData cookie return (cCsrfKey cookie, session) (response, nextSession) <- case csrfData input of Nothing -> return (Nothing, session) Just inputData -> do require $ csrfKey input == setCsrfKey $(say "CSRF key matches") (response, nextSession) <- mapInternal (liftIO . runner) $ runVorpleInternal (handler inputData) session return (Just response, nextSession) $(say "Ran request handler") cookie <- if isNothing cookie || session /= nextSession then setCookie appKey setCsrfKey (encodeJSON nextSession) >>= return . 
Just else return Nothing $(say "About to return response") return (Csrf setCsrfKey response, cookie) BS.hPutStr stderr log let { (status, body, cookie) = case result of Left status -> (getStatus status, encodeJSONBuilder (), Nothing) Right (body, cookie) -> (H.status200, encodeJSONBuilder body, cookie) } return $ W.ResponseBuilder status (maybeToList cookie) body -- |Make an 'Application' from an 'IO' request handler vorpleIO :: (FromJSON a, ToJSON b, FromJSON s, ToJSON s, Eq s) => Options -- ^Options -> e -- ^The initial environment -> s -- ^The default session state -> (a -> Vorple e s IO b) -- ^The request handler -> W.Application -- ^The application vorpleIO = vorpleT id -- |Make an 'Application' from a pure request handler vorple :: (FromJSON a, ToJSON b, FromJSON s, ToJSON s, Eq s) => Options -- ^Options -> e -- ^The initial environment -> s -- ^The default session state -> (a -> Vorple e s Identity b) -- ^The request handler -> W.Application -- ^The application vorple = vorpleT $ return . runIdentity
ktvoelker/vorple
src/Web/Vorple.hs
gpl-3.0
5,073
0
23
1,336
1,281
686
595
-1
-1
{-# LANGUAGE NoImplicitPrelude, TupleSections #-} module Graphics.UI.GLFW.Events ( KeyEvent(..), Event(..), Result(..) , eventLoop ) where import Prelude.Compat import Data.IORef import qualified Graphics.UI.GLFW as GLFW -- this is the reification of the callback information: data GLFWRawEvent = RawCharEvent Char | RawKeyEvent GLFW.Key Int GLFW.KeyState GLFW.ModifierKeys | RawWindowRefresh | RawWindowClose deriving (Show, Eq) data KeyEvent = KeyEvent { keKey :: GLFW.Key , keScanCode :: Int , keState :: GLFW.KeyState , keModKeys :: GLFW.ModifierKeys , keChar :: Maybe Char } deriving (Show, Eq) -- This is the final representation we expose of events: data Event = EventKey KeyEvent | EventWindowClose | EventWindowRefresh deriving (Show, Eq) data Result = ResultNone | ResultDidDraw | ResultQuit deriving (Show, Eq, Ord) instance Monoid Result where mempty = ResultNone mappend = max fromChar :: Char -> Maybe Char fromChar char -- Range for "key" characters (keys for left key, right key, etc.) | '\57344' <= char && char <= '\63743' = Nothing | otherwise = Just char translate :: [GLFWRawEvent] -> [Event] translate [] = [] translate (RawWindowClose : xs) = EventWindowClose : translate xs translate (RawWindowRefresh : xs) = EventWindowRefresh : translate xs translate (RawKeyEvent key scanCode keyState modKeys : RawCharEvent char : xs) = EventKey (KeyEvent key scanCode keyState modKeys (fromChar char)) : translate xs translate (RawKeyEvent key scanCode keyState modKeys : xs) = EventKey (KeyEvent key scanCode keyState modKeys Nothing) : translate xs translate (RawCharEvent _ : xs) = translate xs atomicModifyIORef_ :: IORef a -> (a -> a) -> IO () atomicModifyIORef_ var f = atomicModifyIORef var ((, ()) . 
f) rawEventLoop :: GLFW.Window -> ([GLFWRawEvent] -> IO Result) -> IO () rawEventLoop win eventsHandler = do eventsVar <- newIORef [RawWindowRefresh] let addEvent event = atomicModifyIORef_ eventsVar (event:) addKeyEvent key scanCode keyState modKeys = addEvent $ RawKeyEvent key scanCode keyState modKeys charEventHandler = addEvent . RawCharEvent setCallback f cb = f win $ Just $ const cb loop = do GLFW.pollEvents let handleReversedEvents rEvents = ([], reverse rEvents) events <- atomicModifyIORef eventsVar handleReversedEvents res <- eventsHandler events case res of ResultNone -> loop ResultDidDraw -> GLFW.swapBuffers win >> loop ResultQuit -> return () setCallback GLFW.setCharCallback charEventHandler setCallback GLFW.setKeyCallback addKeyEvent setCallback GLFW.setWindowRefreshCallback $ addEvent RawWindowRefresh setCallback GLFW.setWindowSizeCallback . const . const $ addEvent RawWindowRefresh setCallback GLFW.setWindowCloseCallback $ addEvent RawWindowClose loop eventLoop :: GLFW.Window -> ([Event] -> IO Result) -> IO () eventLoop win handler = rawEventLoop win (handler . translate)
rvion/lamdu
bottlelib/Graphics/UI/GLFW/Events.hs
gpl-3.0
3,300
0
17
870
900
461
439
76
3
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeOperators #-} module Main where import NLP.Summarizer import Control.Monad import Data.Maybe (fromMaybe) import qualified Data.Text.IO as TIO import Options.Generic data CliOptions = CliOptions { ratio :: Maybe Int <?> "summarization % [default = 20%]" , dictionary :: Maybe Text <?> "dictionary to use" , input :: Text <?> "text to summarize" -- , output :: Maybe FilePath <?> "output file [default = stdout]" -- , html :: Bool <?> "output as html" , keywords :: Maybe Bool <?> "only output keywords" -- , about :: Bool <?> "only output the summary" -- , version :: Bool <?> "show version information" } deriving (Generic, Show) instance ParseRecord CliOptions main :: IO () main = do cliOptions <- (getRecord "Test program" :: IO CliOptions) let txt = unHelpful (input cliOptions) dict = fromMaybe "es" $ unHelpful (dictionary cliOptions) ratio' = fromMaybe 20 $ unHelpful (ratio cliOptions) showKeywords = fromMaybe False $ unHelpful (keywords cliOptions) let result = summarize dict ratio' txt mapM_ TIO.putStrLn (summarySentences result) when showKeywords $ mapM_ TIO.putStrLn (summaryKeywords result) pure ()
rainbyte/summarizer
app/Main.hs
gpl-3.0
1,282
0
13
243
300
156
144
29
1
{-# LANGUAGE RankNTypes, KindSignatures #-} data El :: * -> * data Dl :: * -> * data Bl :: * -> * data B :: * -> * data C :: * -> * data Cl :: * -> * data X :: * -> * data Y :: * -> * data D :: * -> * data Al :: * -> * f :: forall p0 c b d y . (forall p x . p (B (Bl x)) -> p (Bl (B x))) -> (forall p x . p (B ( c (D x))) -> p (Bl ( c (Dl x)))) -> (forall p x . p (Dl (D x)) -> p (D (Dl x))) -> (forall p x . p (Al (Bl (Bl x))) -> p (Al (X x))) -> (forall p x . p (X (Bl x)) -> p (B (X x))) -> (forall p x . p (B ( b (Cl ( d (D x))))) -> p (Bl (Bl ( c (Dl (Dl x)))))) -> (forall p x . p (Dl (Y x)) -> p (Y (D x))) -> (forall p x . p (Dl (Dl (El x))) -> p (Y (El x))) -> (forall p x . p (X ( c (Y x))) -> p ( b (Cl ( d x)))) -> p0 (Al (Bl (Bl ( c (Dl (Dl (El y))))))) f = undefined k8 :: forall p x . p (Dl (Dl (El x))) -> p (Y (El x)) k8 = undefined k7 :: forall p x . p (Dl (Y x)) -> p (Y (D x)) k7 = undefined k6 :: forall p x . p (X (Cl (Y x))) -> p (Bl (Cl (Dl x))) k6 = undefined k5 :: forall p x . p (X (Bl x)) -> p (B (X x)) k5 = undefined k4 :: forall p x . p (Al (Bl (Bl x))) -> p (Al (X x)) k4 = undefined k3 :: forall p x . p (Dl (D x)) -> p (D (Dl x)) k3 = undefined k2 :: forall p x . p (B (Cl (D x))) -> p (Bl (Cl (Dl x))) k2 = undefined k1 :: forall p x . p (B (Bl x)) -> p (Bl (B x)) k1 = undefined h :: (Al (Bl (Bl ( Cl (Dl (Dl (El y))))))) h = f k1 k2 k3 k4 k5 (\ c -> k2 (k1 (k3 c))) k7 k8 k6 -- f a1 a2 a3 a4 a5 b a7 a8 a6 = b (a6 (a5 (a7 (a4 (a8 (f a1 (\ c1 -> a2 (a1 (a3 c1))) a3 a4 a5 (\ c1 -> a2 (a1 (a3 (a1 (a3 (b c1)))))) a7 a8 (\ c1 -> a6 (a5 (a7 c1)))))))))
Fermat/FCR
tests/cxt.hs
gpl-3.0
1,641
0
29
516
1,192
623
569
-1
-1
module Snorkels.Board ( Position , Snorkel (..) , Player , Group (..) , Piece (..) , Board (..) , isSnorkel , getPlayer , isBlocking -- * Checkers , isValid , isTrapped , hasLost -- * Generators , neighbours , allPositions , freePositions , growGroup , groupFrom , getGroups -- * Filters , areValid , areNeighbours , arePieces , areSnorkels , areFromPlayer -- * Manipulation , getPiece , putPiece , shufflePositions , throwStones ) where import Control.Monad (mfilter) import Data.Function import Data.Maybe import System.Random (RandomGen, randomRs) import qualified Data.Map.Strict as Map import qualified Data.Set as Set -- | Some (x, y) coordinate on the board type Position = (Int, Int) -- | Some player's pieces' color data Snorkel = Green | Purple | Red | Yellow | Cyan deriving (Show, Eq, Ord, Enum) -- | Each player has a distinctive color type Player = Snorkel -- | -- An horizontally or vertically connected group of 'Snorkel's that belong to -- the same 'Player' data Group = Group { positions :: Set.Set Position , player :: Player } deriving (Show, Eq, Ord) -- | -- Any type of piece on the board: either a 'Snorkel' or a 'Stone' data Piece = Snorkel Snorkel | Stone deriving (Show, Eq) data Board = Board { -- | Only 'Position's occupied by 'Piece's are here pieces :: Map.Map Position Piece -- | Width and height limits of the board , size :: (Int, Int) } deriving (Eq) -- This is just a shortcut for (isJust . getPlayer) -- | Check whether the 'Piece' is a 'Snorkel' isSnorkel :: Piece -> Bool isSnorkel Stone = False isSnorkel _ = True -- | Get the player owning the 'Piece' or 'Nothing' if the piece is a 'Stone' getPlayer :: Piece -> Maybe Player getPlayer Stone = Nothing getPlayer (Snorkel p) = Just p -- | -- Check whether the contents of a given 'Position' on the board suppose a block -- for a given 'Player'. Only 'Stone's and 'Snorkel's from a different 'Player' -- suppose a block. 
isBlocking :: Player -> Maybe Piece -> Bool isBlocking _ Nothing = False isBlocking player (Just piece) = maybe True (/= player) (getPlayer piece) -- | -- Given some @(min, max)@ bounds, check if an 'Int' is in them. -- @min@ is inclusive, @max@ isn't. inRange :: (Int, Int) -> Int -> Bool inRange (min, max) check = min <= check && check < max -- | -- Given some @(maxWidth, maxHeight)@ bounds and a 'Position', check whether the -- 'Position' is within those bounds. inBounds :: (Int, Int) -> Position -> Bool inBounds (maxX, maxY) (x, y) = inRange (0, maxX) x && inRange (0, maxY) y -- | -- Offset by @(x, y)@ a 'Position' to obtain a new one offset :: (Int, Int) -> Position -> Position offset (x, y) (x2, y2) = (x+x2, y+y2) -- | -- Given a 'Position', get the 'Set.Set' of 'Position's that are immediatelly -- above, under, to the left, or to the right. neighbours :: Position -> Set.Set Position neighbours position = Set.map (`offset` position) neighbourOffsets where neighbourOffsets = Set.fromList [(-1, 0), (1, 0), (0, -1), (0, 1)] -- | -- Check if some 'Position' is within the bounds of a board isValid :: Board -> Position -> Bool isValid board = inBounds $ board&size -- | -- Check if some 'Position's are within the bounds of a board areValid :: Board -> Set.Set Position -> Set.Set Position areValid board = Set.filter (isValid board) -- | -- Get all the 'Position's that are within a board allPositions :: Board -> Set.Set Position allPositions board = Set.fromList [(x, y) | x <- [0..width-1], y <- [0..height-1]] where (width, height) = board&size -- | -- Get all the 'Position's that are within a board and that haven't been already -- occupied. freePositions :: Board -> Set.Set Position freePositions board = Set.filter (flip Map.notMember $ board&pieces) . allPositions $ board -- | -- Get all the neighbour 'Position's of some 'Position's within a board. areNeighbours :: Board -> Set.Set Position -> Set.Set Position areNeighbours board positions = areValid board . 
flip Set.difference positions . Set.unions . map neighbours $ Set.toList positions -- | -- Filter 'Position's only leaving those which have a 'Piece' in some board. arePieces :: Board -> Set.Set Position -> Set.Set Position arePieces board = Set.intersection (Map.keysSet (board&pieces)) . areValid board -- | -- Filter 'Position's only leaving those which have a 'Snorkel' in some board. areSnorkels :: Board -> Set.Set Position -> Set.Set Position areSnorkels board = Set.filter (maybe False isSnorkel . getPiece board) . arePieces board -- | -- Filter 'Position's only leaving those which have a 'Snorkel' of the given -- 'Player' in some board. areFromPlayer :: Board -> Player -> Set.Set Position -> Set.Set Position areFromPlayer board player = Set.filter (maybe False fromPlayer . getPiece board) . areSnorkels board where fromPlayer = maybe False (player ==) . getPlayer -- | -- Put into a 'Group' 'Position's that are immediate neighbours. growGroup :: Board -> Group -> Group growGroup board initial | Set.null new = initial | otherwise = growGroup board group where group = Group {positions = Set.union initialPositions new, player = owner} new = areFromPlayer board owner $ areNeighbours board initialPositions initialPositions = initial&positions owner = initial&player -- TODO: Should this return a Maybe Group (to account for the possibility of the -- given position on the board being empty) or allow groups of empty positions -- too? Such groups might be useful for AI if we ever dare go there. 
-- | -- Form a 'Group' from an initial 'Position' putting all the 'Position's with -- 'Snorkel's from the same 'Player' that are connected, vertically or -- horizontally, and by 'Snorkel's of the same 'Player', to the initial -- 'Position' groupFrom :: Board -> Position -> Maybe Group groupFrom board pos = growGroup board <$> (groupForPlayer <$> owner) where groupForPlayer p = Group {positions = Set.singleton pos, player = p} owner = mfilter isSnorkel (getPiece board pos) >>= getPlayer -- | -- Get all the 'Group's on the board. getGroups :: Board -> Set.Set Group getGroups board = Set.map fromJust . Set.filter isJust . Set.map (groupFrom board) $ allPositions board -- | -- Check whether a given 'Group' is trapped by having all its surrounding -- positions taken by 'Stone's or 'Snorkel's from some other 'Player'. isTrapped :: Board -> Group -> Bool isTrapped board group = all (isBlocking (group&player) . getPiece board) (Set.toList $ areNeighbours board $ group&positions) -- | -- Check whether the given 'Player' has one of its snorkel 'Group's trapped. hasLost :: Board -> Player -> Bool hasLost board p = any (isTrapped board) (filter ((== p) . player) $ Set.toList $ getGroups board) -- | -- Get whatever is at the given 'Position' on the board. getPiece :: Board -> Position -> Maybe Piece getPiece board pos = Map.lookup pos $ board&pieces -- | -- Put a 'Piece' at the given 'Position' on the board. putPiece :: Board -> Position -> Piece -> Board putPiece board pos piece = board {pieces = Map.insert pos piece $ board&pieces} -- | -- Given a 'Set.Set' of 'Position's, get them in an ordered random list. shufflePositions :: RandomGen g => Set.Set Position -> g -> [Position] shufflePositions positions g = map (p !!) $ randomRs (0, length p - 1) g where p = Set.toList positions throwStone :: RandomGen g => Board -> g -> Either String Board throwStone board g | null $ freePositions board = Left "There is no place to throw a stone." 
| otherwise = Right $ putPiece board pos Stone where pos = head $ shufflePositions (freePositions board) g -- | -- Randomly throw the given number of 'Stone's on the board. throwStones :: RandomGen g => Board -> Int -> g -> Either String Board throwStones board 0 _ = Right board throwStones board n g = case throwStone board g of Right board -> throwStones board (n-1) g Left message -> Left message
unaizalakain/snorkels-hs
src/Snorkels/Board.hs
gpl-3.0
9,019
0
12
2,586
2,004
1,090
914
129
2
----------------------------------------------------------------------------- {- Copyright (C) 2004 Daan Leijen. This is free software; you can redistribute it and/or modify it under the terms described in the file "license.txt" at the root of the distribution. -} ----------------------------------------------------------------------------- {- | Maintainer : [email protected] Stability : provisional Portability : haskell98 Pretty print module based on Philip Wadlers /prettier printer/ * Philip Wadler, /A prettier printer/ Draft paper, April 1997, revised March 1998. <http://cm.bell-labs.com/cm/cs/who/wadler/papers/prettier/prettier.ps> -} ----------------------------------------------------------------------------- module PPrint ( Doc, Docs , Pretty(pretty,prettyList), putPretty , show, putDoc, hPutDoc , (<>) , (<+>) , (</>), (<//>) , (PPrint.<$>), (<$$>) , sep, fillSep, hsep, vsep , cat, fillCat, hcat, vcat , punctuate , align, hang, indent , fill, fillBreak , list, tupled, semiBraces, encloseSep , angles, langle, rangle , parens, lparen, rparen , braces, lbrace, rbrace , brackets, lbracket, rbracket , dquotes, dquote, squotes, squote , comma, space, dot, backslash , semi, colon, equals , string, bool, int, integer, float, double, rational , softline, softbreak , empty, char, text, line, linebreak, nest, group , column, nesting, width , SimpleDoc(..) , renderPretty, renderCompact , displayS, displayIO -- * Colors , Color(..) , color, bcolor , writeDoc ) where import System.IO -- (Handle,hPutStr,hPutChar,stdout,openFile,hClose) infixr 5 </>,<//>,<$>,<$$> infixr 6 <>,<+> ----------------------------------------------------------- -- list, tupled and semiBraces pretty print a list of -- documents either horizontally or vertically aligned. 
----------------------------------------------------------- list = encloseSep lbracket rbracket comma tupled = encloseSep lparen rparen comma semiBraces = encloseSep lbrace rbrace semi encloseSep left right sep ds = case ds of [] -> left <> right [d] -> left <> d <> right _ -> align (cat (zipWith (<>) (left : repeat sep) ds) <> right) ----------------------------------------------------------- -- punctuate p [d1,d2,...,dn] => [d1 <> p,d2 <> p, ... ,dn] ----------------------------------------------------------- punctuate p [] = [] punctuate p [d] = [d] punctuate p (d:ds) = (d <> p) : punctuate p ds ----------------------------------------------------------- -- high-level combinators ----------------------------------------------------------- sep = group . vsep fillSep = fold (</>) hsep = fold (<+>) vsep = fold (PPrint.<$>) cat = group . vcat fillCat = fold (<//>) hcat = fold (<>) vcat = fold (<$$>) fold f [] = empty fold f ds = foldr1 f ds x <> y = x `beside` y x <+> y = x <> space <> y x </> y = x <> softline <> y x <//> y = x <> softbreak <> y x <$> y = x <> line <> y x <$$> y = x <> linebreak <> y softline = group line softbreak = group linebreak squotes = enclose squote squote dquotes = enclose dquote dquote braces = enclose lbrace rbrace parens = enclose lparen rparen angles = enclose langle rangle brackets = enclose lbracket rbracket enclose l r x = l <> x <> r lparen = char '(' rparen = char ')' langle = char '<' rangle = char '>' lbrace = char '{' rbrace = char '}' lbracket = char '[' rbracket = char ']' squote = char '\'' dquote = char '"' semi = char ';' colon = char ':' comma = char ',' space = char ' ' dot = char '.' 
backslash = char '\\' equals = char '=' ----------------------------------------------------------- -- Combinators for prelude types ----------------------------------------------------------- -- string is like "text" but replaces '\n' by "line" string "" = empty string ('\n':s) = line <> string s string s = case (span (/='\n') s) of (xs,ys) -> text xs <> string ys bool :: Bool -> Doc bool b = text (show b) int :: Int -> Doc int i = text (show i) integer :: Integer -> Doc integer i = text (show i) float :: Float -> Doc float f = text (show f) double :: Double -> Doc double d = text (show d) rational :: Rational -> Doc rational r = text (show r) ----------------------------------------------------------- -- overloading "pretty" ----------------------------------------------------------- putPretty :: Pretty a => a -> IO () putPretty p = putDoc (pretty p) class Pretty a where pretty :: a -> Doc prettyList :: [a] -> Doc prettyList = list . map pretty instance Pretty a => Pretty [a] where pretty = prettyList instance Pretty Doc where pretty = id instance Pretty () where pretty () = text "()" instance Pretty Bool where pretty b = bool b instance Pretty Char where pretty c = char c prettyList s = string s instance Pretty Int where pretty i = int i instance Pretty Integer where pretty i = integer i instance Pretty Float where pretty f = float f instance Pretty Double where pretty d = double d --instance Pretty Rational where -- pretty r = rational r instance (Pretty a,Pretty b) => Pretty (a,b) where pretty (x,y) = tupled [pretty x, pretty y] instance (Pretty a,Pretty b,Pretty c) => Pretty (a,b,c) where pretty (x,y,z)= tupled [pretty x, pretty y, pretty z] instance Pretty a => Pretty (Maybe a) where pretty Nothing = empty pretty (Just x) = pretty x ----------------------------------------------------------- -- semi primitive: fill and fillBreak ----------------------------------------------------------- fillBreak f x = width x (\w -> if (w > f) then nest f linebreak else 
text (spaces (f - w))) fill f d = width d (\w -> if (w >= f) then empty else text (spaces (f - w))) width d f = column (\k1 -> d <> column (\k2 -> f (k2 - k1))) ----------------------------------------------------------- -- semi primitive: Alignment and indentation ----------------------------------------------------------- indent i d = hang i (text (spaces i) <> d) hang i d = align (nest i d) align d = column (\k -> nesting (\i -> nest (k - i) d)) --nesting might be negative :-) ----------------------------------------------------------- -- Primitives ----------------------------------------------------------- type Docs = [Doc] data Doc = Empty | Char Char -- invariant: char is not '\n' | Text !Int String -- invariant: text doesn't contain '\n' | Line !Bool -- True <=> when undone by group, do not insert a space | Cat Doc Doc | Nest !Int Doc | Union Doc Doc -- invariant: first lines of first doc longer than the first lines of the second doc | Column (Int -> Doc) | Nesting (Int -> Doc) | Colored Bool Color Doc | ColoredEnd data SimpleDoc = SEmpty | SChar !Int Char SimpleDoc | SText !Int !Int String SimpleDoc | SLine !Int SimpleDoc | SColorOpen Bool Color SimpleDoc | SColorClose SimpleDoc empty = Empty char '\n' = line char c = Char c text "" = Empty text s = Text (length s) s line = Line False linebreak = Line True beside x y = Cat x y nest i x = Nest i x column f = Column f nesting f = Nesting f group x = Union (flatten x) x color c doc = Colored True c doc bcolor c doc = Colored False c doc flatten :: Doc -> Doc flatten (Cat x y) = Cat (flatten x) (flatten y) flatten (Nest i x) = Nest i (flatten x) flatten (Line break) = if break then Empty else Text 1 " " flatten (Union x y) = flatten x flatten (Column f) = Column (flatten . f) flatten (Nesting f) = Nesting (flatten . 
f) flatten (Colored f c d) = Colored f c (flatten d) flatten other = other --Empty,Char,Text ----------------------------------------------------------- -- Renderers ----------------------------------------------------------- ----------------------------------------------------------- -- renderPretty: the default pretty printing algorithm ----------------------------------------------------------- -- list of indentation/document pairs; saves an indirection over [(Int,Doc)] data DocList = Nil | Cons !Int Doc DocList renderPretty :: Float -> Int -> Doc -> SimpleDoc renderPretty rfrac w x = best 0 0 0 (Cons 0 x Nil) where -- r :: the ribbon width in characters r = max 0 (min w (round (fromIntegral w * rfrac))) -- best :: b = base nesting -- n = indentation of current line -- k = current column -- (ie. (k >= n) && (k - n == count of inserted characters) best b n k Nil = SEmpty best b n k (Cons i d ds) = case d of Empty -> best b n k ds Char c -> let k' = k+1 in seq k' (SChar b c (best b n k' ds)) Text l s -> let k' = k+l in seq k' (SText b l s (best b n k' ds)) Line _ -> SLine i (best b i i ds) Cat x y -> best b n k (Cons i x (Cons i y ds)) Nest j x -> let i' = i+j in seq i' (best (if b==0 then i' else b) n k (Cons i' x ds)) Union x y -> nicest n k (best b n k (Cons i x ds)) (best b n k (Cons i y ds)) Column f -> best b n k (Cons i (f k) ds) Nesting f -> best b n k (Cons i (f i) ds) Colored f c x -> SColorOpen f c (best b n k (Cons i x (Cons i ColoredEnd ds))) ColoredEnd -> SColorClose (best b n k ds) --nicest :: r = ribbon width, w = page width, -- n = indentation of current line, k = current column -- x and y, the (simple) documents to chose from. -- precondition: first lines of x are longer than the first lines of y. 
nicest n k x y | fits width x = x | otherwise = y where width = min (w - k) (r - k + n) fits w x | w < 0 = False fits w SEmpty = True fits w (SChar i c x) = fits (w - 1) x fits w (SText i l s x) = fits (w - l) x fits w (SLine i x) = True fits w (SColorOpen f c x) = fits w x fits w (SColorClose x) = fits w x ----------------------------------------------------------- -- renderCompact: renders documents without indentation -- fast and fewer characters output, good for machines ----------------------------------------------------------- renderCompact :: Doc -> SimpleDoc renderCompact x = scan 0 [x] where scan k [] = SEmpty scan k (d:ds) = case d of Empty -> scan k ds Char c -> let k' = k+1 in seq k' (SChar 0 c (scan k' ds)) Text l s -> let k' = k+l in seq k' (SText 0 l s (scan k' ds)) Line _ -> SLine 0 (scan 0 ds) Cat x y -> scan k (x:y:ds) Nest j x -> scan k (x:ds) Union x y -> scan k (y:ds) Column f -> scan k (f k:ds) Nesting f -> scan k (f 0:ds) Colored f c x-> SColorOpen f c (scan k (x : ColoredEnd : ds)) ColoredEnd -> SColorClose (scan k ds) ----------------------------------------------------------- -- Displayers: displayS and displayIO ----------------------------------------------------------- displayS :: SimpleDoc -> ShowS displayS SEmpty = id displayS (SChar i c x) = showChar c . displayS x displayS (SText i l s x) = showString s . displayS x displayS (SLine i x) = showString ('\n':indentation i) . 
displayS x displayS (SColorOpen f c x) = displayS x displayS (SColorClose x) = displayS x displayIO :: Handle -> SimpleDoc -> IO () displayIO handle simpleDoc = display simpleDoc where display SEmpty = return () display (SChar i c x) = do{ hPutChar handle c; display x} display (SText i l s x) = do{ hPutStr handle s; display x} display (SLine i x) = do{ hPutStr handle ('\n':indentation i); display x} display (SColorOpen f c x)= display x display (SColorClose x) = display x {-------------------------------------------------------------------------- Interface --------------------------------------------------------------------------} -- | Available colors on a console. Normally, background colors are -- converted to their /dark/ variant. data Color = Black | DarkRed | DarkGreen | DarkYellow | DarkBlue | DarkMagenta | DarkCyan | Gray | DarkGray | Red | Green | Yellow | Blue | Magenta | Cyan | White | ColorDefault deriving (Show,Eq,Ord,Enum) ----------------------------------------------------------- -- default pretty printers: show, putDoc and hPutDoc ----------------------------------------------------------- instance Show Doc where showsPrec d doc = displayS (renderPretty 0.8 80 doc) putDoc :: Doc -> IO () putDoc doc = hPutDoc stdout doc hPutDoc :: Handle -> Doc -> IO () hPutDoc handle doc = displayIO handle (renderPretty 0.8 80 doc) writeDoc :: FilePath -> Doc -> IO () writeDoc fpath doc = do h <- openFile fpath WriteMode hPutDoc h doc hClose h ----------------------------------------------------------- -- insert spaces -- "indentation" used to insert tabs but tabs seem to cause -- more trouble than they solve :-) ----------------------------------------------------------- spaces n | n <= 0 = "" | otherwise = replicate n ' ' indentation n = spaces n --indentation n | n >= 8 = '\t' : indentation (n-8) -- | otherwise = spaces n
alvisespano/Lw
extras/hml-prototype/PPrint.hs
gpl-3.0
15,232
0
17
5,081
4,285
2,227
2,058
301
13
-- This Source Code Form is subject to the terms of the Mozilla Public -- License, v. 2.0. If a copy of the MPL was not distributed with this -- file, You can obtain one at http://mozilla.org/MPL/2.0/. {-# LANGUAGE OverloadedStrings #-} module Database.Redis.IO.Settings where import Data.Time import Data.Word import Database.Redis.IO.Types (Milliseconds (..)) data Settings = Settings { sHost :: !String , sPort :: !Word16 , sIdleTimeout :: !NominalDiffTime , sMaxConnections :: !Int , sPoolStripes :: !Int , sConnectTimeout :: !Milliseconds , sSendRecvTimeout :: !Milliseconds } -- | Default settings. -- -- * host = localhost -- * port = 6379 -- * idle timeout = 60s -- * stripes = 2 -- * connections per stripe = 25 -- * connect timeout = 5s -- * send-receive timeout = 10s defSettings :: Settings defSettings = Settings "localhost" 6379 60 -- idle timeout 50 -- max connections per stripe 2 -- max stripes 5000 -- connect timeout 10000 -- send and recv timeout (sum) setHost :: String -> Settings -> Settings setHost v s = s { sHost = v } setPort :: Word16 -> Settings -> Settings setPort v s = s { sPort = v } setIdleTimeout :: NominalDiffTime -> Settings -> Settings setIdleTimeout v s = s { sIdleTimeout = v } -- | Maximum connections per pool stripe. setMaxConnections :: Int -> Settings -> Settings setMaxConnections v s = s { sMaxConnections = v } setPoolStripes :: Int -> Settings -> Settings setPoolStripes v s | v < 1 = error "Network.Redis.IO.Settings: at least one stripe required" | otherwise = s { sPoolStripes = v } -- | When a pool connection is opened, connect timeout is the maximum time -- we are willing to wait for the connection attempt to the redis server to -- succeed. setConnectTimeout :: NominalDiffTime -> Settings -> Settings setConnectTimeout v s = s { sConnectTimeout = Ms $ round (1000 * v) } setSendRecvTimeout :: NominalDiffTime -> Settings -> Settings setSendRecvTimeout v s = s { sSendRecvTimeout = Ms $ round (1000 * v) }
twittner/redis-io
src/Database/Redis/IO/Settings.hs
mpl-2.0
2,083
0
10
463
406
233
173
50
1
{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE ViewPatterns #-} -- Module : Gen.Tree -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : provisional -- Portability : non-portable (GHC extensions) module Gen.Tree ( root , fold , populate ) where import Control.Error import Control.Lens ((^.)) import Control.Monad import Control.Monad.Except import Data.Aeson hiding (json) import Data.Bifunctor import Data.Foldable (foldr') import Data.Monoid import Data.Text (Text) import qualified Data.Text as Text import qualified Data.Text.Lazy as LText import Filesystem.Path.CurrentOS hiding (FilePath, root) import Gen.Formatting (failure, shown) import qualified Gen.JSON as JS import Gen.Types import Prelude hiding (mod) import System.Directory.Tree hiding (file) import Text.EDE hiding (failure, render) root :: AnchoredDirTree a -> Path root (p :/ d) = decodeString p </> decodeString (name d) fold :: MonadError Error m => (Path -> m ()) -- ^ Directories -> (Path -> a -> m b) -- ^ Files -> AnchoredDirTree a -> m (AnchoredDirTree b) fold g f (p :/ t) = (p :/) <$> go (decodeString p) t where go x = \case Failed n e -> failure shown e >> return (Failed n e) File n a -> File n <$> f (x </> decodeString n) a Dir n cs -> g d >> Dir n <$> mapM (go d) cs where d = x </> decodeString n -- If Nothing, then touch the file, otherwise write the Just contents. type Touch = Maybe Rendered populate :: Path -> Templates -> Library -> Either Error (AnchoredDirTree Touch) populate d Templates {..} l = (encodeString d :/) . 
dir lib <$> layout where layout :: Either Error [DirTree Touch] layout = traverse sequenceA [ dir "src" -- Supress cabal warnings about directories listed that don't exist. [ touch ".gitkeep" ] , file (lib <.> "cabal") cabalTemplate , file "README.md" readmeTemplate , dir "gen" $ [ mod' (tocNS l) tocImports tocTemplate (pure env) , mod' (typesNS l) typeImports typesTemplate (pure env) , mod' (prodNS l) prodImports prodTemplate (pure env) , mod' (sumNS l) sumImports sumTemplate (pure env) ] ++ map resource (_apiResources (l ^. lAPI)) ++ map method (_apiMethods (l ^. lAPI)) ] where -- FIXME: now redundant resource a = mod' (_actNamespace a) actionImports actionTemplate (action a) method a = mod' (_actNamespace a) actionImports actionTemplate (action a) action a = let Object o = object ["action" .= a] Object e = env in pure $ Object (o <> e) Imports {..} = serviceImports l lib = fromText (l ^. sLibrary) mod' ns is t = write . module' ns is t file p t = write $ file' p t (pure env) env :: Value env = toJSON l module' :: ToJSON a => NS -> [NS] -> Template -> Either Error a -> DirTree (Either Error Rendered) module' ns is t f = namespaced ns t $ do x <- f >>= JS.objectErr (show ns) return $! x <> fromPairs [ "moduleName" .= ns , "moduleImports" .= is ] namespaced :: ToJSON a => NS -> Template -> Either Error a -> DirTree (Either Error Rendered) namespaced (unNS -> ns) t x = case map fromText ns of [] -> error "Empty namespace." [p] -> f p ps -> foldr' nest (f (last ps)) (init ps) where f p = file' (p <.> "hs") t x nest d c = Dir (encodeString d) [c] file' :: ToJSON a => Path -> Template -> Either Error a -> DirTree (Either Error Rendered) file' (encodeString -> p) t f = File p $ f >>= JS.objectErr p >>= fmapL LText.pack . 
eitherRender t dir :: Path -> [DirTree a] -> DirTree a dir p = Dir (encodeString p) write :: DirTree (Either e a) -> DirTree (Either e (Maybe a)) write = fmap (second Just) touch :: Text -> DirTree (Either e (Maybe a)) touch f = File (Text.unpack f) (Right Nothing)
rueshyna/gogol
gen/src/Gen/Tree.hs
mpl-2.0
4,879
0
15
1,701
1,491
767
724
114
3
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.AndroidPublisher.Edits.Details.Patch -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Patches details of an app. -- -- /See:/ <https://developers.google.com/android-publisher Google Play Android Developer API Reference> for @androidpublisher.edits.details.patch@. module Network.Google.Resource.AndroidPublisher.Edits.Details.Patch ( -- * REST Resource EditsDetailsPatchResource -- * Creating a Request , editsDetailsPatch , EditsDetailsPatch -- * Request Lenses , edpXgafv , edpUploadProtocol , edpPackageName , edpAccessToken , edpUploadType , edpPayload , edpEditId , edpCallback ) where import Network.Google.AndroidPublisher.Types import Network.Google.Prelude -- | A resource alias for @androidpublisher.edits.details.patch@ method which the -- 'EditsDetailsPatch' request conforms to. type EditsDetailsPatchResource = "androidpublisher" :> "v3" :> "applications" :> Capture "packageName" Text :> "edits" :> Capture "editId" Text :> "details" :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> ReqBody '[JSON] AppDetails :> Patch '[JSON] AppDetails -- | Patches details of an app. -- -- /See:/ 'editsDetailsPatch' smart constructor. 
data EditsDetailsPatch = EditsDetailsPatch' { _edpXgafv :: !(Maybe Xgafv) , _edpUploadProtocol :: !(Maybe Text) , _edpPackageName :: !Text , _edpAccessToken :: !(Maybe Text) , _edpUploadType :: !(Maybe Text) , _edpPayload :: !AppDetails , _edpEditId :: !Text , _edpCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'EditsDetailsPatch' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'edpXgafv' -- -- * 'edpUploadProtocol' -- -- * 'edpPackageName' -- -- * 'edpAccessToken' -- -- * 'edpUploadType' -- -- * 'edpPayload' -- -- * 'edpEditId' -- -- * 'edpCallback' editsDetailsPatch :: Text -- ^ 'edpPackageName' -> AppDetails -- ^ 'edpPayload' -> Text -- ^ 'edpEditId' -> EditsDetailsPatch editsDetailsPatch pEdpPackageName_ pEdpPayload_ pEdpEditId_ = EditsDetailsPatch' { _edpXgafv = Nothing , _edpUploadProtocol = Nothing , _edpPackageName = pEdpPackageName_ , _edpAccessToken = Nothing , _edpUploadType = Nothing , _edpPayload = pEdpPayload_ , _edpEditId = pEdpEditId_ , _edpCallback = Nothing } -- | V1 error format. edpXgafv :: Lens' EditsDetailsPatch (Maybe Xgafv) edpXgafv = lens _edpXgafv (\ s a -> s{_edpXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). edpUploadProtocol :: Lens' EditsDetailsPatch (Maybe Text) edpUploadProtocol = lens _edpUploadProtocol (\ s a -> s{_edpUploadProtocol = a}) -- | Package name of the app. edpPackageName :: Lens' EditsDetailsPatch Text edpPackageName = lens _edpPackageName (\ s a -> s{_edpPackageName = a}) -- | OAuth access token. edpAccessToken :: Lens' EditsDetailsPatch (Maybe Text) edpAccessToken = lens _edpAccessToken (\ s a -> s{_edpAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). edpUploadType :: Lens' EditsDetailsPatch (Maybe Text) edpUploadType = lens _edpUploadType (\ s a -> s{_edpUploadType = a}) -- | Multipart request metadata. 
edpPayload :: Lens' EditsDetailsPatch AppDetails edpPayload = lens _edpPayload (\ s a -> s{_edpPayload = a}) -- | Identifier of the edit. edpEditId :: Lens' EditsDetailsPatch Text edpEditId = lens _edpEditId (\ s a -> s{_edpEditId = a}) -- | JSONP edpCallback :: Lens' EditsDetailsPatch (Maybe Text) edpCallback = lens _edpCallback (\ s a -> s{_edpCallback = a}) instance GoogleRequest EditsDetailsPatch where type Rs EditsDetailsPatch = AppDetails type Scopes EditsDetailsPatch = '["https://www.googleapis.com/auth/androidpublisher"] requestClient EditsDetailsPatch'{..} = go _edpPackageName _edpEditId _edpXgafv _edpUploadProtocol _edpAccessToken _edpUploadType _edpCallback (Just AltJSON) _edpPayload androidPublisherService where go = buildClient (Proxy :: Proxy EditsDetailsPatchResource) mempty
brendanhay/gogol
gogol-android-publisher/gen/Network/Google/Resource/AndroidPublisher/Edits/Details/Patch.hs
mpl-2.0
5,480
0
21
1,387
866
503
363
129
1
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Games.Events.Record -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Records a batch of changes to the number of times events have occurred -- for the currently authenticated user of this application. -- -- /See:/ <https://developers.google.com/games/services/ Google Play Game Services API Reference> for @games.events.record@. module Network.Google.Resource.Games.Events.Record ( -- * REST Resource EventsRecordResource -- * Creating a Request , eventsRecord , EventsRecord -- * Request Lenses , erConsistencyToken , erPayload , erLanguage ) where import Network.Google.Games.Types import Network.Google.Prelude -- | A resource alias for @games.events.record@ method which the -- 'EventsRecord' request conforms to. type EventsRecordResource = "games" :> "v1" :> "events" :> QueryParam "consistencyToken" (Textual Int64) :> QueryParam "language" Text :> QueryParam "alt" AltJSON :> ReqBody '[JSON] EventRecordRequest :> Post '[JSON] EventUpdateResponse -- | Records a batch of changes to the number of times events have occurred -- for the currently authenticated user of this application. -- -- /See:/ 'eventsRecord' smart constructor. 
data EventsRecord = EventsRecord' { _erConsistencyToken :: !(Maybe (Textual Int64)) , _erPayload :: !EventRecordRequest , _erLanguage :: !(Maybe Text) } deriving (Eq,Show,Data,Typeable,Generic) -- | Creates a value of 'EventsRecord' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'erConsistencyToken' -- -- * 'erPayload' -- -- * 'erLanguage' eventsRecord :: EventRecordRequest -- ^ 'erPayload' -> EventsRecord eventsRecord pErPayload_ = EventsRecord' { _erConsistencyToken = Nothing , _erPayload = pErPayload_ , _erLanguage = Nothing } -- | The last-seen mutation timestamp. erConsistencyToken :: Lens' EventsRecord (Maybe Int64) erConsistencyToken = lens _erConsistencyToken (\ s a -> s{_erConsistencyToken = a}) . mapping _Coerce -- | Multipart request metadata. erPayload :: Lens' EventsRecord EventRecordRequest erPayload = lens _erPayload (\ s a -> s{_erPayload = a}) -- | The preferred language to use for strings returned by this method. erLanguage :: Lens' EventsRecord (Maybe Text) erLanguage = lens _erLanguage (\ s a -> s{_erLanguage = a}) instance GoogleRequest EventsRecord where type Rs EventsRecord = EventUpdateResponse type Scopes EventsRecord = '["https://www.googleapis.com/auth/games", "https://www.googleapis.com/auth/plus.login"] requestClient EventsRecord'{..} = go _erConsistencyToken _erLanguage (Just AltJSON) _erPayload gamesService where go = buildClient (Proxy :: Proxy EventsRecordResource) mempty
rueshyna/gogol
gogol-games/gen/Network/Google/Resource/Games/Events/Record.hs
mpl-2.0
3,743
0
14
874
491
290
201
73
1
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.SecurityCenter.Organizations.Operations.Cancel -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Starts asynchronous cancellation on a long-running operation. The server -- makes a best effort to cancel the operation, but success is not -- guaranteed. If the server doesn\'t support this method, it returns -- \`google.rpc.Code.UNIMPLEMENTED\`. Clients can use -- Operations.GetOperation or other methods to check whether the -- cancellation succeeded or whether the operation completed despite -- cancellation. On successful cancellation, the operation is not deleted; -- instead, it becomes an operation with an Operation.error value with a -- google.rpc.Status.code of 1, corresponding to \`Code.CANCELLED\`. -- -- /See:/ <https://console.cloud.google.com/apis/api/securitycenter.googleapis.com/overview Security Command Center API Reference> for @securitycenter.organizations.operations.cancel@. 
module Network.Google.Resource.SecurityCenter.Organizations.Operations.Cancel ( -- * REST Resource OrganizationsOperationsCancelResource -- * Creating a Request , organizationsOperationsCancel , OrganizationsOperationsCancel -- * Request Lenses , oocXgafv , oocUploadProtocol , oocAccessToken , oocUploadType , oocName , oocCallback ) where import Network.Google.Prelude import Network.Google.SecurityCenter.Types -- | A resource alias for @securitycenter.organizations.operations.cancel@ method which the -- 'OrganizationsOperationsCancel' request conforms to. type OrganizationsOperationsCancelResource = "v1p1beta1" :> CaptureMode "name" "cancel" Text :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> Post '[JSON] Empty -- | Starts asynchronous cancellation on a long-running operation. The server -- makes a best effort to cancel the operation, but success is not -- guaranteed. If the server doesn\'t support this method, it returns -- \`google.rpc.Code.UNIMPLEMENTED\`. Clients can use -- Operations.GetOperation or other methods to check whether the -- cancellation succeeded or whether the operation completed despite -- cancellation. On successful cancellation, the operation is not deleted; -- instead, it becomes an operation with an Operation.error value with a -- google.rpc.Status.code of 1, corresponding to \`Code.CANCELLED\`. -- -- /See:/ 'organizationsOperationsCancel' smart constructor. data OrganizationsOperationsCancel = OrganizationsOperationsCancel' { _oocXgafv :: !(Maybe Xgafv) , _oocUploadProtocol :: !(Maybe Text) , _oocAccessToken :: !(Maybe Text) , _oocUploadType :: !(Maybe Text) , _oocName :: !Text , _oocCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'OrganizationsOperationsCancel' with the minimum fields required to make a request. 
-- -- Use one of the following lenses to modify other fields as desired: -- -- * 'oocXgafv' -- -- * 'oocUploadProtocol' -- -- * 'oocAccessToken' -- -- * 'oocUploadType' -- -- * 'oocName' -- -- * 'oocCallback' organizationsOperationsCancel :: Text -- ^ 'oocName' -> OrganizationsOperationsCancel organizationsOperationsCancel pOocName_ = OrganizationsOperationsCancel' { _oocXgafv = Nothing , _oocUploadProtocol = Nothing , _oocAccessToken = Nothing , _oocUploadType = Nothing , _oocName = pOocName_ , _oocCallback = Nothing } -- | V1 error format. oocXgafv :: Lens' OrganizationsOperationsCancel (Maybe Xgafv) oocXgafv = lens _oocXgafv (\ s a -> s{_oocXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). oocUploadProtocol :: Lens' OrganizationsOperationsCancel (Maybe Text) oocUploadProtocol = lens _oocUploadProtocol (\ s a -> s{_oocUploadProtocol = a}) -- | OAuth access token. oocAccessToken :: Lens' OrganizationsOperationsCancel (Maybe Text) oocAccessToken = lens _oocAccessToken (\ s a -> s{_oocAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). oocUploadType :: Lens' OrganizationsOperationsCancel (Maybe Text) oocUploadType = lens _oocUploadType (\ s a -> s{_oocUploadType = a}) -- | The name of the operation resource to be cancelled. 
oocName :: Lens' OrganizationsOperationsCancel Text oocName = lens _oocName (\ s a -> s{_oocName = a}) -- | JSONP oocCallback :: Lens' OrganizationsOperationsCancel (Maybe Text) oocCallback = lens _oocCallback (\ s a -> s{_oocCallback = a}) instance GoogleRequest OrganizationsOperationsCancel where type Rs OrganizationsOperationsCancel = Empty type Scopes OrganizationsOperationsCancel = '["https://www.googleapis.com/auth/cloud-platform"] requestClient OrganizationsOperationsCancel'{..} = go _oocName _oocXgafv _oocUploadProtocol _oocAccessToken _oocUploadType _oocCallback (Just AltJSON) securityCenterService where go = buildClient (Proxy :: Proxy OrganizationsOperationsCancelResource) mempty
brendanhay/gogol
gogol-securitycenter/gen/Network/Google/Resource/SecurityCenter/Organizations/Operations/Cancel.hs
mpl-2.0
6,054
0
15
1,221
713
423
290
101
1
{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeOperators #-} {-# OPTIONS_GHC -fno-warn-duplicate-exports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} -- | -- Module : Network.Google.Resource.Docs.Documents.Create -- Copyright : (c) 2015-2016 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <[email protected]> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- Creates a blank document using the title given in the request. Other -- fields in the request, including any provided content, are ignored. -- Returns the created document. -- -- /See:/ <https://developers.google.com/docs/ Google Docs API Reference> for @docs.documents.create@. module Network.Google.Resource.Docs.Documents.Create ( -- * REST Resource DocumentsCreateResource -- * Creating a Request , documentsCreate , DocumentsCreate -- * Request Lenses , dcXgafv , dcUploadProtocol , dcAccessToken , dcUploadType , dcPayload , dcCallback ) where import Network.Google.Docs.Types import Network.Google.Prelude -- | A resource alias for @docs.documents.create@ method which the -- 'DocumentsCreate' request conforms to. type DocumentsCreateResource = "v1" :> "documents" :> QueryParam "$.xgafv" Xgafv :> QueryParam "upload_protocol" Text :> QueryParam "access_token" Text :> QueryParam "uploadType" Text :> QueryParam "callback" Text :> QueryParam "alt" AltJSON :> ReqBody '[JSON] Document :> Post '[JSON] Document -- | Creates a blank document using the title given in the request. Other -- fields in the request, including any provided content, are ignored. -- Returns the created document. -- -- /See:/ 'documentsCreate' smart constructor. 
data DocumentsCreate = DocumentsCreate' { _dcXgafv :: !(Maybe Xgafv) , _dcUploadProtocol :: !(Maybe Text) , _dcAccessToken :: !(Maybe Text) , _dcUploadType :: !(Maybe Text) , _dcPayload :: !Document , _dcCallback :: !(Maybe Text) } deriving (Eq, Show, Data, Typeable, Generic) -- | Creates a value of 'DocumentsCreate' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'dcXgafv' -- -- * 'dcUploadProtocol' -- -- * 'dcAccessToken' -- -- * 'dcUploadType' -- -- * 'dcPayload' -- -- * 'dcCallback' documentsCreate :: Document -- ^ 'dcPayload' -> DocumentsCreate documentsCreate pDcPayload_ = DocumentsCreate' { _dcXgafv = Nothing , _dcUploadProtocol = Nothing , _dcAccessToken = Nothing , _dcUploadType = Nothing , _dcPayload = pDcPayload_ , _dcCallback = Nothing } -- | V1 error format. dcXgafv :: Lens' DocumentsCreate (Maybe Xgafv) dcXgafv = lens _dcXgafv (\ s a -> s{_dcXgafv = a}) -- | Upload protocol for media (e.g. \"raw\", \"multipart\"). dcUploadProtocol :: Lens' DocumentsCreate (Maybe Text) dcUploadProtocol = lens _dcUploadProtocol (\ s a -> s{_dcUploadProtocol = a}) -- | OAuth access token. dcAccessToken :: Lens' DocumentsCreate (Maybe Text) dcAccessToken = lens _dcAccessToken (\ s a -> s{_dcAccessToken = a}) -- | Legacy upload protocol for media (e.g. \"media\", \"multipart\"). dcUploadType :: Lens' DocumentsCreate (Maybe Text) dcUploadType = lens _dcUploadType (\ s a -> s{_dcUploadType = a}) -- | Multipart request metadata. 
dcPayload :: Lens' DocumentsCreate Document dcPayload = lens _dcPayload (\ s a -> s{_dcPayload = a}) -- | JSONP dcCallback :: Lens' DocumentsCreate (Maybe Text) dcCallback = lens _dcCallback (\ s a -> s{_dcCallback = a}) instance GoogleRequest DocumentsCreate where type Rs DocumentsCreate = Document type Scopes DocumentsCreate = '["https://www.googleapis.com/auth/documents", "https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.file"] requestClient DocumentsCreate'{..} = go _dcXgafv _dcUploadProtocol _dcAccessToken _dcUploadType _dcCallback (Just AltJSON) _dcPayload docsService where go = buildClient (Proxy :: Proxy DocumentsCreateResource) mempty
brendanhay/gogol
gogol-docs/gen/Network/Google/Resource/Docs/Documents/Create.hs
mpl-2.0
4,725
0
16
1,116
713
418
295
103
1
{-# LANGUAGE GeneralizedNewtypeDeriving #-} module Read where import Text.Read (readPrec_to_S, readPrec, minPrec) import qualified Text.ParserCombinators.ReadP as P import Text.ParserCombinators.ReadPrec (lift) import Numeric -- This library (libraries/base) is derived from code from several -- sources: -- * Code from the GHC project which is largely (c) The University of -- Glasgow, and distributable under a BSD-style license (see below), -- * Code from the Haskell 98 Report which is (c) Simon Peyton Jones -- and freely redistributable (but see the full license for -- restrictions). -- * Code from the Haskell Foreign Function Interface specification, -- which is (c) Manuel M. T. Chakravarty and freely redistributable -- (but see the full license for restrictions). -- The full text of these licenses is reproduced below. All of the -- licenses are BSD-style or compatible. -- ----------------------------------------------------------------------------- -- The Glasgow Haskell Compiler License -- Copyright 2004, The University Court of the University of Glasgow. -- All rights reserved. -- Redistribution and use in source and binary forms, with or without -- modification, are permitted provided that the following conditions are met: -- - Redistributions of source code must retain the above copyright notice, -- this list of conditions and the following disclaimer. -- - Redistributions in binary form must reproduce the above copyright notice, -- this list of conditions and the following disclaimer in the documentation -- and/or other materials provided with the distribution. -- - Neither name of the University nor the names of its contributors may be -- used to endorse or promote products derived from this software without -- specific prior written permission. 
-- THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY COURT OF THE UNIVERSITY OF -- GLASGOW AND THE CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, -- INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND -- FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -- UNIVERSITY COURT OF THE UNIVERSITY OF GLASGOW OR THE CONTRIBUTORS BE LIABLE -- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -- LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -- OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH -- DAMAGE. -- ----------------------------------------------------------------------------- -- Code derived from the document "Report on the Programming Language -- Haskell 98", is distributed under the following license: -- Copyright (c) 2002 Simon Peyton Jones -- The authors intend this Report to belong to the entire Haskell -- community, and so we grant permission to copy and distribute it for -- any purpose, provided that it is reproduced in its entirety, -- including this Notice. Modified versions of this Report may also be -- copied and distributed for any purpose, provided that the modified -- version is clearly presented as such, and that it does not claim to -- be a definition of the Haskell 98 Language. -- ----------------------------------------------------------------------------- -- Code derived from the document "The Haskell 98 Foreign Function -- Interface, An Addendum to the Haskell 98 Report" is distributed under -- the following license: -- Copyright (c) 2002 Manuel M. T. 
Chakravarty -- The authors intend this Report to belong to the entire Haskell -- community, and so we grant permission to copy and distribute it for -- any purpose, provided that it is reproduced in its entirety, -- including this Notice. Modified versions of this Report may also be -- copied and distributed for any purpose, provided that the modified -- version is clearly presented as such, and that it does not claim to -- be a definition of the Haskell 98 Foreign Function Interface. -- ----------------------------------------------------------------------------- readEitherNote :: Read a => String -> String -> Either String a readEitherNote note s = case [ x | (x,"") <- readPrec_to_S read' minPrec s ] of [x] -> Right x [] -> Left $ "Read.readEitherNote: no parse on \""++ s ++"\", " ++ note _ -> Left $ "Read.readEitherNote: ambiguous parse on \""++ s ++"\", " ++ note where read' = do x <- readPrec lift P.skipSpaces return x readMaybe :: Read a => String -> Maybe a readMaybe s = case readEitherNote "" s of Left _ -> Nothing Right a -> Just a newtype Hex a = Hex { unHex :: a } deriving (Eq, Ord, Num) instance (Eq a, Num a) => Read (Hex a) where readsPrec _ = readHex
DanielG/kvm-in-a-box
src/Read.hs
agpl-3.0
4,980
0
11
924
378
228
150
23
3
-- Tracking down a problem with event merging import Data.Monoid (mappend) import Control.Applicative ((<$>)) import FRP.Reactive.Improving import FRP.Reactive.Future import FRP.Reactive.PrimReactive import FRP.Reactive.Reactive import FRP.Reactive.Internal.Future import FRP.Reactive.Internal.Reactive -- (Imp 1.0,1)->(Imp 2.0,2)->(Imp 3.0,3)->(Imp *** Exception: Prelude.undefined e1 = listEG [(exactly 1,1),(exactly 2,2),(exactly 3,3),(after 4,17)] -- (Imp 1.5,100)->(Imp 2.5,200) e2 = listEG [(exactly 1.5, 100), (exactly 2.5, 200)] -- (Imp *** Exception: Prelude.undefined e3 = listEG [(after 2.5, 200)] -- (Imp 1.5,100)->(Imp 2.3,200)->(Imp *** Exception: Prelude.undefined e3' = listEG [(exactly 1.5, 100), (exactly 2.3, 200), (after 2.5, 300)] -- (Imp 1.0,1)->(Imp 1.5,100)->(Imp 2.0,2)->(Imp 2.5,200)->(Imp 3.0,3)->(Imp *** Exception: Prelude.undefined e4 = e1 `mappend` e2 -- (Imp 1.0,1)->(Imp 2.0,2)<interactive>: after: comparing after e5 = e1 `mappend` e3 -- (Imp 1.0,1)->(Imp 1.5,100)->(Imp 2.0,2)->(Imp 2.3,200)<interactive>: after: comparing after e5' = e1 `mappend` e3' -- <NoBound Imp 1.0,1 `Stepper` (Imp 2.0,2)->(Imp 3.0,3)->(Imp *** Exception: Prelude.undefined f1 = eFuture e1 -- <NoBound Imp 1.5,100 `Stepper` (Imp 2.5,200)> f2 = eFuture e2 -- <NoBound Imp *** Exception: Prelude.undefined f3 = eFuture e3 -- <NoBound Imp 1.0,1 `Stepper` (Imp 2.0,2)->(Imp 3.0,3)->(Imp *** Exception: Prelude.undefined f4 = f1 `mappend` f3 -- <NoBound Imp 1.0,1 `Stepper` (Imp 2.0,2)<interactive>: after: comparing after f5 = f1 `merge` f3 -- <NoBound Imp 1.0,1 `Stepper` (Imp 2.0,2)<interactive>: after: comparing after f5' = eFuture e5 -- type Binop a = a -> a -> a mergeLR, mergeL, mergeR :: (Ord s) => Binop (FutureG s (ReactiveG s b)) -- Same as 'merge' u `mergeLR` v = (inFutR (`merge` v) <$> u) `mappend` (inFutR (u `merge`) <$> v) u `mergeL` v = inFutR (`merge` v) <$> u u `mergeR` v = inFutR (u `merge`) <$> v -- inFutR :: (FutureG s (ReactiveG s b) -> FutureG t (ReactiveG t b)) -- 
-> (ReactiveG s b -> ReactiveG t b) -- <NoBound Imp 1.0,1 `Stepper` (Imp 2.0,2)<interactive>: after: comparing after f6 = f1 `mergeLR` f3 -- <NoBound Imp 1.0,1 `Stepper` (Imp 2.0,2)<interactive>: after: comparing after f7 :: Future (Reactive Integer) f7 = f1 `mergeL` f3 -- <NoBound Imp *** Exception: Prelude.undefined f8 = f1 `mergeR` f3 f7' :: Future (Reactive Integer) -- <NoBound Imp 1.0,1 `Stepper` (Imp 2.0,2)<interactive>: after: comparing after f7' = q <$> f1 where q (a `Stepper` Event u') = a `Stepper` Event (u' `merge` f3)
ekmett/reactive
src/Test/Merge.hs
agpl-3.0
2,581
0
10
416
587
353
234
34
1
{-# OPTIONS_HADDOCK hide #-} ----------------------------------------------------------------------------- -- | -- Module : Graphics.Rendering.Cairo.Internal -- Copyright : (c) Paolo Martini 2005 -- License : BSD-style (see cairo/COPYRIGHT) -- -- Maintainer : [email protected] -- Stability : experimental -- Portability : portable -- -- Direct bindings to the cairo library. ----------------------------------------------------------------------------- -- #hide -- module Graphics.Rendering.Cairo.Internal ( Render(..), bracketR , module Graphics.Rendering.Cairo.Types , module Graphics.Rendering.Cairo.Internal.Drawing.Cairo , module Graphics.Rendering.Cairo.Internal.Drawing.Paths , module Graphics.Rendering.Cairo.Internal.Drawing.Patterns , module Graphics.Rendering.Cairo.Internal.Drawing.Text , module Graphics.Rendering.Cairo.Internal.Drawing.Transformations , module Graphics.Rendering.Cairo.Internal.Fonts.FontOptions , module Graphics.Rendering.Cairo.Internal.Surfaces.Image , module Graphics.Rendering.Cairo.Internal.Surfaces.PDF , module Graphics.Rendering.Cairo.Internal.Surfaces.PNG , module Graphics.Rendering.Cairo.Internal.Surfaces.PS , module Graphics.Rendering.Cairo.Internal.Surfaces.SVG , module Graphics.Rendering.Cairo.Internal.Surfaces.Surface , module Graphics.Rendering.Cairo.Internal.Utilities ) where import Graphics.Rendering.Cairo.Types import Graphics.Rendering.Cairo.Internal.Drawing.Cairo import Graphics.Rendering.Cairo.Internal.Drawing.Paths import Graphics.Rendering.Cairo.Internal.Drawing.Patterns import Graphics.Rendering.Cairo.Internal.Drawing.Text import Graphics.Rendering.Cairo.Internal.Drawing.Transformations import Graphics.Rendering.Cairo.Internal.Fonts.FontOptions import Graphics.Rendering.Cairo.Internal.Surfaces.Image import Graphics.Rendering.Cairo.Internal.Surfaces.PDF import Graphics.Rendering.Cairo.Internal.Surfaces.PNG import Graphics.Rendering.Cairo.Internal.Surfaces.PS import Graphics.Rendering.Cairo.Internal.Surfaces.SVG import 
Graphics.Rendering.Cairo.Internal.Surfaces.Surface import Graphics.Rendering.Cairo.Internal.Utilities import Control.Monad.Reader import Control.Exception (bracket) -- | The Render monad. All drawing operations take place in a Render context. -- You can obtain a Render context for a 'Surface' using 'renderWith'. -- newtype Render m = Render { runRender :: ReaderT Cairo IO m } deriving (Functor, Monad, MonadIO, MonadReader Cairo) {-# INLINE bracketR #-} bracketR :: IO a -> (a -> IO b) -> (a -> Render c) -> Render c bracketR begin end action = Render $ ReaderT $ \r -> bracket begin end (\s -> runReaderT (runRender $ action s) r)
thiagoarrais/gtk2hs
cairo/Graphics/Rendering/Cairo/Internal.hs
lgpl-2.1
2,713
0
13
298
455
320
135
42
1
import Test.HUnit import Precedence import Test import System.Exit import qualified WnegSum import qualified WmulSum import qualified WsumSub import qualified WsumSum import qualified WeqBand import qualified WbandXor import qualified WxorBor allTests = (TestList . map (\(name, test) -> TestLabel name $ repeatedTest 1024 test)) [ ("negSum", test2 negSum WnegSum.negSum) , ("mulSum", test3 mulSum WmulSum.mulSum) , ("sumSub", test3 sumSub WsumSub.sumSub) , ("sumSum", test3 sumSum WsumSum.sumSum) , ("eqBand", test3 eqBand WeqBand.eqBand) , ("bandXor", test3 bandXor WbandXor.bandXor) , ("xorBor", test3 xorBor WxorBor.xorBor)] main = do counts <- runTestTT allTests if errors counts > 0 || failures counts > 0 then exitFailure else exitSuccess
yjwen/hada
test/TestPrecedence.hs
lgpl-3.0
861
0
12
214
245
138
107
23
2
{-# LANGUAGE CPP, MagicHash, RankNTypes #-} -- | -- Module : GHC.Vacuum.Internal -- Copyright : (c) Matt Morrow 2009, Austin Seipp 2011-2012 -- License : LGPLv3 -- -- Maintainer : [email protected] -- Stability : experimental -- Portability : non-portable (GHC only) -- -- Internal vacuum module. You probably shouldn't be here. -- module GHC.Vacuum.Internal ( HValue ,HalfWord ,ItblCode ,StgInfoTable(..) ,ghciTablesNextToCode ,dataConInfoPtrToNames ,wORD_SIZE ,hALF_WORD_SIZE ,S(..),get,gets,set,io,modify,runS ) where import Prelude hiding (mod) import Data.Char import Data.Word import Control.Applicative (Applicative(..)) import Control.Monad (ap) import Control.Monad.Fix import Foreign import Data.List import GHC.Prim import GHC.Exts #include "ghcplatform.h" #include "ghcautoconf.h" #define GHCI_TABLES_NEXT_TO_CODE -- is there somewhere to get this define? ----------------------------------------------------------------------------- -- * Fabricate what we need to avoid the ghc pkg dep type HValue = Any #if SIZEOF_VOID_P == 8 type HalfWord = Word32 #else type HalfWord = Word16 #endif -- | From SMRep type ByteOff = Int -- | From SMRep type WordOff = Int -- | From SMRep type StgWord = Word -- hmmmmmm. Is there any way to tell this? opt_SccProfilingOn :: Bool opt_SccProfilingOn = False -- ghci> wORD_SIZE -- 8 -- ghci> sizeOf (undefined :: Word) -- 8 wORD_SIZE :: Int wORD_SIZE = sizeOf (undefined :: Word) hALF_WORD_SIZE :: Int hALF_WORD_SIZE = wORD_SIZE `div` 2 -- | This is currently always True since -- i'm not sure how to get at the CPP define -- \"GHCI_TABLES_NEXT_TO_CODE\" (or equiv) to tell. 
ghciTablesNextToCode :: Bool #ifdef GHCI_TABLES_NEXT_TO_CODE ghciTablesNextToCode = True #else ghciTablesNextToCode = False #endif ----------------------------------------------------------------------------- data StgInfoTable = StgInfoTable { #ifndef GHCI_TABLES_NEXT_TO_CODE entry :: Ptr (), #endif ptrs :: HalfWord, nptrs :: HalfWord, tipe :: HalfWord, srtlen :: HalfWord #ifdef GHCI_TABLES_NEXT_TO_CODE , code :: [ItblCode] #endif } instance Storable StgInfoTable where sizeOf itbl = sum [ #ifndef GHCI_TABLES_NEXT_TO_CODE fieldSz entry itbl, #endif fieldSz ptrs itbl, fieldSz nptrs itbl, fieldSz tipe itbl, fieldSz srtlen itbl #ifdef GHCI_TABLES_NEXT_TO_CODE ,fieldSz (head.code) itbl * itblCodeLength #endif ] alignment _ = SIZEOF_VOID_P poke a0 itbl = flip evalS (castPtr a0) $ do #ifndef GHCI_TABLES_NEXT_TO_CODE store (entry itbl) #endif store (ptrs itbl) store (nptrs itbl) store (tipe itbl) store (srtlen itbl) #ifdef GHCI_TABLES_NEXT_TO_CODE sequence_ (map store (code itbl)) #endif peek a0 = flip evalS (castPtr a0) $ do #ifndef GHCI_TABLES_NEXT_TO_CODE entry <- load #endif ptrs' <- load nptrs' <- load tipe' <- load srtlen' <- load #ifdef GHCI_TABLES_NEXT_TO_CODE code' <- sequence (replicate itblCodeLength load) #endif return StgInfoTable { #ifndef GHCI_TABLES_NEXT_TO_CODE entry = entry, #endif ptrs = ptrs', nptrs = nptrs', tipe = tipe', srtlen = srtlen' #ifdef GHCI_TABLES_NEXT_TO_CODE ,code = code' #endif } fieldSz :: (Storable a, Storable b) => (a -> b) -> a -> Int fieldSz sel x = sizeOf (sel x) type PtrIO = S (Ptr Word8) advance :: Storable a => PtrIO (Ptr a) advance = S adv where adv k addr = case castPtr addr of addrCast -> k addrCast (addr `plusPtr` sizeOfPointee addrCast) sizeOfPointee :: (Storable a) => Ptr a -> Int sizeOfPointee addr = sizeOf (typeHack addr) where typeHack = undefined :: Ptr a -> a store :: Storable a => a -> PtrIO () store x = do addr <- advance io (poke addr x) load :: Storable a => PtrIO a load = do addr <- advance io (peek addr) newtype S 
s a = S {unS :: forall o. (a -> s -> IO o) -> s -> IO o} instance Functor (S s) where fmap f (S g) = S (\k -> g (k . f)) instance Applicative (S s) where pure = return (<*>) = ap instance Monad (S s) where return a = S (\k -> k a) S g >>= f = S (\k -> g (\a -> unS (f a) k)) instance MonadFix (S s) where mfix f = S (\k s -> uncurry k =<< mfix (\ ~(a,_) -> -- the lazy pattern is ESSENTIAL, otherwise <<loop>> unS (f a) (\a' s' -> return (a',s')) s)) get :: S s s get = S (\k s -> k s s) gets :: (s -> a) -> S s a gets f = S (\k s -> k (f s) s) set :: s -> S s () set s = S (\k _ -> k () s) io :: IO a -> S s a io m = S (\k s -> flip k s =<< m) modify :: (s -> s) -> S s () modify f = S (\k -> k () . f) runS :: S s a -> s -> IO (a, s) runS (S g) = g (\a -> return . (,) a) evalS :: S s a -> s -> IO a evalS (S g) = g (\a _ -> return a) --execS :: S s a -> s -> IO s --execS (S g) = g (\_ -> return) ----------------------------------------------------------------------------- -- VACUUM: All this just to get itblCodeLength. -- Make code which causes a jump to the given address. This is the -- only arch-dependent bit of the itbl story. The returned list is -- itblCodeLength elements (bytes) long. -- For sparc_TARGET_ARCH, i386_TARGET_ARCH, etc. -- #include "nativeGen/NCG.h" -- VACUUM: we get *_TARGET_ARCH from ghcplatform.h instead itblCodeLength :: Int itblCodeLength = length (mkJumpToAddr undefined) mkJumpToAddr :: Ptr () -> [ItblCode] ptrToInt :: Ptr a -> Int ptrToInt (Ptr a#) = I# (addr2Int# a#) #if sparc_TARGET_ARCH -- After some consideration, we'll try this, where -- 0x55555555 stands in for the address to jump to. -- According to ghc/includes/MachRegs.h, %g3 is very -- likely indeed to be baggable. -- -- 0000 07155555 sethi %hi(0x55555555), %g3 -- 0004 8610E155 or %g3, %lo(0x55555555), %g3 -- 0008 81C0C000 jmp %g3 -- 000c 01000000 nop type ItblCode = Word32 mkJumpToAddr a = let w32 = fromIntegral (ptrToInt a) hi22, lo10 :: Word32 -> Word32 lo10 x = x .&. 
0x3FF hi22 x = (x `shiftR` 10) .&. 0x3FFFF in [ 0x07000000 .|. (hi22 w32), 0x8610E000 .|. (lo10 w32), 0x81C0C000, 0x01000000 ] #elif powerpc_TARGET_ARCH -- We'll use r12, for no particular reason. -- 0xDEADBEEF stands for the adress: -- 3D80DEAD lis r12,0xDEAD -- 618CBEEF ori r12,r12,0xBEEF -- 7D8903A6 mtctr r12 -- 4E800420 bctr type ItblCode = Word32 mkJumpToAddr a = let w32 = fromIntegral (ptrToInt a) hi16 x = (x `shiftR` 16) .&. 0xFFFF lo16 x = x .&. 0xFFFF in [ 0x3D800000 .|. hi16 w32, 0x618C0000 .|. lo16 w32, 0x7D8903A6, 0x4E800420 ] #elif i386_TARGET_ARCH -- Let the address to jump to be 0xWWXXYYZZ. -- Generate movl $0xWWXXYYZZ,%eax ; jmp *%eax -- which is -- B8 ZZ YY XX WW FF E0 type ItblCode = Word8 mkJumpToAddr a = let w32 = fromIntegral (ptrToInt a) :: Word32 insnBytes :: [Word8] insnBytes = [0xB8, byte0 w32, byte1 w32, byte2 w32, byte3 w32, 0xFF, 0xE0] in insnBytes #elif x86_64_TARGET_ARCH -- Generates: -- jmpq *.L1(%rip) -- .align 8 -- .L1: -- .quad <addr> -- -- We need a full 64-bit pointer (we can't assume the info table is -- allocated in low memory). Assuming the info pointer is aligned to -- an 8-byte boundary, the addr will also be aligned. type ItblCode = Word8 mkJumpToAddr a = let w64 = fromIntegral (ptrToInt a) :: Word64 insnBytes :: [Word8] insnBytes = [0xff, 0x25, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, byte0 w64, byte1 w64, byte2 w64, byte3 w64, byte4 w64, byte5 w64, byte6 w64, byte7 w64] in insnBytes #elif alpha_TARGET_ARCH type ItblCode = Word32 mkJumpToAddr a = [ 0xc3800000 -- br at, .+4 , 0xa79c000c -- ldq at, 12(at) , 0x6bfc0000 -- jmp (at) # with zero hint -- oh well , 0x47ff041f -- nop , fromIntegral (w64 .&. 0x0000FFFF) , fromIntegral ((w64 `shiftR` 32) .&. 
0x0000FFFF) ] where w64 = fromIntegral (ptrToInt a) :: Word64 #else type ItblCode = Word32 mkJumpToAddr a = undefined #endif byte0, byte1, byte2, byte3, byte4, byte5, byte6, byte7 :: (Integral w, Bits w) => w -> Word8 byte0 w = fromIntegral w byte1 w = fromIntegral (w `shiftR` 8) byte2 w = fromIntegral (w `shiftR` 16) byte3 w = fromIntegral (w `shiftR` 24) byte4 w = fromIntegral (w `shiftR` 32) byte5 w = fromIntegral (w `shiftR` 40) byte6 w = fromIntegral (w `shiftR` 48) byte7 w = fromIntegral (w `shiftR` 56) ----------------------------------------------------------------------------- -- -- Info table offsets -- ----------------------------------------------------------------------------- stdInfoTableSizeW :: WordOff -- The size of a standard info table varies with profiling/ticky etc, -- so we can't get it from Constants -- It must vary in sync with mkStdInfoTable stdInfoTableSizeW = size_fixed + size_prof where size_fixed = 2 -- layout, type size_prof | opt_SccProfilingOn = 2 | otherwise = 0 stdInfoTableSizeB :: ByteOff stdInfoTableSizeB = stdInfoTableSizeW * wORD_SIZE {-- stdSrtBitmapOffset :: ByteOff -- Byte offset of the SRT bitmap half-word which is -- in the *higher-addressed* part of the type_lit stdSrtBitmapOffset = stdInfoTableSizeB - hALF_WORD_SIZE stdClosureTypeOffset :: ByteOff -- Byte offset of the closure type half-word stdClosureTypeOffset = stdInfoTableSizeB - wORD_SIZE stdPtrsOffset, stdNonPtrsOffset :: ByteOff stdPtrsOffset = stdInfoTableSizeB - 2*wORD_SIZE stdNonPtrsOffset = stdInfoTableSizeB - 2*wORD_SIZE + hALF_WORD_SIZE --} ------------------------------------------------ -- * This section is taken from Linker.lhs -- % -- % (c) The University of Glasgow 2005-2006 -- % -- | Given a data constructor in the heap, find its Name. -- The info tables for data constructors have a field which records -- the source name of the constructor as a Ptr Word8 (UTF-8 encoded -- string). 
The format is: -- -- Package:Module.Name -- -- We use this string to lookup the interpreter's internal representation of the name -- using the lookupOrig. b2s :: [Word8] -> String b2s = fmap (chr . fromIntegral) dataConInfoPtrToNames :: Ptr () -> IO (String, String, String) dataConInfoPtrToNames x = do let ptr = castPtr x :: Ptr StgInfoTable conDescAddress <- getConDescAddress ptr theString <- peekArray0 0 conDescAddress let (pkg, mod, occ) = parse theString return (b2s pkg, b2s mod, b2s occ) {- To find the string in the constructor's info table we need to consider the layout of info tables relative to the entry code for a closure. An info table can be next to the entry code for the closure, or it can be separate. The former (faster) is used in registerised versions of ghc, and the latter (portable) is for non-registerised versions. The diagrams below show where the string is to be found relative to the normal info table of the closure. 1) Code next to table: -------------- | | <- pointer to the start of the string -------------- | | <- the (start of the) info table structure | | | | -------------- | entry code | | .... | In this case the pointer to the start of the string can be found in the memory location _one word before_ the first entry in the normal info table. 2) Code NOT next to table: -------------- info table structure -> | *------------------> -------------- | | | entry code | | | | .... | -------------- ptr to start of str -> | | -------------- In this case the pointer to the start of the string can be found in the memory location: info_table_ptr + info_table_size -} getConDescAddress :: Ptr StgInfoTable -> IO (Ptr Word8) getConDescAddress ptr | ghciTablesNextToCode = do offsetToString <- peek (ptr `plusPtr` (negate wORD_SIZE)) return $ (ptr `plusPtr` stdInfoTableSizeB) `plusPtr` (fromIntegral (offsetToString :: StgWord)) | otherwise = peek . intPtrToPtr . (+ fromIntegral stdInfoTableSizeB) . 
ptrToIntPtr $ ptr -- parsing names is a little bit fiddly because we have a string in the form: -- pkg:A.B.C.foo, and we want to split it into three parts: ("pkg", "A.B.C", "foo"). -- Thus we split at the leftmost colon and the rightmost occurrence of the dot. -- It would be easier if the string was in the form pkg:A.B.C:foo, but alas -- this is not the conventional way of writing Haskell names. We stick with -- convention, even though it makes the parsing code more troublesome. -- Warning: this code assumes that the string is well formed. XXXXXXXXXXXXXXXXXXX parse :: [Word8] -> ([Word8], [Word8], [Word8]) parse input = if not . all (>0) . fmap length $ [pkg,mod,occ] then (error . concat) ["getConDescAddress:parse:" ,"(not . all (>0) . fmap le" ,"ngth $ [pkg,mod,occ]"] else (pkg, mod, occ) -- = ASSERT (all (>0) (map length [pkg, mod, occ])) (pkg, mod, occ) -- XXXXXXXXXXXXXXXX where (pkg, rest1) = break (== fromIntegral (ord ':')) input (mod, occ) = (concat $ intersperse [dot] $ reverse modWords, occWord) where (modWords, occWord) = if (length rest1 < 1) -- XXXXXXXXx YUKX then error "getConDescAddress:parse:length rest1 < 1" else parseModOcc [] (tail rest1) -- ASSERT (length rest1 > 0) (parseModOcc [] (tail rest1)) dot = fromIntegral (ord '.') parseModOcc :: [[Word8]] -> [Word8] -> ([[Word8]], [Word8]) parseModOcc acc str = case break (== dot) str of (top, []) -> (acc, top) (top, _:bot) -> parseModOcc (top : acc) bot ------------------------------------------------
thoughtpolice/vacuum
src/GHC/Vacuum/Internal.hs
lgpl-3.0
14,638
0
17
4,082
2,653
1,465
1,188
183
4
module Network.Haskoin.Crypto.ExtendedKeys ( XPubKey(..) , XPrvKey(..) , ChainCode , makeXPrvKey , deriveXPubKey , prvSubKey , pubSubKey , primeSubKey , prvSubKeys , pubSubKeys , primeSubKeys , mulSigSubKey , mulSigSubKeys , xPrvIsPrime , xPubIsPrime , xPrvChild , xPubChild , xPubID , xPrvID , xPubFP , xPrvFP , xPubAddr , xPubExport , xPrvExport , xPubImport , xPrvImport , xPrvWIF , cycleIndex , cycleIndex' ) where import Control.DeepSeq (NFData, rnf) import Control.Monad (mzero, guard, unless, when, liftM2) import Data.Aeson (Value(String), FromJSON, ToJSON, parseJSON, toJSON, withText) import Data.Binary (Binary, get, put) import Data.Binary.Get (Get, getWord8, getWord32be) import Data.Binary.Put (Put, runPut, putWord8, putWord32be) import Data.Word (Word8, Word32) import Data.Bits (shiftR, setBit, testBit, clearBit) import Data.Maybe (mapMaybe) import qualified Data.Text as T (pack, unpack) import qualified Data.ByteString as BS (ByteString, append) import Network.Haskoin.Util import Network.Haskoin.Constants import Network.Haskoin.Crypto.Keys import Network.Haskoin.Crypto.Hash import Network.Haskoin.Crypto.Base58 import Network.Haskoin.Crypto.BigWord import Network.Haskoin.Crypto.Curve import Network.Haskoin.Crypto.Point {- See BIP32 for details: https://en.bitcoin.it/wiki/BIP_0032 -} type ChainCode = Word256 -- | Data type representing an extended BIP32 private key. An extended key -- is a node in a tree of key derivations. It has a depth in the tree, a -- parent node and an index to differentiate it from other siblings. data XPrvKey = XPrvKey { xPrvDepth :: !Word8 -- ^ Depth in the tree of key derivations. , xPrvParent :: !Word32 -- ^ Fingerprint of the parent key. , xPrvIndex :: !Word32 -- ^ Key derivation index. , xPrvChain :: !ChainCode -- ^ Chain code. , xPrvKey :: !PrvKey -- ^ The private key of this extended key node. 
} deriving (Eq, Show, Read) instance NFData XPrvKey where rnf (XPrvKey d p i c k) = rnf d `seq` rnf p `seq` rnf i `seq` rnf c `seq` rnf k instance ToJSON XPrvKey where toJSON = String . T.pack . xPrvExport instance FromJSON XPrvKey where parseJSON = withText "xprvkey" $ \t -> maybe mzero return $ xPrvImport (T.unpack t) -- | Data type representing an extended BIP32 public key. data XPubKey = XPubKey { xPubDepth :: !Word8 -- ^ Depth in the tree of key derivations. , xPubParent :: !Word32 -- ^ Fingerprint of the parent key. , xPubIndex :: !Word32 -- ^ Key derivation index. , xPubChain :: !ChainCode -- ^ Chain code. , xPubKey :: !PubKey -- ^ The public key of this extended key node. } deriving (Eq, Show, Read) instance NFData XPubKey where rnf (XPubKey d p i c k) = rnf d `seq` rnf p `seq` rnf i `seq` rnf c `seq` rnf k instance ToJSON XPubKey where toJSON = String . T.pack . xPubExport instance FromJSON XPubKey where parseJSON = withText "xpubkey" $ \t -> maybe mzero return $ xPubImport (T.unpack t) -- | Build a BIP32 compatible extended private key from a bytestring. This will -- produce a root node (depth=0 and parent=0). makeXPrvKey :: BS.ByteString -> Maybe XPrvKey makeXPrvKey bs = do pk' <- makePrvKey $ fromIntegral pk return $ XPrvKey 0 0 0 c pk' where (pk,c) = split512 $ hmac512 (stringToBS "Bitcoin seed") bs -- | Derive an extended public key from an extended private key. This function -- will preserve the depth, parent, index and chaincode fields of the extended -- private keys. deriveXPubKey :: XPrvKey -> XPubKey deriveXPubKey (XPrvKey d p i c k) = XPubKey d p i c (derivePubKey k) -- | Compute a private, non-prime child key derivation. A private non-prime -- derivation will allow the equivalent extended public key to derive the -- public key for this child. Given a parent key /m/ and a derivation index /i/, -- this function will compute m\/i\/. -- -- Non-prime derivations allow for more flexibility such as read-only wallets. 
-- However, care must be taken not the leak both the parent extended public -- key and one of the extended child private keys as this would compromise the -- extended parent private key. prvSubKey :: XPrvKey -- ^ Extended parent private key -> Word32 -- ^ Child derivation index -> Maybe XPrvKey -- ^ Extended child private key prvSubKey xkey child = guardIndex child >> do k <- addPrvKeys (xPrvKey xkey) a return $ XPrvKey (xPrvDepth xkey + 1) (xPrvFP xkey) child c k where pK = xPubKey $ deriveXPubKey xkey msg = BS.append (encode' pK) (encode' child) (a,c) = split512 $ hmac512 (encode' $ xPrvChain xkey) msg -- | Compute a public, non-prime child key derivation. Given a parent key /M/ -- and a derivation index /i/, this function will compute M\/i\/. pubSubKey :: XPubKey -- ^ Extended Parent public key -> Word32 -- ^ Child derivation index -> Maybe XPubKey -- ^ Extended child public key pubSubKey xKey child = guardIndex child >> do pK <- addPubKeys (xPubKey xKey) a return $ XPubKey (xPubDepth xKey + 1) (xPubFP xKey) child c pK where msg = BS.append (encode' $ xPubKey xKey) (encode' child) (a,c) = split512 $ hmac512 (encode' $ xPubChain xKey) msg -- | Compute a prime child key derivation. Prime derivations can only be -- computed for private keys. Prime derivations do not allow the parent -- public key to derive the child public keys. However, they are safer as -- a breach of the parent public key and child private keys does not lead -- to a breach of the parent private key. Given a parent key /m/ and a -- derivation index /i/, this function will compute m\/i'\/. 
primeSubKey :: XPrvKey -- ^ Extended Parent private key -> Word32 -- ^ Child derivation index -> Maybe XPrvKey -- ^ Extended child private key primeSubKey xkey child = guardIndex child >> do k <- addPrvKeys (xPrvKey xkey) a return $ XPrvKey (xPrvDepth xkey + 1) (xPrvFP xkey) i c k where i = setBit child 31 msg = BS.append (bsPadPrvKey $ xPrvKey xkey) (encode' i) (a,c) = split512 $ hmac512 (encode' $ xPrvChain xkey) msg -- Add two private keys together. One of the keys is defined by a Word256. -- The functions fails on uncompressed private keys and return Nothing if the -- Word256 is smaller than the order of the curve N. addPrvKeys :: PrvKey -> Word256 -> Maybe PrvKey addPrvKeys key i | isPrvKeyU key = error "Add: HDW only supports compressed formats" | toInteger i < curveN = let r = (prvKeyFieldN key) + (fromIntegral i :: FieldN) in makePrvKey $ toInteger r | otherwise = Nothing -- Add a public key to a private key defined by its Word256 value. This will -- transform the private key into a public key and add the respective public -- key points together. This function fails for uncompressed keys and returns -- Nothing if the private key value is >= than the order of the curve N. addPubKeys :: PubKey -> Word256 -> Maybe PubKey addPubKeys pub i | isPubKeyU pub = error "Add: HDW only supports compressed formats" | toInteger i < curveN = let pt1 = mulPoint (fromIntegral i :: FieldN) curveG pt2 = addPoint (pubKeyPoint pub) pt1 in if isInfPoint pt2 then Nothing else Just $ PubKey pt2 | otherwise = Nothing -- | Cyclic list of all private non-prime child key derivations of a parent key -- starting from an offset index. prvSubKeys :: XPrvKey -> Word32 -> [(XPrvKey,Word32)] prvSubKeys k i = mapMaybe f $ cycleIndex i where f j = liftM2 (,) (prvSubKey k j) (return j) -- | Cyclic list of all public non-prime child key derivations of a parent key -- starting from an offset index. 
pubSubKeys :: XPubKey -> Word32 -> [(XPubKey,Word32)] pubSubKeys k i = mapMaybe f $ cycleIndex i where f j = liftM2 (,) (pubSubKey k j) (return j) -- | Cyclic list of all prime child key derivations of a parent key starting -- from an offset index. primeSubKeys :: XPrvKey -> Word32 -> [(XPrvKey,Word32)] primeSubKeys k i = mapMaybe f $ cycleIndex i where f j = liftM2 (,) (primeSubKey k j) (return j) -- | Compute a public, non-prime subkey derivation for all of the parent public -- keys in the input. This function will succeed only if the child key -- derivations for all the parent keys are valid. -- -- This function is intended to be used in the context of multisignature -- accounts. Parties exchanging their master public keys to create a -- multisignature account can then individually generate all the receiving -- multisignature addresses without further communication. mulSigSubKey :: [XPubKey] -- ^ List of extended parent public keys -> Word32 -- ^ Child key derivation index -> Maybe [XPubKey] -- ^ List of extended child public keys mulSigSubKey pubs i = mapM (flip pubSubKey i) pubs -- | Cyclic list of all public, non-prime multisig key derivations of a list -- of parent keys starting from an offset index. 
mulSigSubKeys :: [XPubKey] -> Word32 -> [([XPubKey],Word32)] mulSigSubKeys pubs i = mapMaybe f $ cycleIndex i where f j = liftM2 (,) (mulSigSubKey pubs j) (return j) cycleIndex :: Word32 -> [Word32] cycleIndex i | i == 0 = cycle [0..0x7fffffff] | i < 0x80000000 = cycle $ [i..0x7fffffff] ++ [0..(i-1)] | otherwise = error $ "cycleIndex: invalid index " ++ (show i) -- Cycle in reverse cycleIndex' :: Word32 -> [Word32] cycleIndex' i | i == 0 = cycle $ 0 : [0x7fffffff,0x7ffffffe..1] | i == 0x7fffffff = cycle [0x7fffffff,0x7ffffffe..0] | i == 0x7ffffffe = cycle $ [0x7ffffffe,0x7ffffffd..0] ++ [0x7fffffff] | i < 0x80000000 = cycle $ [i,(i-1)..0] ++ [0x7fffffff,0x7ffffffe..(i+1)] | otherwise = error $ "cycleIndex: invalid index " ++ (show i) guardIndex :: Word32 -> Maybe () guardIndex child = guard $ child >= 0 && child < 0x80000000 -- | Returns True if the extended private key was derived through a prime -- derivation. xPrvIsPrime :: XPrvKey -> Bool xPrvIsPrime k = testBit (xPrvIndex k) 31 -- | Returns True if the extended public key was derived through a prime -- derivation. xPubIsPrime :: XPubKey -> Bool xPubIsPrime k = testBit (xPubIndex k) 31 -- | Returns the derivation index of this extended private key without the -- prime bit set. xPrvChild :: XPrvKey -> Word32 xPrvChild k = clearBit (xPrvIndex k) 31 -- | Returns the derivation index of this extended public key without the prime -- bit set. xPubChild :: XPubKey -> Word32 xPubChild k = clearBit (xPubIndex k) 31 -- | Computes the key identifier of an extended private key. xPrvID :: XPrvKey -> Word160 xPrvID = xPubID . deriveXPubKey -- | Computes the key identifier of an extended public key. xPubID :: XPubKey -> Word160 xPubID = hash160 . hash256BS . encode' . xPubKey -- | Computes the key fingerprint of an extended private key. xPrvFP :: XPrvKey -> Word32 xPrvFP = fromIntegral . (`shiftR` 128) . xPrvID -- | Computes the key fingerprint of an extended public key. xPubFP :: XPubKey -> Word32 xPubFP = fromIntegral . 
(`shiftR` 128) . xPubID -- | Computer the 'Address' of an extended public key. xPubAddr :: XPubKey -> Address xPubAddr = pubKeyAddr . xPubKey -- | Exports an extended private key to the BIP32 key export format (base 58). xPrvExport :: XPrvKey -> String xPrvExport = bsToString . encodeBase58Check . encode' -- | Exports an extended public key to the BIP32 key export format (base 58). xPubExport :: XPubKey -> String xPubExport = bsToString . encodeBase58Check . encode' -- | Decodes a BIP32 encoded extended private key. This function will fail if -- invalid base 58 characters are detected or if the checksum fails. xPrvImport :: String -> Maybe XPrvKey xPrvImport str = decodeToMaybe =<< (decodeBase58Check $ stringToBS str) -- | Decodes a BIP32 encoded extended public key. This function will fail if -- invalid base 58 characters are detected or if the checksum fails. xPubImport :: String -> Maybe XPubKey xPubImport str = decodeToMaybe =<< (decodeBase58Check $ stringToBS str) -- | Export an extended private key to WIF (Wallet Import Format). xPrvWIF :: XPrvKey -> String xPrvWIF = toWIF . xPrvKey instance Binary XPrvKey where get = do ver <- getWord32be unless (ver == extSecretPrefix) $ fail $ "Get: Invalid version for extended private key" dep <- getWord8 par <- getWord32be idx <- getWord32be chn <- get prv <- getPadPrvKey return $ XPrvKey dep par idx chn prv put k = do putWord32be extSecretPrefix putWord8 $ xPrvDepth k putWord32be $ xPrvParent k putWord32be $ xPrvIndex k put $ xPrvChain k putPadPrvKey $ xPrvKey k instance Binary XPubKey where get = do ver <- getWord32be unless (ver == extPubKeyPrefix) $ fail $ "Get: Invalid version for extended public key" dep <- getWord8 par <- getWord32be idx <- getWord32be chn <- get pub <- get when (isPubKeyU pub) $ fail $ "Invalid public key. 
Only compressed format is supported" return $ XPubKey dep par idx chn pub put k = do putWord32be extPubKeyPrefix putWord8 $ xPubDepth k putWord32be $ xPubParent k putWord32be $ xPubIndex k put $ xPubChain k when (isPubKeyU (xPubKey k)) $ fail $ "Only compressed public keys are supported" put $ xPubKey k {- Utilities for extended keys -} -- De-serialize HDW-specific private key getPadPrvKey :: Get PrvKey getPadPrvKey = do pad <- getWord8 unless (pad == 0x00) $ fail $ "Private key must be padded with 0x00" getPrvKey -- Compressed version -- Serialize HDW-specific private key putPadPrvKey :: PrvKey -> Put putPadPrvKey p = putWord8 0x00 >> putPrvKey p bsPadPrvKey :: PrvKey -> BS.ByteString bsPadPrvKey = toStrictBS . runPut . putPadPrvKey
nuttycom/haskoin
Network/Haskoin/Crypto/ExtendedKeys.hs
unlicense
14,096
0
14
3,233
3,139
1,670
1,469
257
2