Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Sign up
Appearance settings

Optimise module to filename #4600

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
guibou wants to merge 4 commits into master
base: master
Choose a base branch
Loading
from optimise_module_to_filename
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions flake.nix
View file Open in desktop
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@
chmod +x $dest
'';

mkDevShell = hpkgs: with pkgs; mkShell {
mkDevShell = hpkgs: with pkgs; pkgs.mkShell {
name = "haskell-language-server-dev-ghc${hpkgs.ghc.version}";
# For binary Haskell tools, we use the default Nixpkgs GHC version.
# This removes a rebuild with a different GHC version. The drawback of
Expand Down Expand Up @@ -106,7 +106,9 @@
in {
# Development shell with only dev tools
devShells = {
default = mkDevShell pkgs.haskellPackages;
default = pkgs.mkShell {
buildInputs = with pkgs; [zlib haskell.compiler.ghc910 cabal-install];
};
shell-ghc96 = mkDevShell pkgs.haskell.packages.ghc96;
shell-ghc98 = mkDevShell pkgs.haskell.packages.ghc98;
shell-ghc910 = mkDevShell pkgs.haskell.packages.ghc910;
Expand Down
1 change: 1 addition & 0 deletions ghcide/ghcide.cabal
View file Open in desktop
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@ library
, unliftio-core
, unordered-containers >=0.2.10.0
, vector
, pretty-simple

if os(windows)
build-depends: Win32
Expand Down
10 changes: 10 additions & 0 deletions ghcide/src/Development/IDE/Core/RuleTypes.hs
View file Open in desktop
Original file line number Diff line number Diff line change
Expand Up @@ -412,6 +412,9 @@ type instance RuleResult GetModSummary = ModSummaryResult
-- | Generate a ModSummary with the timestamps and preprocessed content elided, for more successful early cutoff
type instance RuleResult GetModSummaryWithoutTimestamps = ModSummaryResult

-- | Result of 'GetModulesPaths': maps from module name to the unit and file
-- defining it. The first map covers regular modules, the second covers
-- @-boot@ (source) modules.
type instance RuleResult GetModulesPaths = (M.Map ModuleName (UnitId, NormalizedFilePath),
M.Map ModuleName (UnitId, NormalizedFilePath))

data GetParsedModule = GetParsedModule
deriving (Eq, Show, Generic)
instance Hashable GetParsedModule
Expand Down Expand Up @@ -524,6 +527,13 @@ data GetModSummaryWithoutTimestamps = GetModSummaryWithoutTimestamps
instance Hashable GetModSummaryWithoutTimestamps
instance NFData GetModSummaryWithoutTimestamps

-- | Scan all the import directories for existing modules and build maps from
-- module name to file path (one map for regular modules, one for @-boot@
-- source modules).
data GetModulesPaths = GetModulesPaths
deriving (Eq, Show, Generic)
instance Hashable GetModulesPaths
instance NFData GetModulesPaths

data GetModSummary = GetModSummary
deriving (Eq, Show, Generic)
instance Hashable GetModSummary
Expand Down
79 changes: 62 additions & 17 deletions ghcide/src/Development/IDE/Core/Rules.hs
View file Open in desktop
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE PartialTypeSignatures #-}

-- | A Shake implementation of the compiler service, built
-- using the "Shaker" abstraction layer for in-memory use.
Expand Down Expand Up @@ -93,7 +94,7 @@
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Utf16.Rope.Mixed as Rope
import Data.Time (UTCTime (..))
import Data.Time (UTCTime (..), getCurrentTime, diffUTCTime)
import Data.Time.Clock.POSIX (posixSecondsToUTCTime)
import Data.Tuple.Extra
import Data.Typeable (cast)
Expand Down Expand Up @@ -175,6 +176,12 @@

import qualified Data.IntMap as IM
import GHC.Fingerprint
import Text.Pretty.Simple
import qualified Data.Map.Strict as Map
import System.FilePath (takeExtension, takeFileName, normalise, dropTrailingPathSeparator, dropExtension, splitDirectories)
import Data.Char (isUpper)
import System.Directory.Extra (listFilesRecursive, listFilesInside)
import System.IO.Unsafe

data Log
= LogShake Shake.Log
Expand Down Expand Up @@ -319,30 +326,21 @@
getLocatedImportsRule :: Recorder (WithPriority Log) -> Rules ()
getLocatedImportsRule recorder =
define (cmapWithPrio LogShake recorder) $ \GetLocatedImports file -> do

ModSummaryResult{msrModSummary = ms} <- use_ GetModSummaryWithoutTimestamps file
(KnownTargets targets targetsMap) <- useNoFile_ GetKnownTargets
-- TODO: should we reverse this concatenation? In theory there are far fewer
-- source imports than normal imports, so it should be faster
let imports = [(False, imp) | imp <- ms_textual_imps ms] ++ [(True, imp) | imp <- ms_srcimps ms]
env_eq <- use_ GhcSession file
let env = hscEnv env_eq
let import_dirs = map (second homeUnitEnv_dflags) $ hugElts $ hsc_HUG env
let dflags = hsc_dflags env
opt <- getIdeOptions
let getTargetFor modName nfp
| Just (TargetFile nfp') <- HM.lookup (TargetFile nfp) targetsMap = do
-- reuse the existing NormalizedFilePath in order to maximize sharing
itExists <- getFileExists nfp'
return $ if itExists then Just nfp' else Nothing
| Just tt <- HM.lookup (TargetModule modName) targets = do
-- reuse the existing NormalizedFilePath in order to maximize sharing
let ttmap = HM.mapWithKey const (HashSet.toMap tt)
nfp' = HM.lookupDefault nfp nfp ttmap
itExists <- getFileExists nfp'
return $ if itExists then Just nfp' else Nothing
| otherwise = do
itExists <- getFileExists nfp
return $ if itExists then Just nfp else Nothing

moduleMaps <- use_ GetModulesPaths file
(diags, imports') <- fmap unzip $ forM imports $ \(isSource, (mbPkgName, modName)) -> do
diagOrImp <- locateModule (hscSetFlags dflags env) import_dirs (optExtensions opt) getTargetFor modName mbPkgName isSource

diagOrImp <- locateModule moduleMaps (hscSetFlags dflags env) import_dirs (optExtensions opt) modName mbPkgName isSource
case diagOrImp of
Left diags -> pure (diags, Just (modName, Nothing))
Right (FileImport path) -> pure ([], Just (modName, Just path))
Expand Down Expand Up @@ -632,10 +630,55 @@
fs <- toKnownFiles <$> useNoFile_ GetKnownTargets
dependencyInfoForFiles (HashSet.toList fs)

{-# NOINLINE cacheVar #-}
-- TODO: this should not use unsafePerformIO
cacheVar = unsafePerformIO (newTVarIO mempty)

Check failure on line 635 in ghcide/src/Development/IDE/Core/Rules.hs

View workflow job for this annotation

GitHub Actions / Hlint check run

Error in cacheVar in module Development.IDE.Core.Rules: Avoid restricted function ▫︎ Found: "unsafePerformIO" ▫︎ Note: may break the code

getModulesPathsRule :: Recorder (WithPriority Log) -> Rules ()
getModulesPathsRule recorder = defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetModulesPaths file -> do
env_eq <- use_ GhcSession file

cache <- liftIO (readTVarIO cacheVar)
case Map.lookup (envUnique env_eq) cache of
Just res -> pure (mempty, ([], Just res))
Nothing -> do
let env = hscEnv env_eq
let import_dirs = map (second homeUnitEnv_dflags) $ hugElts $ hsc_HUG env
opt <- getIdeOptions
let exts = (optExtensions opt)

Check warning on line 648 in ghcide/src/Development/IDE/Core/Rules.hs

View workflow job for this annotation

GitHub Actions / Hlint check run

Suggestion in getModulesPathsRule in module Development.IDE.Core.Rules: Redundant bracket ▫︎ Found: "do let env = hscEnv env_eq\n let import_dirs\n = map (second homeUnitEnv_dflags) $ hugElts $ hsc_HUG env\n opt <- getIdeOptions\n let exts = (optExtensions opt)\n let acceptedExtensions\n = concatMap (\\ x -> ['.' : x, '.' : x <> \"-boot\"]) exts\n (unzip -> (a, b)) <- flip mapM import_dirs\n $ \\ (u, dyn)\n -> do (unzip -> (a, b)) <- flip mapM (importPaths dyn)\n $ \\ dir'\n -> do let dir\n = dropTrailingPathSeparator\n dir'\n let predicate path\n = pure\n (path == dir\n ||\n isUpper\n (head\n (takeFileName\n path)))\n let dir_number_directories\n = length\n (splitDirectories\n dir)\n let toModule file\n = mkModuleName\n (intercalate \".\"\n $ drop\n dir_number_directories\n (splitDirectories\n (dropExtension\n file)))\n modules <- (fmap\n (\\ path\n -> (toModule\n path, \n toNormalizedFilePath'\n path))\n . filter\n (\\ y\n -> takeExtension\n y\n `elem`\n acceptedExtensions)\n <$>\n liftIO\n (listFilesInside\n predicate\n dir))\n `catch`\n (\\ (_ :: IOException)\n -> pure [])\n let isSourceModule (_, path)\n = \"-boot\"\n `isSuffixOf`\n fromNormalizedFilePath\n path\n let (sourceModules,\n notSourceModules)\n = partition\n isSourceModule\n modules\n pure\n $ (Map.fromList\n notSourceModules, \n Map.fromList\n sourceModules)\n pure (fmap (u,) $ mconcat a, fmap (u,) $ mconcat b)\n let res = (mconcat a, mconcat b)\n liftIO\n $ atomically\n $ modifyTVar' cacheVar (Map.insert (envUnique env_eq) res)\n pure (mempty, ([], Just $ (mconcat a, mconcat b)))" ▫︎ Perhaps: "do let env = hscEnv env_eq\n let import_dirs\n = map (second homeUnitEnv_dflags) $ hugElts $ hsc_HUG env\n opt <- getIdeOptions\n let exts = optExtensions opt\n let acceptedExtensions\n = concatMap (\\ x -> ['.' : x, '.' 
: x <> \"-boot\"]) exts\n (unzip -> (a, b)) <- flip mapM import_dirs\n $ \\ (u, dyn)\n -> do (unzip -> (a, b)) <- flip mapM (importPaths dyn)\n $ \\ dir'\n -> do let dir\n = dropTrailingPathSeparator\n dir'\n let predicate path\n = pure\n (path == dir\n ||\n isUpper\n (head\n (takeFileName\n path)))\n let dir_number_directories\n = length\n (splitDirectories\n dir)\n let toModule file\n = mkModuleName\n (intercalate \".\"\n $ drop\n dir_number_directories\n (splitDirectories\n (dropExtension\n file)))\n modules <- (fmap\n (\\ path\n -> (toModule\n path, \n toNormalizedFilePath'\n path))\n . filter\n (\\ y\n -> takeExtension\n y\n `elem`\n acceptedExtensions)\n <$>\n liftIO\n (listFilesInside\n predicate\n dir))\n `catch`\n (\\ (_ :: IOException)\n -> pure [])\n let isSourceModule (_, path)\n = \"-boot\"\n `isSuffixOf`\n fromNormalizedFilePath\n path\n let (sourceModules,\n notSourceModules)\n = partition\n isSourceModule\n modules\n pure\n $ (Map.fromList\n notSourceModules, \n Map.fromList\n sourceModules)\n pure (fmap (u,) $ mconcat a, fmap (u,) $ mconcat b)\n let res = (mconcat a, mconcat b)\n liftIO\n $ atomically\n $ modifyTVar' cacheVar (Map.insert (envUnique env_eq) res)\n pure (mempty, ([], Just $ (mconcat a, mconcat b)))"
let acceptedExtensions = concatMap (\x -> ['.':x, '.':x <> "-boot"]) exts

(unzip -> (a, b)) <- flip mapM import_dirs $ \(u, dyn) -> do
(unzip -> (a, b)) <- flip mapM (importPaths dyn) $ \dir' -> do
let dir = dropTrailingPathSeparator dir'
let predicate path = pure (path == dir || isUpper (head (takeFileName path)))

Check failure on line 654 in ghcide/src/Development/IDE/Core/Rules.hs

View workflow job for this annotation

GitHub Actions / Hlint check run

Error in getModulesPathsRule in module Development.IDE.Core.Rules: Avoid restricted function ▫︎ Found: "head" ▫︎ Note: may break the code
let dir_number_directories = length (splitDirectories dir)
let toModule file = mkModuleName (intercalate "." $ drop dir_number_directories (splitDirectories (dropExtension file)))

-- TODO: we are taking/dropping the extension; this could be factored out to save a few cpu cycles ;)
-- TODO: does acceptedExtensions need to be a Set? Or a Vector?
-- If the directory is empty, we return an empty list of modules
-- using 'catch' instead of an exception which would kill the LSP
modules <- (fmap (\path -> (toModule path, toNormalizedFilePath' path)) . filter (\y -> takeExtension y `elem` acceptedExtensions) <$> liftIO (listFilesInside predicate dir))
`catch` (\(_ :: IOException) -> pure [])
let isSourceModule (_, path) = "-boot" `isSuffixOf` fromNormalizedFilePath path
let (sourceModules, notSourceModules) = partition isSourceModule modules
pure $ (Map.fromList notSourceModules, Map.fromList sourceModules)
pure (fmap (u,) $ mconcat a, fmap (u, ) $ mconcat b)

let res = (mconcat a, mconcat b)
liftIO $ atomically $ modifyTVar' cacheVar (Map.insert (envUnique env_eq) res)

pure (mempty, ([], Just $ (mconcat a, mconcat b)))

dependencyInfoForFiles :: [NormalizedFilePath] -> Action (BS.ByteString, DependencyInformation)
dependencyInfoForFiles fs = do
-- liftIO $ print ("fs length", length fs)
(rawDepInfo, bm) <- rawDependencyInformation fs
-- liftIO $ print ("ok with raw deps")
-- liftIO $ pPrint rawDepInfo
let (all_fs, _all_ids) = unzip $ HM.toList $ pathToIdMap $ rawPathIdMap rawDepInfo
-- liftIO $ print ("all_fs length", length all_fs)
msrs <- uses GetModSummaryWithoutTimestamps all_fs
let mss = map (fmap msrModSummary) msrs
let deps = map (\i -> IM.lookup (getFilePathId i) (rawImports rawDepInfo)) _all_ids
Expand Down Expand Up @@ -714,6 +757,7 @@
IdeGhcSession{loadSessionFun} <- useNoFile_ GhcSessionIO
-- loading is always returning an absolute path now
(val,deps) <- liftIO $ loadSessionFun $ fromNormalizedFilePath file
-- TODO: this is responsible for a LOT of allocations

-- add the deps to the Shake graph
let addDependency fp = do
Expand Down Expand Up @@ -1235,6 +1279,7 @@
getModIfaceRule recorder
getModSummaryRule templateHaskellWarning recorder
getModuleGraphRule recorder
getModulesPathsRule recorder
getFileHashRule recorder
knownFilesRule recorder
getClientSettingsRule recorder
Expand Down
41 changes: 27 additions & 14 deletions ghcide/src/Development/IDE/Import/FindImports.hs
View file Open in desktop
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@

module Development.IDE.Import.FindImports
( locateModule
, locateModuleFile
, Import(..)
, ArtifactsLocation(..)
, modSummaryToArtifactsLocation
Expand All @@ -14,9 +13,8 @@ module Development.IDE.Import.FindImports
) where

import Control.DeepSeq
import Control.Monad.Extra
import Control.Monad.IO.Class
import Data.List (find, isSuffixOf)
import Data.List (isSuffixOf)
import Data.Maybe
import qualified Data.Set as S
import Development.IDE.GHC.Compat as Compat
Expand All @@ -26,7 +24,8 @@ import Development.IDE.Types.Diagnostics
import Development.IDE.Types.Location
import GHC.Types.PkgQual
import GHC.Unit.State
import System.FilePath
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map


#if MIN_VERSION_ghc(9,11,0)
Expand Down Expand Up @@ -70,6 +69,7 @@ data LocateResult
| LocateFoundReexport UnitId
| LocateFoundFile UnitId NormalizedFilePath

{-
-- | locate a module in the file system. Where we go from *daml to Haskell
locateModuleFile :: MonadIO m
=> [(UnitId, [FilePath], S.Set ModuleName)]
Expand All @@ -94,6 +94,7 @@ locateModuleFile import_dirss exts targetFor isSource modName = do
maybeBoot ext
| isSource = ext ++ "-boot"
| otherwise = ext
-}

-- | This function is used to map a package name to a set of import paths.
-- It only returns Just for unit-ids which are possible to import into the
Expand All @@ -110,36 +111,45 @@ mkImportDirs _env (i, flags) = Just (i, (importPaths flags, reexportedModules fl
-- Haskell
locateModule
:: MonadIO m
=> HscEnv
=> (Map ModuleName (UnitId, NormalizedFilePath),Map ModuleName (UnitId, NormalizedFilePath))
-> HscEnv
-> [(UnitId, DynFlags)] -- ^ Import directories
-> [String] -- ^ File extensions
-> (ModuleName -> NormalizedFilePath -> m (Maybe NormalizedFilePath)) -- ^ does file exist predicate
-> Located ModuleName -- ^ Module name
-> PkgQual -- ^ Package name
-> Bool -- ^ Is boot module
-> m (Either [FileDiagnostic] Import)
locateModule env comp_info exts targetFor modName mbPkgName isSource = do
locateModule moduleMaps@(moduleMap, moduleMapSource) env comp_info exts modName mbPkgName isSource = do
case mbPkgName of
-- 'ThisPkg' just means some home module, not the current unit
ThisPkg uid
-- TODO: there are MANY lookup on import_paths, which is a problem considering that it can be large.
| Just (dirs, reexports) <- lookup uid import_paths
-> lookupLocal uid dirs reexports
-> lookupLocal moduleMaps uid dirs reexports
| otherwise -> return $ Left $ notFoundErr env modName $ LookupNotFound []
-- if a package name is given we only go look for a package
OtherPkg uid
| Just (dirs, reexports) <- lookup uid import_paths
-> lookupLocal uid dirs reexports
-> lookupLocal moduleMaps uid dirs reexports
| otherwise -> lookupInPackageDB
NoPkgQual -> do

-- Reexports for current unit have to be empty because they only apply to other units depending on the
-- current unit. If we set the reexports to be the actual reexports then we risk looping forever trying
-- to find the module from the perspective of the current unit.
mbFile <- locateModuleFile ((homeUnitId_ dflags, importPaths dflags, S.empty) : other_imports) exts targetFor isSource $ unLoc modName
---- locateModuleFile ((homeUnitId_ dflags, importPaths dflags, S.empty) : other_imports) exts targetFor isSource $ unLoc modName
--
-- TODO: handle the other imports, the unit id, ..., reexport.
-- - TODO: should we look for file existence now? If the file was
-- removed from the disk, how will it behaves? How do we invalidate
-- that?
let mbFile = case Map.lookup (unLoc modName) (if isSource then moduleMapSource else moduleMap) of
Nothing -> LocateNotFound
Just (uid, file) -> LocateFoundFile uid file
case mbFile of
LocateNotFound -> lookupInPackageDB
-- Lookup again with the perspective of the unit reexporting the file
LocateFoundReexport uid -> locateModule (hscSetActiveUnitId uid env) comp_info exts targetFor modName noPkgQual isSource
LocateFoundReexport uid -> locateModule moduleMaps (hscSetActiveUnitId uid env) comp_info exts modName noPkgQual isSource
LocateFoundFile uid file -> toModLocation uid file
where
dflags = hsc_dflags env
Expand Down Expand Up @@ -168,12 +178,15 @@ locateModule env comp_info exts targetFor modName mbPkgName isSource = do
let genMod = mkModule (RealUnit $ Definite uid) (unLoc modName) -- TODO support backpack holes
return $ Right $ FileImport $ ArtifactsLocation file (Just loc) (not isSource) (Just genMod)

lookupLocal uid dirs reexports = do
mbFile <- locateModuleFile [(uid, dirs, reexports)] exts targetFor isSource $ unLoc modName
lookupLocal moduleMaps@(moduleMapSource, moduleMap) uid dirs reexports = do
-- mbFile <- locateModuleFile [(uid, dirs, reexports)] exts targetFor isSource $ unLoc modName
let mbFile = case Map.lookup (unLoc modName) (if isSource then moduleMapSource else moduleMap) of
Nothing -> LocateNotFound
Just (uid, file) -> LocateFoundFile uid file
case mbFile of
LocateNotFound -> return $ Left $ notFoundErr env modName $ LookupNotFound []
-- Lookup again with the perspective of the unit reexporting the file
LocateFoundReexport uid' -> locateModule (hscSetActiveUnitId uid' env) comp_info exts targetFor modName noPkgQual isSource
LocateFoundReexport uid' -> locateModule moduleMaps (hscSetActiveUnitId uid' env) comp_info exts modName noPkgQual isSource
LocateFoundFile uid' file -> toModLocation uid' file

lookupInPackageDB = do
Expand Down
2 changes: 1 addition & 1 deletion ghcide/src/Development/IDE/Types/HscEnvEq.hs
View file Open in desktop
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{-# LANGUAGE CPP #-}
module Development.IDE.Types.HscEnvEq
( HscEnvEq,
hscEnv, newHscEnvEq,
hscEnv, newHscEnvEq, envUnique,
updateHscEnvEq,
envPackageExports,
envVisibleModuleNames,
Expand Down
41 changes: 9 additions & 32 deletions ghcide/src/Development/IDE/Types/KnownTargets.hs
View file Open in desktop
Original file line number Diff line number Diff line change
Expand Up @@ -19,49 +19,26 @@ import Development.IDE.Types.Location
import GHC.Generics

-- | A mapping of module name to known files
data KnownTargets = KnownTargets
{ targetMap :: !(HashMap Target (HashSet NormalizedFilePath))
-- | 'normalisingMap' is a cached copy of `HMap.mapKey const targetMap`
--
-- At startup 'GetLocatedImports' is called on all known files. Say you have 10000
-- modules in your project then this leads to 10000 calls to 'GetLocatedImports'
-- running concurrently.
--
-- In `GetLocatedImports` the known targets are consulted and the targetsMap
-- is created by mapping the known targets. This map is used for introducing
-- sharing amongst filepaths. This operation copies a local copy of the `target`
-- map which is local to the rule.
--
-- @
-- let targetsMap = HMap.mapWithKey const targets
-- @
--
-- So now each rule has a 'HashMap' of size 10000 held locally to it and depending
-- on how the threads are scheduled there will be 10000^2 elements in total
-- allocated in 'HashMap's. This used a lot of memory.
--
-- Solution: Return the 'normalisingMap' in the result of the `GetKnownTargets` rule so it is shared across threads.
, normalisingMap :: !(HashMap Target Target) } deriving Show
newtype KnownTargets = KnownTargets
{ targetMap :: (HashMap Target (HashSet NormalizedFilePath))
} deriving (Show, Eq)


unionKnownTargets :: KnownTargets -> KnownTargets -> KnownTargets
unionKnownTargets (KnownTargets tm nm) (KnownTargets tm' nm') =
KnownTargets (HMap.unionWith (<>) tm tm') (HMap.union nm nm')
unionKnownTargets (KnownTargets tm) (KnownTargets tm') =
KnownTargets (HMap.unionWith (<>) tm tm')

mkKnownTargets :: [(Target, HashSet NormalizedFilePath)] -> KnownTargets
mkKnownTargets vs = KnownTargets (HMap.fromList vs) (HMap.fromList [(k,k) | (k,_) <- vs ])
mkKnownTargets vs = KnownTargets (HMap.fromList vs)

instance NFData KnownTargets where
rnf (KnownTargets tm nm) = rnf tm `seq` rnf nm `seq` ()

instance Eq KnownTargets where
k1 == k2 = targetMap k1 == targetMap k2
rnf (KnownTargets tm) = rnf tm `seq` ()

instance Hashable KnownTargets where
hashWithSalt s (KnownTargets hm _) = hashWithSalt s hm
hashWithSalt s (KnownTargets hm) = hashWithSalt s hm

emptyKnownTargets :: KnownTargets
emptyKnownTargets = KnownTargets HMap.empty HMap.empty
emptyKnownTargets = KnownTargets HMap.empty

data Target = TargetModule ModuleName | TargetFile NormalizedFilePath
deriving ( Eq, Ord, Generic, Show )
Expand Down
Loading

AltStyle によって変換されたページ (->オリジナル) /