From 58b8b687ad578f1ee305ae0e549a6198928582e3 Mon Sep 17 00:00:00 2001 From: Patrick Date: Sat, 2 Nov 2024 09:44:33 +0800 Subject: [PATCH 01/90] Refactor session loading to manage pending files so we can batch load them to improve performance fix #4381 --- .../session-loader/Development/IDE/Session.hs | 55 +++++++++++-------- 1 file changed, 33 insertions(+), 22 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index a1d778ab0e..dab01c982f 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -424,7 +424,7 @@ getHieDbLoc dir = do loadSessionWithOptions :: Recorder (WithPriority Log) -> SessionLoadingOptions -> FilePath -> TQueue (IO ()) -> IO (Action IdeGhcSession) loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let toAbsolutePath = toAbsolute rootDir -- see Note [Root Directory] - cradle_files <- newIORef [] + cradle_files <- newIORef (Set.fromList []) -- Mapping from hie.yaml file to HscEnv, one per hie.yaml file hscEnvs <- newVar Map.empty :: IO (Var HieMap) -- Mapping from a Filepath to HscEnv @@ -434,6 +434,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- they are inconsistent. So, everywhere you modify 'fileToFlags', -- you have to modify 'filesMap' as well. filesMap <- newVar HM.empty :: IO (Var FilesMap) + pendingFilesTQueue <- newTQueueIO + -- Pending files waiting to be loaded -- Version of the mappings above version <- newVar 0 biosSessionLoadingVar <- newVar Nothing :: IO (Var (Maybe SessionLoadingPreferenceConfig)) @@ -550,7 +552,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let session :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) - -> IO (IdeResult HscEnvEq,[FilePath]) + -> IO ((IdeResult HscEnvEq,[FilePath]), HashSet FilePath) session args@(hieYaml, _cfp, _opts, _libDir) = do (new_deps, old_deps) <- packageSetup args @@ -562,13 +564,15 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let new_cache = newComponentCache recorder optExtensions _cfp hscEnv all_target_details <- new_cache old_deps new_deps + let flags_map' = HM.fromList (concatMap toFlagsMap all_targets') + all_targets' = concat all_target_details + newLoaded = HM.keys flags_map' this_dep_info <- getDependencyInfo $ maybeToList hieYaml let (all_targets, this_flags_map, this_options) = case HM.lookup _cfp flags_map' of Just this -> (all_targets', flags_map', this) Nothing -> (this_target_details : all_targets', HM.insert _cfp this_flags flags_map', this_flags) - where all_targets' = concat all_target_details - flags_map' = HM.fromList (concatMap toFlagsMap all_targets') + where this_target_details = TargetDetails (TargetFile _cfp) this_error_env this_dep_info [_cfp] this_flags = (this_error_env, this_dep_info) this_error_env = ([this_error], Nothing) @@ -580,27 +584,27 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do void $ modifyVar' fileToFlags $ Map.insert hieYaml this_flags_map void $ modifyVar' filesMap $ flip HM.union (HM.fromList (map ((,hieYaml) . fst) $ concatMap toFlagsMap all_targets)) + -- Typecheck all files in the project on startup + checkProject <- getCheckProject -- The VFS doesn't change on cradle edits, re-use the old one. 
-- Invalidate all the existing GhcSession build nodes by restarting the Shake session - keys2 <- invalidateShakeCache restartShakeSession VFSUnmodified "new component" [] $ do + keys2 <- invalidateShakeCache keys1 <- extendKnownTargets all_targets + unless (null new_deps || not checkProject) $ do + cfps' <- liftIO $ filterM (IO.doesFileExist . fromNormalizedFilePath) (concatMap targetLocations all_targets) + void $ shakeEnqueue extras $ mkDelayedAction "InitialLoad" Debug $ void $ do + mmt <- uses GetModificationTime cfps' + let cs_exist = catMaybes (zipWith (<$) cfps' mmt) + modIfaces <- uses GetModIface cs_exist + -- update exports map + shakeExtras <- getShakeExtras + let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces + liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) return [keys1, keys2] - -- Typecheck all files in the project on startup - checkProject <- getCheckProject - unless (null new_deps || not checkProject) $ do - cfps' <- liftIO $ filterM (IO.doesFileExist . fromNormalizedFilePath) (concatMap targetLocations all_targets) - void $ shakeEnqueue extras $ mkDelayedAction "InitialLoad" Debug $ void $ do - mmt <- uses GetModificationTime cfps' - let cs_exist = catMaybes (zipWith (<$) cfps' mmt) - modIfaces <- uses GetModIface cs_exist - -- update exports map - shakeExtras <- getShakeExtras - let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces - liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) - - return $ second Map.keys this_options + + return $ (second Map.keys this_options, Set.fromList $ fromNormalizedFilePath <$> newLoaded) let consultCradle :: Maybe FilePath -> FilePath -> IO (IdeResult HscEnvEq, [FilePath]) consultCradle hieYaml cfp = do @@ -615,11 +619,13 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- Display a user friendly progress message here: They probably don't know what a cradle is let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) <> " (for " <> T.pack lfpLog <> ")" + + pendingFiles <- Set.fromList <$> (atomically $ flushTQueue pendingFilesTQueue) eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ withTrace "Load cradle" $ \addTag -> do addTag "file" lfpLog old_files <- readIORef cradle_files - res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp old_files + res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp (Set.toList $ pendingFiles <> old_files) addTag "result" (show res) return res @@ -633,8 +639,12 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) | compileTime == runTime -> do - atomicModifyIORef' cradle_files (\xs -> (cfp:xs,())) - session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) + (results, newLoaded) <- session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) + -- put back to pending que if not listed in the results + let remainPendingFiles = Set.delete cfp $ pendingFiles `Set.difference` newLoaded + atomically $ forM_ remainPendingFiles (writeTQueue pendingFilesTQueue) + atomicModifyIORef' cradle_files (\xs -> (pendingFiles `Set.intersection` newLoaded <> xs,())) + return results | otherwise -> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) -- Failure case, either a cradle error or the none cradle Left err -> do @@ -708,6 +718,7 @@ 
loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do return (([renderPackageSetupException file e], Nothing), maybe [] pure hieYaml) returnWithVersion $ \file -> do + atomically $ writeTQueue pendingFilesTQueue $ toAbsolutePath file -- see Note [Serializing runs in separate thread] awaitRunInThread que $ getOptions file From ea002d7ef8f2c8be8663e2689bced7e67b8884ac Mon Sep 17 00:00:00 2001 From: Patrick Date: Sun, 3 Nov 2024 15:28:16 +0800 Subject: [PATCH 02/90] distribute errors to all pending files are being loading --- .../session-loader/Development/IDE/Session.hs | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index dab01c982f..8683b5ada1 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -425,6 +425,7 @@ loadSessionWithOptions :: Recorder (WithPriority Log) -> SessionLoadingOptions - loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let toAbsolutePath = toAbsolute rootDir -- see Note [Root Directory] cradle_files <- newIORef (Set.fromList []) +-- error_loading_files <- newIORef (Set.fromList []) -- Mapping from hie.yaml file to HscEnv, one per hie.yaml file hscEnvs <- newVar Map.empty :: IO (Var HieMap) -- Mapping from a Filepath to HscEnv @@ -606,6 +607,15 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do return $ (second Map.keys this_options, Set.fromList $ fromNormalizedFilePath <$> newLoaded) + let makeError hieYaml cradle err cfp = do + dep_info <- getDependencyInfo (maybeToList hieYaml) + let ncfp = toNormalizedFilePath' cfp + let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) + void $ modifyVar' fileToFlags $ + Map.insertWith HM.union hieYaml (HM.singleton ncfp (res, dep_info)) + void $ modifyVar' filesMap $ HM.insert ncfp hieYaml + return (fst res) + let consultCradle :: Maybe FilePath -> FilePath -> IO (IdeResult HscEnvEq, [FilePath]) consultCradle hieYaml cfp = do let lfpLog = makeRelative rootDir cfp @@ -648,13 +658,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do | otherwise -> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) -- Failure case, either a cradle error or the none cradle Left err -> do - dep_info <- getDependencyInfo (maybeToList hieYaml) - let ncfp = toNormalizedFilePath' cfp - let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) - void $ modifyVar' fileToFlags $ - Map.insertWith HM.union hieYaml (HM.singleton ncfp (res, dep_info)) - void $ modifyVar' filesMap $ HM.insert ncfp hieYaml - return (res, maybe [] pure hieYaml ++ concatMap cradleErrorDependencies err) + errors <- mapM (makeError hieYaml cradle err) $ Set.toList pendingFiles + return ((concat errors, Nothing), maybe [] pure hieYaml ++ concatMap cradleErrorDependencies err) let -- | We allow users to specify a loading strategy. 
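The two patches above move the session loader from loading one file per cradle invocation to batch loading: files the editor asks about are queued, a single cradle invocation is asked to load the whole pending batch, and when the cradle fails the resulting diagnostic is attached to every file in that batch rather than only the file that triggered the load. What follows is a minimal, self-contained sketch of that strategy, not the code from Development.IDE.Session; the type LoadBatch and the helpers requestFile/loadPending are illustrative stand-ins for cradleToOptsAndLibDir and the pendingFilesTQueue plumbing these patches introduce.

    module BatchLoadSketch (requestFile, loadPending) where

    import Control.Concurrent.STM (TQueue, atomically, flushTQueue, writeTQueue)
    import qualified Data.Set as Set

    -- Stand-in for the real cradle loader: given the file that triggered the
    -- load and the batch of files to load alongside it, either fail with a
    -- message or report which files the cradle actually managed to load.
    type LoadBatch = FilePath -> [FilePath] -> IO (Either String (Set.Set FilePath))

    -- Every file the editor asks about is merely queued...
    requestFile :: TQueue FilePath -> FilePath -> IO ()
    requestFile pending file = atomically (writeTQueue pending file)

    -- ...and when a load fires for one file, the queue is flushed so the whole
    -- batch goes to the cradle in a single invocation.  On failure the error is
    -- rendered for every file in the batch, not just the triggering one.
    loadPending :: LoadBatch -> TQueue FilePath -> FilePath -> IO [(FilePath, String)]
    loadPending loadBatch pending triggering = do
      queued <- atomically (flushTQueue pending)
      let files = Set.toList (Set.insert triggering (Set.fromList queued))
      result <- loadBatch triggering files
      case result of
        Right _loaded -> pure []                          -- success: no diagnostics here
        Left err      -> pure [ (f, err) | f <- files ]   -- distribute the error to the batch

Later patches in this series refine exactly this error path: files that keep failing are remembered (error_loading_files) and fall back to one-by-one loading, so a single broken file cannot poison the whole batch.
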
From c78b197000c093e76f5277b7814b81ec32a85564 Mon Sep 17 00:00:00 2001 From: Patrick Date: Sun, 3 Nov 2024 16:15:57 +0800 Subject: [PATCH 03/90] better filter loading files --- cabal.project | 6 ++++++ ghcide/session-loader/Development/IDE/Session.hs | 5 ++++- .../session-loader/Development/IDE/Session/Diagnostics.hs | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/cabal.project b/cabal.project index 08d743c24e..3cae5e5181 100644 --- a/cabal.project +++ b/cabal.project @@ -6,6 +6,7 @@ packages: ./ghcide ./hls-plugin-api ./hls-test-utils +-- ../hiebios index-state: 2024-10-21T00:00:00Z @@ -46,3 +47,8 @@ constraints: if impl(ghc >= 9.9) -- https://github.com/haskell/haskell-language-server/issues/4324 benchmarks: False + +source-repository-package + type: git + location: https://github.com/soulomoon/hie-bios.git + tag: 7dd19b547ba30b760053b5f796c439cba6ac6a07 diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 8683b5ada1..a4e8678d43 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -658,7 +658,10 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do | otherwise -> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) -- Failure case, either a cradle error or the none cradle Left err -> do - errors <- mapM (makeError hieYaml cradle err) $ Set.toList pendingFiles + let failedLoadingFiles = nub $ cfp:concatMap cradleErrorLoadingFiles err + let remainPendingFiles = Set.delete cfp $ pendingFiles `Set.difference` Set.fromList failedLoadingFiles + atomically $ forM_ remainPendingFiles (writeTQueue pendingFilesTQueue) + errors <- mapM (makeError hieYaml cradle err) $ failedLoadingFiles return ((concat errors, Nothing), maybe [] pure hieYaml ++ concatMap cradleErrorDependencies err) let diff --git a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs index a8e35e5965..ac18ff2025 100644 --- a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs +++ b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs @@ -27,7 +27,7 @@ data CradleErrorDetails = Depicts the cradle error in a user-friendly way. 
-} renderCradleError :: CradleError -> Cradle a -> NormalizedFilePath -> FileDiagnostic -renderCradleError (CradleError deps _ec ms) cradle nfp +renderCradleError (CradleError deps _ec ms _fps) cradle nfp | HieBios.isCabalCradle cradle = let (fp, showDiag, diag) = ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) nfp $ T.unlines $ map T.pack userFriendlyMessage in (fp, showDiag, diag{_data_ = Just $ Aeson.toJSON CradleErrorDetails{cabalProjectFiles=absDeps}}) From b87937580e8239024b58d3013cb12f38ec50d0d8 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 4 Nov 2024 21:20:25 +0800 Subject: [PATCH 04/90] fallback to non-batch load --- cabal.project | 6 -- .../session-loader/Development/IDE/Session.hs | 64 ++++++++++++------- .../Development/IDE/Session/Diagnostics.hs | 2 +- 3 files changed, 43 insertions(+), 29 deletions(-) diff --git a/cabal.project b/cabal.project index 3cae5e5181..08d743c24e 100644 --- a/cabal.project +++ b/cabal.project @@ -6,7 +6,6 @@ packages: ./ghcide ./hls-plugin-api ./hls-test-utils --- ../hiebios index-state: 2024-10-21T00:00:00Z @@ -47,8 +46,3 @@ constraints: if impl(ghc >= 9.9) -- https://github.com/haskell/haskell-language-server/issues/4324 benchmarks: False - -source-repository-package - type: git - location: https://github.com/soulomoon/hie-bios.git - tag: 7dd19b547ba30b760053b5f796c439cba6ac6a07 diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index a4e8678d43..1dc4135923 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -146,10 +146,13 @@ data Log | LogNewComponentCache !(([FileDiagnostic], Maybe HscEnvEq), DependencyInfo) | LogHieBios HieBios.Log | LogSessionLoadingChanged + | LogSessionNewLoadedFiles ![FilePath] deriving instance Show Log instance Pretty Log where pretty = \case + LogSessionNewLoadedFiles files -> + "New loaded files:" <+> pretty files LogNoneCradleFound path -> "None cradle found for" <+> pretty path <+> ", ignoring the file" LogSettingInitialDynFlags -> @@ -425,7 +428,7 @@ loadSessionWithOptions :: Recorder (WithPriority Log) -> SessionLoadingOptions - loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let toAbsolutePath = toAbsolute rootDir -- see Note [Root Directory] cradle_files <- newIORef (Set.fromList []) --- error_loading_files <- newIORef (Set.fromList []) + error_loading_files <- newIORef (Set.fromList []) -- Mapping from hie.yaml file to HscEnv, one per hie.yaml file hscEnvs <- newVar Map.empty :: IO (Var HieMap) -- Mapping from a Filepath to HscEnv @@ -603,19 +606,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) return [keys1, keys2] - - return $ (second Map.keys this_options, Set.fromList $ fromNormalizedFilePath <$> newLoaded) - let makeError hieYaml cradle err cfp = do - dep_info <- getDependencyInfo (maybeToList hieYaml) - let ncfp = toNormalizedFilePath' cfp - let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) - void $ modifyVar' fileToFlags $ - Map.insertWith HM.union hieYaml (HM.singleton ncfp (res, dep_info)) - void $ modifyVar' filesMap $ HM.insert ncfp hieYaml - return (fst res) - let consultCradle :: Maybe FilePath -> FilePath -> IO (IdeResult HscEnvEq, [FilePath]) consultCradle hieYaml cfp = do let lfpLog = makeRelative rootDir cfp @@ 
-630,12 +622,19 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) <> " (for " <> T.pack lfpLog <> ")" - pendingFiles <- Set.fromList <$> (atomically $ flushTQueue pendingFilesTQueue) + pendingFiles' <- Set.fromList <$> (atomically $ flushTQueue pendingFilesTQueue) + -- remove the file from error loading files + errorFiles <- readIORef error_loading_files + -- remove error files from pending files since error loading need to load one by one + let pendingFiles = pendingFiles' `Set.difference` errorFiles + -- if the file is in error loading files, we fall back to single loading mode + let toLoads = if cfp `Set.member` errorFiles then Set.empty else pendingFiles + eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ withTrace "Load cradle" $ \addTag -> do addTag "file" lfpLog old_files <- readIORef cradle_files - res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp (Set.toList $ pendingFiles <> old_files) + res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp (Set.toList $ Set.delete cfp $ toLoads <> old_files) addTag "result" (show res) return res @@ -649,20 +648,37 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) | compileTime == runTime -> do - (results, newLoaded) <- session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) + (results, allNewLoaded) <- session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) -- put back to pending que if not listed in the results - let remainPendingFiles = Set.delete cfp $ pendingFiles `Set.difference` newLoaded + -- delete cfp even if ew report No cradle target found for cfp + let remainPendingFiles = Set.delete cfp $ pendingFiles `Set.difference` allNewLoaded + let newLoadedT = pendingFiles `Set.intersection` allNewLoaded atomically $ forM_ remainPendingFiles (writeTQueue pendingFilesTQueue) - atomicModifyIORef' cradle_files (\xs -> (pendingFiles `Set.intersection` newLoaded <> xs,())) + -- log new loaded files + logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoadedT + atomicModifyIORef' cradle_files (\xs -> (newLoadedT <> xs,())) + atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` newLoadedT, ())) return results | otherwise -> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) -- Failure case, either a cradle error or the none cradle Left err -> do - let failedLoadingFiles = nub $ cfp:concatMap cradleErrorLoadingFiles err - let remainPendingFiles = Set.delete cfp $ pendingFiles `Set.difference` Set.fromList failedLoadingFiles - atomically $ forM_ remainPendingFiles (writeTQueue pendingFilesTQueue) - errors <- mapM (makeError hieYaml cradle err) $ failedLoadingFiles - return ((concat errors, Nothing), maybe [] pure hieYaml ++ concatMap cradleErrorDependencies err) + if (length toLoads > 1) + then do + succLoaded_files <- readIORef cradle_files + -- mark as less loaded files as failedLoadingFiles possible + let failedLoadingFiles = (Set.insert cfp toLoads) `Set.difference` succLoaded_files + atomicModifyIORef' error_loading_files (\xs -> (failedLoadingFiles <> xs,())) + -- retry without other files + atomically $ forM_ pendingFiles (writeTQueue pendingFilesTQueue) + consultCradle hieYaml cfp + else do + dep_info <- getDependencyInfo (maybeToList hieYaml) + let ncfp = 
toNormalizedFilePath' cfp + let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) + void $ modifyVar' fileToFlags $ + Map.insertWith HM.union hieYaml (HM.singleton ncfp (res, dep_info)) + void $ modifyVar' filesMap $ HM.insert ncfp hieYaml + return (res, maybe [] pure hieYaml ++ concatMap cradleErrorDependencies err) let -- | We allow users to specify a loading strategy. @@ -703,6 +719,10 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do deps_ok <- checkDependencyInfo old_di if not deps_ok then do + -- todo invoke the action to recompile the file + -- if deps are old, we can try to load the error files again + atomicModifyIORef' error_loading_files (\xs -> (Set.delete file xs,())) + atomicModifyIORef' cradle_files (\xs -> (Set.delete file xs,())) -- If the dependencies are out of date then clear both caches and start -- again. modifyVar_ fileToFlags (const (return Map.empty)) diff --git a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs index ac18ff2025..a8e35e5965 100644 --- a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs +++ b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs @@ -27,7 +27,7 @@ data CradleErrorDetails = Depicts the cradle error in a user-friendly way. -} renderCradleError :: CradleError -> Cradle a -> NormalizedFilePath -> FileDiagnostic -renderCradleError (CradleError deps _ec ms _fps) cradle nfp +renderCradleError (CradleError deps _ec ms) cradle nfp | HieBios.isCabalCradle cradle = let (fp, showDiag, diag) = ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) nfp $ T.unlines $ map T.pack userFriendlyMessage in (fp, showDiag, diag{_data_ = Just $ Aeson.toJSON CradleErrorDetails{cabalProjectFiles=absDeps}}) From 8953aec8f4eac9f8c87b6ddf955eeb383ebcf959 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 4 Nov 2024 21:23:51 +0800 Subject: [PATCH 05/90] typo --- ghcide/session-loader/Development/IDE/Session.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 1dc4135923..9eac2ce279 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -665,7 +665,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do if (length toLoads > 1) then do succLoaded_files <- readIORef cradle_files - -- mark as less loaded files as failedLoadingFiles possible + -- mark as less loaded files as failedLoadingFiles as possible let failedLoadingFiles = (Set.insert cfp toLoads) `Set.difference` succLoaded_files atomicModifyIORef' error_loading_files (\xs -> (failedLoadingFiles <> xs,())) -- retry without other files From 4bdc2c87c8aead0b14a988e9c0b19b8d2d735558 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 4 Nov 2024 21:24:59 +0800 Subject: [PATCH 06/90] update inline doc --- ghcide/session-loader/Development/IDE/Session.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 9eac2ce279..7df8fc0240 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -623,7 +623,6 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do <> " (for " <> T.pack lfpLog <> ")" pendingFiles' <- Set.fromList <$> (atomically $ flushTQueue 
pendingFilesTQueue) - -- remove the file from error loading files errorFiles <- readIORef error_loading_files -- remove error files from pending files since error loading need to load one by one let pendingFiles = pendingFiles' `Set.difference` errorFiles @@ -656,6 +655,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do atomically $ forM_ remainPendingFiles (writeTQueue pendingFilesTQueue) -- log new loaded files logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoadedT + -- remove the file from error loading files atomicModifyIORef' cradle_files (\xs -> (newLoadedT <> xs,())) atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` newLoadedT, ())) return results From c4bb53a267c5173394ce330f33e84d6da497541a Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 4 Nov 2024 21:26:22 +0800 Subject: [PATCH 07/90] update inline doc --- ghcide/session-loader/Development/IDE/Session.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 7df8fc0240..70a882b337 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -438,8 +438,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- they are inconsistent. So, everywhere you modify 'fileToFlags', -- you have to modify 'filesMap' as well. filesMap <- newVar HM.empty :: IO (Var FilesMap) - pendingFilesTQueue <- newTQueueIO -- Pending files waiting to be loaded + pendingFilesTQueue <- newTQueueIO -- Version of the mappings above version <- newVar 0 biosSessionLoadingVar <- newVar Nothing :: IO (Var (Maybe SessionLoadingPreferenceConfig)) From 112bc951555bf0c1e542ad05586457d351e079af Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 4 Nov 2024 21:36:03 +0800 Subject: [PATCH 08/90] add LogSessionReloadOnError to log errors during file reloads; cleanup error loading and cradle files --- ghcide/session-loader/Development/IDE/Session.hs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 70a882b337..793c6b3669 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -147,10 +147,13 @@ data Log | LogHieBios HieBios.Log | LogSessionLoadingChanged | LogSessionNewLoadedFiles ![FilePath] + | LogSessionReloadOnError FilePath ![FilePath] deriving instance Show Log instance Pretty Log where pretty = \case + LogSessionReloadOnError path files -> + "Reloading file due to error in" <+> pretty path <+> "with files:" <+> pretty files LogSessionNewLoadedFiles files -> "New loaded files:" <+> pretty files LogNoneCradleFound path -> @@ -649,14 +652,14 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do | compileTime == runTime -> do (results, allNewLoaded) <- session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) -- put back to pending que if not listed in the results - -- delete cfp even if ew report No cradle target found for cfp + -- delete cfp even if we report No cradle target found for the cfp let remainPendingFiles = Set.delete cfp $ pendingFiles `Set.difference` allNewLoaded let newLoadedT = pendingFiles `Set.intersection` allNewLoaded atomically $ forM_ remainPendingFiles (writeTQueue pendingFilesTQueue) -- log new loaded files logWith recorder Info $ LogSessionNewLoadedFiles $ 
Set.toList newLoadedT - -- remove the file from error loading files atomicModifyIORef' cradle_files (\xs -> (newLoadedT <> xs,())) + -- remove the file from error loading files atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` newLoadedT, ())) return results | otherwise -> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) @@ -711,6 +714,9 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do modifyVar_ filesMap (const (return HM.empty)) -- Don't even keep the name cache, we start from scratch here! modifyVar_ hscEnvs (const (return Map.empty)) + -- cleanup error loading files and cradle files + atomicModifyIORef' error_loading_files (\_ -> (Set.empty,())) + atomicModifyIORef' cradle_files (\_ -> (Set.empty,())) v <- Map.findWithDefault HM.empty hieYaml <$> readVar fileToFlags let cfp = toAbsolutePath file From 6e04d289fe57145153128b44bf1aacb42992456b Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 4 Nov 2024 23:35:57 +0800 Subject: [PATCH 09/90] refactor loadSessionWithOptions to improve error handling and clarify variable names --- ghcide/session-loader/Development/IDE/Session.hs | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 793c6b3669..bcf29f85b4 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -630,13 +630,13 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- remove error files from pending files since error loading need to load one by one let pendingFiles = pendingFiles' `Set.difference` errorFiles -- if the file is in error loading files, we fall back to single loading mode - let toLoads = if cfp `Set.member` errorFiles then Set.empty else pendingFiles + let extraToLoads = if cfp `Set.member` errorFiles then Set.empty else pendingFiles eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ withTrace "Load cradle" $ \addTag -> do addTag "file" lfpLog old_files <- readIORef cradle_files - res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp (Set.toList $ Set.delete cfp $ toLoads <> old_files) + res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp (Set.toList $ Set.delete cfp $ extraToLoads <> old_files) addTag "result" (show res) return res @@ -660,16 +660,16 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoadedT atomicModifyIORef' cradle_files (\xs -> (newLoadedT <> xs,())) -- remove the file from error loading files - atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` newLoadedT, ())) + atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` allNewLoaded, ())) return results | otherwise -> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) -- Failure case, either a cradle error or the none cradle Left err -> do - if (length toLoads > 1) + if (not $ null extraToLoads) then do succLoaded_files <- readIORef cradle_files -- mark as less loaded files as failedLoadingFiles as possible - let failedLoadingFiles = (Set.insert cfp toLoads) `Set.difference` succLoaded_files + let failedLoadingFiles = (Set.insert cfp extraToLoads) `Set.difference` succLoaded_files atomicModifyIORef' error_loading_files (\xs -> 
(failedLoadingFiles <> xs,())) -- retry without other files atomically $ forM_ pendingFiles (writeTQueue pendingFilesTQueue) @@ -681,6 +681,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do void $ modifyVar' fileToFlags $ Map.insertWith HM.union hieYaml (HM.singleton ncfp (res, dep_info)) void $ modifyVar' filesMap $ HM.insert ncfp hieYaml + atomicModifyIORef' error_loading_files (\xs -> (Set.insert cfp xs,())) return (res, maybe [] pure hieYaml ++ concatMap cradleErrorDependencies err) let From 67aebc42b01d46c9f699cd4a4f045c548c0960c2 Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 5 Nov 2024 02:03:00 +0800 Subject: [PATCH 10/90] refactor loadSessionWithOptions to improve pending file handling and error management --- .../session-loader/Development/IDE/Session.hs | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index bcf29f85b4..cb2571e046 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -625,17 +625,18 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) <> " (for " <> T.pack lfpLog <> ")" - pendingFiles' <- Set.fromList <$> (atomically $ flushTQueue pendingFilesTQueue) + pendingFiles <- Set.insert cfp . Set.fromList <$> (atomically $ flushTQueue pendingFilesTQueue) errorFiles <- readIORef error_loading_files - -- remove error files from pending files since error loading need to load one by one - let pendingFiles = pendingFiles' `Set.difference` errorFiles + old_files <- readIORef cradle_files -- if the file is in error loading files, we fall back to single loading mode - let extraToLoads = if cfp `Set.member` errorFiles then Set.empty else pendingFiles + let extraToLoads = if cfp `Set.member` errorFiles + then Set.empty + -- remove error files from pending files since error loading need to load one by one + else Set.delete cfp $ pendingFiles `Set.difference` errorFiles eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ withTrace "Load cradle" $ \addTag -> do addTag "file" lfpLog - old_files <- readIORef cradle_files res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp (Set.toList $ Set.delete cfp $ extraToLoads <> old_files) addTag "result" (show res) return res @@ -654,22 +655,21 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- put back to pending que if not listed in the results -- delete cfp even if we report No cradle target found for the cfp let remainPendingFiles = Set.delete cfp $ pendingFiles `Set.difference` allNewLoaded - let newLoadedT = pendingFiles `Set.intersection` allNewLoaded + let newLoaded = pendingFiles `Set.intersection` allNewLoaded atomically $ forM_ remainPendingFiles (writeTQueue pendingFilesTQueue) -- log new loaded files - logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoadedT - atomicModifyIORef' cradle_files (\xs -> (newLoadedT <> xs,())) - -- remove the file from error loading files + logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded + -- remove all new loaded file from error loading files atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` allNewLoaded, ())) + atomicModifyIORef' cradle_files (\xs -> (newLoaded <> xs,())) return results | otherwise 
-> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) -- Failure case, either a cradle error or the none cradle Left err -> do if (not $ null extraToLoads) then do - succLoaded_files <- readIORef cradle_files -- mark as less loaded files as failedLoadingFiles as possible - let failedLoadingFiles = (Set.insert cfp extraToLoads) `Set.difference` succLoaded_files + let failedLoadingFiles = (Set.insert cfp extraToLoads) `Set.difference` old_files atomicModifyIORef' error_loading_files (\xs -> (failedLoadingFiles <> xs,())) -- retry without other files atomically $ forM_ pendingFiles (writeTQueue pendingFilesTQueue) From 98ae44677d0f4295ed2e461b838f5f938e1f4a50 Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 5 Nov 2024 02:25:56 +0800 Subject: [PATCH 11/90] add doc about limitation --- ghcide/session-loader/Development/IDE/Session.hs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index cb2571e046..127af00f2d 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -669,6 +669,10 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do if (not $ null extraToLoads) then do -- mark as less loaded files as failedLoadingFiles as possible + -- limitation is that when we are loading files, and the dependencies of old_files + -- are changed, and old_files are not valid anymore. + -- but they will still be in the old_files, and will not move to error_loading_files. + -- And make other files failed to load in batch mode. let failedLoadingFiles = (Set.insert cfp extraToLoads) `Set.difference` old_files atomicModifyIORef' error_loading_files (\xs -> (failedLoadingFiles <> xs,())) -- retry without other files @@ -726,7 +730,6 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do deps_ok <- checkDependencyInfo old_di if not deps_ok then do - -- todo invoke the action to recompile the file -- if deps are old, we can try to load the error files again atomicModifyIORef' error_loading_files (\xs -> (Set.delete file xs,())) atomicModifyIORef' cradle_files (\xs -> (Set.delete file xs,())) From f3eb580d1217f8fde81d2dc334df22482b6588a3 Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 5 Nov 2024 16:51:10 +0800 Subject: [PATCH 12/90] absolute file at the beginning --- ghcide/session-loader/Development/IDE/Session.hs | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 127af00f2d..57c9a73024 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -677,6 +677,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do atomicModifyIORef' error_loading_files (\xs -> (failedLoadingFiles <> xs,())) -- retry without other files atomically $ forM_ pendingFiles (writeTQueue pendingFilesTQueue) + logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList pendingFiles) consultCradle hieYaml cfp else do dep_info <- getDependencyInfo (maybeToList hieYaml) @@ -724,8 +725,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do atomicModifyIORef' cradle_files (\_ -> (Set.empty,())) v <- Map.findWithDefault HM.empty hieYaml <$> readVar fileToFlags - let cfp = toAbsolutePath file - case HM.lookup (toNormalizedFilePath' cfp) v of + case 
HM.lookup (toNormalizedFilePath' file) v of Just (opts, old_di) -> do deps_ok <- checkDependencyInfo old_di if not deps_ok @@ -739,9 +739,9 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do modifyVar_ filesMap (const (return HM.empty)) -- Keep the same name cache modifyVar_ hscEnvs (return . Map.adjust (const []) hieYaml ) - consultCradle hieYaml cfp + consultCradle hieYaml file else return (opts, Map.keys old_di) - Nothing -> consultCradle hieYaml cfp + Nothing -> consultCradle hieYaml file -- The main function which gets options for a file. We only want one of these running -- at a time. Therefore the IORef contains the currently running cradle, if we try @@ -749,16 +749,17 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- before attempting to do so. let getOptions :: FilePath -> IO (IdeResult HscEnvEq, [FilePath]) getOptions file = do - let ncfp = toNormalizedFilePath' (toAbsolutePath file) + let ncfp = toNormalizedFilePath' file cachedHieYamlLocation <- HM.lookup ncfp <$> readVar filesMap hieYaml <- cradleLoc file sessionOpts (join cachedHieYamlLocation <|> hieYaml, file) `Safe.catch` \e -> return (([renderPackageSetupException file e], Nothing), maybe [] pure hieYaml) returnWithVersion $ \file -> do - atomically $ writeTQueue pendingFilesTQueue $ toAbsolutePath file + let absFile = toAbsolutePath file + atomically $ writeTQueue pendingFilesTQueue absFile -- see Note [Serializing runs in separate thread] - awaitRunInThread que $ getOptions file + awaitRunInThread que $ getOptions absFile -- | Run the specific cradle on a specific FilePath via hie-bios. -- This then builds dependencies or whatever based on the cradle, gets the From e7bd3d42045fb9680c23f995ff8b98c63a4772c8 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 8 Nov 2024 01:45:56 +0800 Subject: [PATCH 13/90] run session loader and worker in sperate --- ghcide/ghcide.cabal | 2 + .../session-loader/Development/IDE/Session.hs | 137 +++++++++++++----- .../Development/IDE/Session/OrderedSet.hs | 39 +++++ 3 files changed, 141 insertions(+), 37 deletions(-) create mode 100644 ghcide/session-loader/Development/IDE/Session/OrderedSet.hs diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal index bf88a55ed3..81e33aa2fa 100644 --- a/ghcide/ghcide.cabal +++ b/ghcide/ghcide.cabal @@ -110,6 +110,7 @@ library , unliftio-core , unordered-containers >=0.2.10.0 , vector + , ListT if os(windows) build-depends: Win32 @@ -204,6 +205,7 @@ library Development.IDE.GHC.CPP Development.IDE.GHC.Warnings Development.IDE.Types.Action + Development.IDE.Session.OrderedSet if flag(pedantic) ghc-options: diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 57c9a73024..6cbf6ea370 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -25,6 +25,7 @@ import Control.Exception.Safe as Safe import Control.Monad import Control.Monad.Extra as Extra import Control.Monad.IO.Class +import Control.Monad.Trans.Maybe (MaybeT (MaybeT, runMaybeT)) import qualified Crypto.Hash.SHA1 as H import Data.Aeson hiding (Error) import Data.Bifunctor @@ -103,8 +104,7 @@ import qualified Data.HashSet as Set import qualified Data.Set as OS import Database.SQLite.Simple import Development.IDE.Core.Tracing (withTrace) -import Development.IDE.Core.WorkerThread (awaitRunInThread, - withWorkerQueue) +import Development.IDE.Core.WorkerThread (withWorkerQueue) import qualified 
Development.IDE.GHC.Compat.Util as Compat import Development.IDE.Session.Diagnostics (renderCradleError) import Development.IDE.Types.Shake (WithHieDb, @@ -119,12 +119,17 @@ import qualified System.Random as Random import System.Random (RandomGen) import Text.ParserCombinators.ReadP (readP_to_S) +import Control.Concurrent.STM (STM) +import qualified Control.Monad.STM as STM +import qualified Development.IDE.Session.OrderedSet as S +import qualified Focus import GHC.Data.Bag import GHC.Driver.Env (hsc_all_home_unit_ids) import GHC.Driver.Errors.Types import GHC.Types.Error (errMsgDiagnostic, singleMessage) import GHC.Unit.State +import qualified StmContainers.Map as STM data Log = LogSettingInitialDynFlags @@ -148,10 +153,14 @@ data Log | LogSessionLoadingChanged | LogSessionNewLoadedFiles ![FilePath] | LogSessionReloadOnError FilePath ![FilePath] + | LogGetOptionsLoop !FilePath + | LogGetSessionRetry !FilePath deriving instance Show Log instance Pretty Log where pretty = \case + LogGetSessionRetry path -> "Retrying get session for" <+> pretty path + LogGetOptionsLoop fp -> "Loop: getOptions for" <+> pretty fp LogSessionReloadOnError path files -> "Reloading file due to error in" <+> pretty path <+> "with files:" <+> pretty files LogSessionNewLoadedFiles files -> @@ -435,14 +444,14 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- Mapping from hie.yaml file to HscEnv, one per hie.yaml file hscEnvs <- newVar Map.empty :: IO (Var HieMap) -- Mapping from a Filepath to HscEnv - fileToFlags <- newVar Map.empty :: IO (Var FlagsMap) + fileToFlags <- STM.newIO :: IO FlagsMap -- Mapping from a Filepath to its 'hie.yaml' location. -- Should hold the same Filepaths as 'fileToFlags', otherwise -- they are inconsistent. So, everywhere you modify 'fileToFlags', -- you have to modify 'filesMap' as well. - filesMap <- newVar HM.empty :: IO (Var FilesMap) + filesMap <- STM.newIO :: IO FilesMap -- Pending files waiting to be loaded - pendingFilesTQueue <- newTQueueIO + pendingFileSet <- S.newIO :: IO (S.OrderedSet FilePath) -- Version of the mappings above version <- newVar 0 biosSessionLoadingVar <- newVar Nothing :: IO (Var (Maybe SessionLoadingPreferenceConfig)) @@ -559,7 +568,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let session :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) - -> IO ((IdeResult HscEnvEq,[FilePath]), HashSet FilePath) + -> IO ((IdeResult HscEnvEq,DependencyInfo), HashSet FilePath) session args@(hieYaml, _cfp, _opts, _libDir) = do (new_deps, old_deps) <- packageSetup args @@ -589,8 +598,11 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do , "If you are using a .cabal file, please ensure that this module is listed in either the exposed-modules or other-modules section" ] - void $ modifyVar' fileToFlags $ Map.insert hieYaml this_flags_map - void $ modifyVar' filesMap $ flip HM.union (HM.fromList (map ((,hieYaml) . fst) $ concatMap toFlagsMap all_targets)) + let insertAll m xs = mapM_ (flip (uncurry STM.insert) m) xs + atomically $ do + STM.insert this_flags_map hieYaml fileToFlags + insertAll filesMap $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets + -- Typecheck all files in the project on startup checkProject <- getCheckProject -- The VFS doesn't change on cradle edits, re-use the old one. 
@@ -609,9 +621,9 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) return [keys1, keys2] - return $ (second Map.keys this_options, Set.fromList $ fromNormalizedFilePath <$> newLoaded) + return $ (this_options, Set.fromList $ fromNormalizedFilePath <$> newLoaded) - let consultCradle :: Maybe FilePath -> FilePath -> IO (IdeResult HscEnvEq, [FilePath]) + let consultCradle :: Maybe FilePath -> FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) consultCradle hieYaml cfp = do let lfpLog = makeRelative rootDir cfp logWith recorder Info $ LogCradlePath lfpLog @@ -625,7 +637,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) <> " (for " <> T.pack lfpLog <> ")" - pendingFiles <- Set.insert cfp . Set.fromList <$> (atomically $ flushTQueue pendingFilesTQueue) + pendingFiles <- Set.insert cfp . Set.fromList <$> (atomically $ S.toUnOrderedList pendingFileSet) errorFiles <- readIORef error_loading_files old_files <- readIORef cradle_files -- if the file is in error loading files, we fall back to single loading mode @@ -652,18 +664,20 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do ((runTime, _):_) | compileTime == runTime -> do (results, allNewLoaded) <- session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) - -- put back to pending que if not listed in the results -- delete cfp even if we report No cradle target found for the cfp - let remainPendingFiles = Set.delete cfp $ pendingFiles `Set.difference` allNewLoaded let newLoaded = pendingFiles `Set.intersection` allNewLoaded - atomically $ forM_ remainPendingFiles (writeTQueue pendingFilesTQueue) + -- delete all new loaded + atomically $ forM_ allNewLoaded $ flip S.delete pendingFileSet -- log new loaded files logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded -- remove all new loaded file from error loading files atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` allNewLoaded, ())) atomicModifyIORef' cradle_files (\xs -> (newLoaded <> xs,())) return results - | otherwise -> return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) + | otherwise -> do + -- delete cfp from pending files + atomically $ S.delete cfp pendingFileSet + return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),Map.empty) -- Failure case, either a cradle error or the none cradle Left err -> do if (not $ null extraToLoads) @@ -676,18 +690,19 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let failedLoadingFiles = (Set.insert cfp extraToLoads) `Set.difference` old_files atomicModifyIORef' error_loading_files (\xs -> (failedLoadingFiles <> xs,())) -- retry without other files - atomically $ forM_ pendingFiles (writeTQueue pendingFilesTQueue) logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList pendingFiles) consultCradle hieYaml cfp else do - dep_info <- getDependencyInfo (maybeToList hieYaml) + dep_info <- getDependencyInfo ((maybeToList hieYaml) ++ concatMap cradleErrorDependencies err) let ncfp = toNormalizedFilePath' cfp let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) - void $ modifyVar' fileToFlags $ - Map.insertWith HM.union hieYaml (HM.singleton ncfp (res, dep_info)) - void $ modifyVar' filesMap $ HM.insert ncfp 
hieYaml + -- remove cfp from pending files + atomically $ S.delete cfp pendingFileSet + atomically $ do + STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp (res, dep_info))) hieYaml fileToFlags + STM.insert hieYaml ncfp filesMap atomicModifyIORef' error_loading_files (\xs -> (Set.insert cfp xs,())) - return (res, maybe [] pure hieYaml ++ concatMap cradleErrorDependencies err) + return (res, dep_info) let -- | We allow users to specify a loading strategy. @@ -710,21 +725,22 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- This caches the mapping from hie.yaml + Mod.hs -> [String] -- Returns the Ghc session and the cradle dependencies let sessionOpts :: (Maybe FilePath, FilePath) - -> IO (IdeResult HscEnvEq, [FilePath]) + -> IO (IdeResult HscEnvEq, DependencyInfo) sessionOpts (hieYaml, file) = do Extra.whenM didSessionLoadingPreferenceConfigChange $ do logWith recorder Info LogSessionLoadingChanged -- If the dependencies are out of date then clear both caches and start -- again. - modifyVar_ fileToFlags (const (return Map.empty)) - modifyVar_ filesMap (const (return HM.empty)) + atomically $ do + STM.reset filesMap + STM.reset fileToFlags -- Don't even keep the name cache, we start from scratch here! modifyVar_ hscEnvs (const (return Map.empty)) -- cleanup error loading files and cradle files atomicModifyIORef' error_loading_files (\_ -> (Set.empty,())) atomicModifyIORef' cradle_files (\_ -> (Set.empty,())) - v <- Map.findWithDefault HM.empty hieYaml <$> readVar fileToFlags + v <- atomically $ fromMaybe HM.empty <$> STM.lookup hieYaml fileToFlags case HM.lookup (toNormalizedFilePath' file) v of Just (opts, old_di) -> do deps_ok <- checkDependencyInfo old_di @@ -735,31 +751,77 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do atomicModifyIORef' cradle_files (\xs -> (Set.delete file xs,())) -- If the dependencies are out of date then clear both caches and start -- again. - modifyVar_ fileToFlags (const (return Map.empty)) - modifyVar_ filesMap (const (return HM.empty)) + atomically $ do + STM.reset filesMap + STM.reset fileToFlags -- Keep the same name cache modifyVar_ hscEnvs (return . Map.adjust (const []) hieYaml ) consultCradle hieYaml file - else return (opts, Map.keys old_di) + else return (opts, old_di) Nothing -> consultCradle hieYaml file + let checkInCache ::NormalizedFilePath -> STM (Maybe (IdeResult HscEnvEq, DependencyInfo)) + checkInCache ncfp = runMaybeT $ do + cachedHieYamlLocation <- MaybeT $ STM.lookup ncfp filesMap + m <- MaybeT $ STM.lookup cachedHieYamlLocation fileToFlags + MaybeT $ pure $ HM.lookup ncfp m + -- The main function which gets options for a file. We only want one of these running -- at a time. Therefore the IORef contains the currently running cradle, if we try -- to get some more options then we wait for the currently running action to finish -- before attempting to do so. 
- let getOptions :: FilePath -> IO (IdeResult HscEnvEq, [FilePath]) + let getOptions :: FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) getOptions file = do let ncfp = toNormalizedFilePath' file - cachedHieYamlLocation <- HM.lookup ncfp <$> readVar filesMap + cachedHieYamlLocation <- atomically $ STM.lookup ncfp filesMap hieYaml <- cradleLoc file - sessionOpts (join cachedHieYamlLocation <|> hieYaml, file) `Safe.catch` \e -> - return (([renderPackageSetupException file e], Nothing), maybe [] pure hieYaml) - + let hieLoc = join cachedHieYamlLocation <|> hieYaml + result <- sessionOpts (hieLoc, file) `Safe.catch` \e -> do + dep <- getDependencyInfo $ maybe [] pure hieYaml + return (([renderPackageSetupException file e], Nothing), dep) + atomically $ STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp result)) hieLoc fileToFlags + return result + + let getOptionsLoop :: IO () + getOptionsLoop = do + -- Get the next file to load + absFile <- atomically $ S.readQueue pendingFileSet + logWith recorder Info (LogGetOptionsLoop absFile) + void $ getOptions absFile + getOptionsLoop + + let getSessionRetry :: FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) + getSessionRetry absFile = do + let ncfp = toNormalizedFilePath' absFile + -- check if in the cache + res <- atomically $ checkInCache ncfp + logWith recorder Info $ LogGetSessionRetry absFile + updateDateRes <- case res of + Just r -> do + depOk <- checkDependencyInfo (snd r) + if depOk + then return $ Just r + else return Nothing + _ -> return Nothing + case updateDateRes of + Just r -> return r + Nothing -> do + -- if not ok, we need to reload the session + atomically $ do + S.insert absFile pendingFileSet + atomically $ do + -- wait until pendingFiles is not in pendingFiles + Extra.whenM (S.lookup absFile pendingFileSet) STM.retry + getSessionRetry absFile + + -- Start the getOptionsLoop if the queue is empty + liftIO $ atomically $ Extra.whenM (isEmptyTQueue que) $ writeTQueue que getOptionsLoop returnWithVersion $ \file -> do let absFile = toAbsolutePath file - atomically $ writeTQueue pendingFilesTQueue absFile + second Map.keys <$> getSessionRetry absFile + -- atomically $ writeTQueue pendingFiles absFile -- see Note [Serializing runs in separate thread] - awaitRunInThread que $ getOptions absFile + -- awaitRunInThread que $ second Map.keys <$> getOptions absFile -- | Run the specific cradle on a specific FilePath via hie-bios. -- This then builds dependencies or whatever based on the cradle, gets the @@ -1034,10 +1096,11 @@ setCacheDirs recorder CacheDirs{..} dflags = do type DependencyInfo = Map.Map FilePath (Maybe UTCTime) type HieMap = Map.Map (Maybe FilePath) [RawComponentInfo] -- | Maps a "hie.yaml" location to all its Target Filepaths and options. -type FlagsMap = Map.Map (Maybe FilePath) (HM.HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo)) +type FlagsMap = STM.Map (Maybe FilePath) (HM.HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo)) -- | Maps a Filepath to its respective "hie.yaml" location. -- It aims to be the reverse of 'FlagsMap'. 
-type FilesMap = HM.HashMap NormalizedFilePath (Maybe FilePath)
+type FilesMap = STM.Map NormalizedFilePath (Maybe FilePath)
+
 -- This is pristine information about a component
 data RawComponentInfo = RawComponentInfo
diff --git a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs
new file mode 100644
index 0000000000..e1a5f123c2
--- /dev/null
+++ b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs
@@ -0,0 +1,39 @@
+module Development.IDE.Session.OrderedSet where
+
+import           Control.Concurrent.STM        (STM, TQueue, newTQueueIO)
+import           Control.Concurrent.STM.TQueue (readTQueue, writeTQueue)
+import           Data.Hashable                 (Hashable)
+import qualified ListT                         as LT
+import qualified StmContainers.Set             as S
+import           StmContainers.Set             (Set)
+
+
+type OrderedSet a = (TQueue a, Set a)
+
+insert :: Hashable a => a -> OrderedSet a -> STM ()
+insert a (que, s) = do
+    S.insert a s
+    writeTQueue que a
+    return ()
+
+newIO :: Hashable a => IO (OrderedSet a)
+newIO = do
+    que <- newTQueueIO
+    s <- S.newIO
+    return (que, s)
+
+readQueue :: Hashable a => OrderedSet a -> STM a
+readQueue rs@(que, s) = do
+    f <- readTQueue que
+    b <- S.lookup f s
+    -- retry if the file is already in done
+    if b then return f else readQueue rs
+
+lookup :: Hashable a => a -> OrderedSet a -> STM Bool
+lookup a (_, s) = S.lookup a s
+
+delete :: Hashable a => a -> OrderedSet a -> STM ()
+delete a (_, s) = S.delete a s
+
+toUnOrderedList :: Hashable a => OrderedSet a -> STM [a]
+toUnOrderedList (_, s) = LT.toList $ S.listT s

From 1f97c401b5aa5cc86c1e52d397bcd91154662a88 Mon Sep 17 00:00:00 2001
From: Patrick
Date: Fri, 8 Nov 2024 02:26:23 +0800
Subject: [PATCH 14/90] cleanup

---
 .../session-loader/Development/IDE/Session.hs | 24 +++++++++----------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs
index 6cbf6ea370..99ca786506 100644
--- a/ghcide/session-loader/Development/IDE/Session.hs
+++ b/ghcide/session-loader/Development/IDE/Session.hs
@@ -790,8 +790,12 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do
           void $ getOptions absFile
           getOptionsLoop
 
-    let getSessionRetry :: FilePath -> IO (IdeResult HscEnvEq, DependencyInfo)
-        getSessionRetry absFile = do
+    -- | Given a file, this function will return the HscEnv and the dependencies
+    -- it would look up the cache first, if the cache is not available, it would
+    -- submit a request to the getOptionsLoop to get the options for the file
+    -- and wait until the options are available
+    let lookupOrWaitCache :: FilePath -> IO (IdeResult HscEnvEq, DependencyInfo)
+        lookupOrWaitCache absFile = do
          let ncfp = toNormalizedFilePath' absFile
          -- check if in the cache
          res <- atomically $ checkInCache ncfp
@@ -807,21 +811,17 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do
                Just r -> return r
                Nothing -> do
                  -- if not ok, we need to reload the session
-                 atomically $ do
-                   S.insert absFile pendingFileSet
-                 atomically $ do
-                   -- wait until pendingFiles is not in pendingFiles
-                   Extra.whenM (S.lookup absFile pendingFileSet) STM.retry
-                 getSessionRetry absFile
+                 atomically $ S.insert absFile pendingFileSet
+                 -- wait until pendingFiles is not in pendingFiles
+                 atomically $ Extra.whenM (S.lookup absFile pendingFileSet) STM.retry
+                 lookupOrWaitCache absFile
+
    -- see Note [Serializing runs in separate thread]
    -- Start the getOptionsLoop if the queue is empty
    liftIO $ atomically $ Extra.whenM (isEmptyTQueue que) $ writeTQueue que getOptionsLoop
    returnWithVersion $ \file -> do
      let absFile = toAbsolutePath file
-      second Map.keys <$> getSessionRetry absFile
-      -- atomically $ writeTQueue pendingFiles absFile
-      -- see Note [Serializing runs in separate thread]
-      -- awaitRunInThread que $ second Map.keys <$> getOptions absFile
+      second Map.keys <$> lookupOrWaitCache absFile
 
 -- | Run the specific cradle on a specific FilePath via hie-bios.
 -- This then builds dependencies or whatever based on the cradle, gets the

From 4c998bd487e48dcf85abbb14cc58d217c5dafd6a Mon Sep 17 00:00:00 2001
From: Patrick
Date: Fri, 8 Nov 2024 15:24:17 +0800
Subject: [PATCH 15/90] rename LogGetSessionRetry to LogLookupSessionCache for
 clarity in logging

---
 ghcide/session-loader/Development/IDE/Session.hs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs
index 99ca786506..c6d2dcbb84 100644
--- a/ghcide/session-loader/Development/IDE/Session.hs
+++ b/ghcide/session-loader/Development/IDE/Session.hs
@@ -154,12 +154,12 @@ data Log
   | LogSessionNewLoadedFiles ![FilePath]
   | LogSessionReloadOnError FilePath ![FilePath]
   | LogGetOptionsLoop !FilePath
-  | LogGetSessionRetry !FilePath
+  | LogLookupSessionCache !FilePath
 deriving instance Show Log
 
 instance Pretty Log where
   pretty = \case
-    LogGetSessionRetry path -> "Retrying get session for" <+> pretty path
+    LogLookupSessionCache path -> "Looking up session cache for" <+> pretty path
     LogGetOptionsLoop fp -> "Loop: getOptions for" <+> pretty fp
     LogSessionReloadOnError path files ->
       "Reloading file due to error in" <+> pretty path <+> "with files:" <+> pretty files
@@ -799,7 +799,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do
          let ncfp = toNormalizedFilePath' absFile
          -- check if in the cache
          res <- atomically $ checkInCache ncfp
-          logWith recorder Info $ LogGetSessionRetry absFile
+          logWith recorder Info $ LogLookupSessionCache absFile
          updateDateRes <- case res of
            Just r -> do
              depOk <- checkDependencyInfo (snd r)

From 79a43a0cbfa32a226c831a7eb9da0279d9049ab7 Mon Sep 17 00:00:00 2001
From: Patrick
Date: Sat, 9 Nov 2024 18:34:13 +0800
Subject: [PATCH 16/90] extract attempt to load files from errors

---
 cabal.project                                             | 5 +++++
 ghcide/session-loader/Development/IDE/Session.hs          | 8 +++++---
 .../session-loader/Development/IDE/Session/Diagnostics.hs | 2 +-
 3 files changed, 11 insertions(+), 4 deletions(-)

diff --git a/cabal.project b/cabal.project
index 2c872ed46f..2b46365f1f 100644
--- a/cabal.project
+++ b/cabal.project
@@ -46,3 +46,8 @@ constraints:
   if impl(ghc >= 9.9)
     -- https://github.com/haskell/haskell-language-server/issues/4324
     benchmarks: False
+
+source-repository-package
+  type: git
+  location: https://github.com/soulomoon/hie-bios.git
+  tag: 93582c21372af573e5103bad198777a3317a2df2
diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs
index c6d2dcbb84..f3bbc4d899 100644
--- a/ghcide/session-loader/Development/IDE/Session.hs
+++ b/ghcide/session-loader/Development/IDE/Session.hs
@@ -680,17 +680,19 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do
                    return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),Map.empty)
        -- Failure case, either a cradle error or the none cradle
        Left err -> do
-          if (not $ null extraToLoads)
+          let attemptToLoadFiles = (Set.delete cfp $ Set.fromList $ concatMap
cradleErrorLoadingFiles err) + `Set.difference` old_files + if (not $ null attemptToLoadFiles) then do -- mark as less loaded files as failedLoadingFiles as possible -- limitation is that when we are loading files, and the dependencies of old_files -- are changed, and old_files are not valid anymore. -- but they will still be in the old_files, and will not move to error_loading_files. -- And make other files failed to load in batch mode. - let failedLoadingFiles = (Set.insert cfp extraToLoads) `Set.difference` old_files + let failedLoadingFiles = (Set.insert cfp attemptToLoadFiles) atomicModifyIORef' error_loading_files (\xs -> (failedLoadingFiles <> xs,())) -- retry without other files - logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList pendingFiles) + logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles) consultCradle hieYaml cfp else do dep_info <- getDependencyInfo ((maybeToList hieYaml) ++ concatMap cradleErrorDependencies err) diff --git a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs index a8e35e5965..8b1136c0c8 100644 --- a/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs +++ b/ghcide/session-loader/Development/IDE/Session/Diagnostics.hs @@ -27,7 +27,7 @@ data CradleErrorDetails = Depicts the cradle error in a user-friendly way. -} renderCradleError :: CradleError -> Cradle a -> NormalizedFilePath -> FileDiagnostic -renderCradleError (CradleError deps _ec ms) cradle nfp +renderCradleError (CradleError deps _ec ms _attemptToLoadFiles) cradle nfp | HieBios.isCabalCradle cradle = let (fp, showDiag, diag) = ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) nfp $ T.unlines $ map T.pack userFriendlyMessage in (fp, showDiag, diag{_data_ = Just $ Aeson.toJSON CradleErrorDetails{cabalProjectFiles=absDeps}}) From beb1764608b01d8e659ce38ad914474f98880f50 Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 12 Nov 2024 19:53:35 +0800 Subject: [PATCH 17/90] refactor session loading to wait for pending files before cache check --- ghcide/session-loader/Development/IDE/Session.hs | 9 +++++---- .../session-loader/Development/IDE/Session/OrderedSet.hs | 7 ++++--- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index f3bbc4d899..c47cb7b381 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -799,8 +799,11 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let lookupOrWaitCache :: FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) lookupOrWaitCache absFile = do let ncfp = toNormalizedFilePath' absFile - -- check if in the cache - res <- atomically $ checkInCache ncfp + res <- atomically $ do + -- wait until target file is not in pendingFiles + Extra.whenM (S.lookup absFile pendingFileSet) STM.retry + -- check if in the cache + checkInCache ncfp logWith recorder Info $ LogLookupSessionCache absFile updateDateRes <- case res of Just r -> do @@ -814,8 +817,6 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Nothing -> do -- if not ok, we need to reload the session atomically $ S.insert absFile pendingFileSet - -- wait until pendingFiles is not in pendingFiles - atomically $ Extra.whenM (S.lookup absFile pendingFileSet) STM.retry lookupOrWaitCache absFile -- see Note [Serializing runs in separate thread] diff --git 
a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs index e1a5f123c2..ff67abd8b1 100644 --- a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs +++ b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs @@ -2,7 +2,9 @@ module Development.IDE.Session.OrderedSet where import Control.Concurrent.STM (STM, TQueue, newTQueueIO) import Control.Concurrent.STM.TQueue (readTQueue, writeTQueue) +import Control.Monad (when) import Data.Hashable (Hashable) +import qualified Focus import qualified ListT as LT import qualified StmContainers.Set as S import StmContainers.Set (Set) @@ -12,9 +14,8 @@ type OrderedSet a = (TQueue a, Set a) insert :: Hashable a => a -> OrderedSet a -> STM () insert a (que, s) = do - S.insert a s - writeTQueue que a - return () + (_, inserted) <- S.focus (Focus.testingIfInserts $ Focus.insert ()) a s + when inserted $ writeTQueue que a newIO :: Hashable a => IO (OrderedSet a) newIO = do From 61395222f11eb3c1751daf437b936f41ef712961 Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 19 Nov 2024 05:18:13 +0800 Subject: [PATCH 18/90] add LogTime to logging for improved time tracking during session loading --- .../session-loader/Development/IDE/Session.hs | 58 +++++++++++-------- 1 file changed, 33 insertions(+), 25 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index c47cb7b381..2b75329c1b 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -155,10 +155,12 @@ data Log | LogSessionReloadOnError FilePath ![FilePath] | LogGetOptionsLoop !FilePath | LogLookupSessionCache !FilePath + | LogTime !String deriving instance Show Log instance Pretty Log where pretty = \case + LogTime s -> "Time:" <+> pretty s LogLookupSessionCache path -> "Looking up session cache for" <+> pretty path LogGetOptionsLoop fp -> "Loop: getOptions for" <+> pretty fp LogSessionReloadOnError path files -> @@ -582,7 +584,6 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let flags_map' = HM.fromList (concatMap toFlagsMap all_targets') all_targets' = concat all_target_details - newLoaded = HM.keys flags_map' this_dep_info <- getDependencyInfo $ maybeToList hieYaml let (all_targets, this_flags_map, this_options) = case HM.lookup _cfp flags_map' of @@ -599,9 +600,11 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do ] let insertAll m xs = mapM_ (flip (uncurry STM.insert) m) xs + newLoaded = Set.fromList $ fromNormalizedFilePath <$> HM.keys this_flags_map atomically $ do STM.insert this_flags_map hieYaml fileToFlags insertAll filesMap $ map ((hieYaml,) . 
fst) $ concatMap toFlagsMap all_targets + forM_ newLoaded $ flip S.delete pendingFileSet -- Typecheck all files in the project on startup checkProject <- getCheckProject @@ -621,9 +624,9 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) return [keys1, keys2] - return $ (this_options, Set.fromList $ fromNormalizedFilePath <$> newLoaded) + return $ (this_options, newLoaded) - let consultCradle :: Maybe FilePath -> FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) + let consultCradle :: Maybe FilePath -> FilePath -> IO () consultCradle hieYaml cfp = do let lfpLog = makeRelative rootDir cfp logWith recorder Info $ LogCradlePath lfpLog @@ -658,32 +661,36 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- The cradle gave us some options so get to work turning them -- into and HscEnv. Right (opts, libDir, version) -> do + let ncfp = toNormalizedFilePath' cfp let compileTime = fullCompilerVersion case reverse $ readP_to_S parseVersion version of [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) | compileTime == runTime -> do - (results, allNewLoaded) <- session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) - -- delete cfp even if we report No cradle target found for the cfp + (_results, allNewLoaded) <- session (hieYaml, ncfp, opts, libDir) let newLoaded = pendingFiles `Set.intersection` allNewLoaded - -- delete all new loaded - atomically $ forM_ allNewLoaded $ flip S.delete pendingFileSet -- log new loaded files logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded -- remove all new loaded file from error loading files atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` allNewLoaded, ())) atomicModifyIORef' cradle_files (\xs -> (newLoaded <> xs,())) - return results | otherwise -> do -- delete cfp from pending files - atomically $ S.delete cfp pendingFileSet - return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),Map.empty) + atomically $ do + STM.focus (Focus.insertOrMerge HM.union + (HM.singleton ncfp (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing), mempty))) + hieYaml fileToFlags + STM.insert hieYaml ncfp filesMap + S.delete cfp pendingFileSet -- Failure case, either a cradle error or the none cradle Left err -> do let attemptToLoadFiles = (Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err) `Set.difference` old_files if (not $ null attemptToLoadFiles) + then do + -- we are loading more files and failed, we need to retry + -- mark as less loaded files as failedLoadingFiles as possible -- limitation is that when we are loading files, and the dependencies of old_files -- are changed, and old_files are not valid anymore. 
@@ -695,16 +702,16 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles) consultCradle hieYaml cfp else do + -- we are only loading this file and it failed dep_info <- getDependencyInfo ((maybeToList hieYaml) ++ concatMap cradleErrorDependencies err) let ncfp = toNormalizedFilePath' cfp let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) -- remove cfp from pending files - atomically $ S.delete cfp pendingFileSet + atomicModifyIORef' error_loading_files (\xs -> (Set.insert cfp xs,())) atomically $ do STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp (res, dep_info))) hieYaml fileToFlags - STM.insert hieYaml ncfp filesMap - atomicModifyIORef' error_loading_files (\xs -> (Set.insert cfp xs,())) - return (res, dep_info) + STM.insert hieYaml ncfp filesMap + S.delete cfp pendingFileSet let -- | We allow users to specify a loading strategy. @@ -727,7 +734,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- This caches the mapping from hie.yaml + Mod.hs -> [String] -- Returns the Ghc session and the cradle dependencies let sessionOpts :: (Maybe FilePath, FilePath) - -> IO (IdeResult HscEnvEq, DependencyInfo) + -> IO () sessionOpts (hieYaml, file) = do Extra.whenM didSessionLoadingPreferenceConfigChange $ do logWith recorder Info LogSessionLoadingChanged @@ -744,10 +751,9 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do v <- atomically $ fromMaybe HM.empty <$> STM.lookup hieYaml fileToFlags case HM.lookup (toNormalizedFilePath' file) v of - Just (opts, old_di) -> do + Just (_opts, old_di) -> do deps_ok <- checkDependencyInfo old_di - if not deps_ok - then do + when (not deps_ok) $ do -- if deps are old, we can try to load the error files again atomicModifyIORef' error_loading_files (\xs -> (Set.delete file xs,())) atomicModifyIORef' cradle_files (\xs -> (Set.delete file xs,())) @@ -759,7 +765,6 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- Keep the same name cache modifyVar_ hscEnvs (return . Map.adjust (const []) hieYaml ) consultCradle hieYaml file - else return (opts, old_di) Nothing -> consultCradle hieYaml file let checkInCache ::NormalizedFilePath -> STM (Maybe (IdeResult HscEnvEq, DependencyInfo)) @@ -772,24 +777,27 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- at a time. Therefore the IORef contains the currently running cradle, if we try -- to get some more options then we wait for the currently running action to finish -- before attempting to do so. 
- let getOptions :: FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) + let getOptions :: FilePath -> IO () getOptions file = do let ncfp = toNormalizedFilePath' file cachedHieYamlLocation <- atomically $ STM.lookup ncfp filesMap hieYaml <- cradleLoc file let hieLoc = join cachedHieYamlLocation <|> hieYaml - result <- sessionOpts (hieLoc, file) `Safe.catch` \e -> do + sessionOpts (hieLoc, file) `Safe.catch` \e -> do dep <- getDependencyInfo $ maybe [] pure hieYaml - return (([renderPackageSetupException file e], Nothing), dep) - atomically $ STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp result)) hieLoc fileToFlags - return result + let errorResult = (([renderPackageSetupException file e], Nothing), dep) + atomically $ do + STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp errorResult)) hieLoc fileToFlags + STM.insert hieYaml ncfp filesMap + -- delete file from pending files + S.delete file pendingFileSet let getOptionsLoop :: IO () getOptionsLoop = do -- Get the next file to load absFile <- atomically $ S.readQueue pendingFileSet logWith recorder Info (LogGetOptionsLoop absFile) - void $ getOptions absFile + getOptions absFile getOptionsLoop -- | Given a file, this function will return the HscEnv and the dependencies From 73145097fbff80f27d7d8d6411411a96de97bf22 Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 19 Nov 2024 18:13:49 +0800 Subject: [PATCH 19/90] refactor session loading to handle dependency checks more clearly --- ghcide/session-loader/Development/IDE/Session.hs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 2b75329c1b..02f3988f29 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -753,7 +753,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do case HM.lookup (toNormalizedFilePath' file) v of Just (_opts, old_di) -> do deps_ok <- checkDependencyInfo old_di - when (not deps_ok) $ do + if (not deps_ok) + then do -- if deps are old, we can try to load the error files again atomicModifyIORef' error_loading_files (\xs -> (Set.delete file xs,())) atomicModifyIORef' cradle_files (\xs -> (Set.delete file xs,())) @@ -765,6 +766,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- Keep the same name cache modifyVar_ hscEnvs (return . 
Map.adjust (const []) hieYaml ) consultCradle hieYaml file + -- if deps are ok, we can just remove the file from pending files + else atomically $ S.delete file pendingFileSet Nothing -> consultCradle hieYaml file let checkInCache ::NormalizedFilePath -> STM (Maybe (IdeResult HscEnvEq, DependencyInfo)) From cddcc55b9bbe40659ba5e7f25a3584ce20c41c8a Mon Sep 17 00:00:00 2001 From: Patrick Date: Wed, 20 Nov 2024 07:14:55 +0800 Subject: [PATCH 20/90] Refactors session loading logic Renames getOptions to getOptionsWorker for clarity Removes redundant getOptionsLoop function Ensures session loading is called under the same `Action` context --- ghcide/session-loader/Development/IDE/Session.hs | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 02f3988f29..74eabcc021 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -780,8 +780,9 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- at a time. Therefore the IORef contains the currently running cradle, if we try -- to get some more options then we wait for the currently running action to finish -- before attempting to do so. - let getOptions :: FilePath -> IO () - getOptions file = do + let getOptionsWorker :: FilePath -> IO () + getOptionsWorker file = do + logWith recorder Info (LogGetOptionsLoop file) let ncfp = toNormalizedFilePath' file cachedHieYamlLocation <- atomically $ STM.lookup ncfp filesMap hieYaml <- cradleLoc file @@ -795,14 +796,6 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- delete file from pending files S.delete file pendingFileSet - let getOptionsLoop :: IO () - getOptionsLoop = do - -- Get the next file to load - absFile <- atomically $ S.readQueue pendingFileSet - logWith recorder Info (LogGetOptionsLoop absFile) - getOptions absFile - getOptionsLoop - -- | Given a file, this function will return the HscEnv and the dependencies -- it would look up the cache first, if the cache is not available, it would -- submit a request to the getOptionsLoop to get the options for the file @@ -828,11 +821,12 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Nothing -> do -- if not ok, we need to reload the session atomically $ S.insert absFile pendingFileSet + -- line up the session to load + atomically $ writeTQueue que (getOptionsWorker absFile) lookupOrWaitCache absFile -- see Note [Serializing runs in separate thread] -- Start the getOptionsLoop if the queue is empty - liftIO $ atomically $ Extra.whenM (isEmptyTQueue que) $ writeTQueue que getOptionsLoop returnWithVersion $ \file -> do let absFile = toAbsolutePath file second Map.keys <$> lookupOrWaitCache absFile From bb78a36f473aa7439203d6e33e71d2b3a9a7fada Mon Sep 17 00:00:00 2001 From: Patrick Date: Sat, 7 Dec 2024 03:52:07 +0800 Subject: [PATCH 21/90] delay the restart --- .../session-loader/Development/IDE/Session.hs | 39 ++++++++++--------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 74eabcc021..9b31bb0188 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -570,7 +570,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let session :: (Maybe FilePath, NormalizedFilePath, 
ComponentOptions, FilePath) - -> IO ((IdeResult HscEnvEq,DependencyInfo), HashSet FilePath) + -> IO ((IdeResult HscEnvEq,DependencyInfo), HashSet FilePath, IO ()) session args@(hieYaml, _cfp, _opts, _libDir) = do (new_deps, old_deps) <- packageSetup args @@ -610,21 +610,21 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do checkProject <- getCheckProject -- The VFS doesn't change on cradle edits, re-use the old one. -- Invalidate all the existing GhcSession build nodes by restarting the Shake session - restartShakeSession VFSUnmodified "new component" [] $ do - keys2 <- invalidateShakeCache - keys1 <- extendKnownTargets all_targets - unless (null new_deps || not checkProject) $ do - cfps' <- liftIO $ filterM (IO.doesFileExist . fromNormalizedFilePath) (concatMap targetLocations all_targets) - void $ shakeEnqueue extras $ mkDelayedAction "InitialLoad" Debug $ void $ do - mmt <- uses GetModificationTime cfps' - let cs_exist = catMaybes (zipWith (<$) cfps' mmt) - modIfaces <- uses GetModIface cs_exist - -- update exports map - shakeExtras <- getShakeExtras - let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces - liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) - return [keys1, keys2] - return $ (this_options, newLoaded) + let restart = restartShakeSession VFSUnmodified "new component" [] $ do + keys2 <- invalidateShakeCache + keys1 <- extendKnownTargets all_targets + unless (null new_deps || not checkProject) $ do + cfps' <- liftIO $ filterM (IO.doesFileExist . fromNormalizedFilePath) (concatMap targetLocations all_targets) + void $ shakeEnqueue extras $ mkDelayedAction "InitialLoad" Debug $ void $ do + mmt <- uses GetModificationTime cfps' + let cs_exist = catMaybes (zipWith (<$) cfps' mmt) + modIfaces <- uses GetModIface cs_exist + -- update exports map + shakeExtras <- getShakeExtras + let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces + liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) + return [keys1, keys2] + return (this_options, newLoaded, restart) let consultCradle :: Maybe FilePath -> FilePath -> IO () consultCradle hieYaml cfp = do @@ -667,13 +667,14 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) | compileTime == runTime -> do - (_results, allNewLoaded) <- session (hieYaml, ncfp, opts, libDir) + (_results, allNewLoaded, restart) <- session (hieYaml, ncfp, opts, libDir) let newLoaded = pendingFiles `Set.intersection` allNewLoaded -- log new loaded files logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded -- remove all new loaded file from error loading files atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` allNewLoaded, ())) atomicModifyIORef' cradle_files (\xs -> (newLoaded <> xs,())) + restart | otherwise -> do -- delete cfp from pending files atomically $ do @@ -782,7 +783,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- before attempting to do so. 
let getOptionsWorker :: FilePath -> IO () getOptionsWorker file = do - logWith recorder Info (LogGetOptionsLoop file) + logWith recorder Debug (LogGetOptionsLoop file) let ncfp = toNormalizedFilePath' file cachedHieYamlLocation <- atomically $ STM.lookup ncfp filesMap hieYaml <- cradleLoc file @@ -808,7 +809,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Extra.whenM (S.lookup absFile pendingFileSet) STM.retry -- check if in the cache checkInCache ncfp - logWith recorder Info $ LogLookupSessionCache absFile + logWith recorder Debug $ LogLookupSessionCache absFile updateDateRes <- case res of Just r -> do depOk <- checkDependencyInfo (snd r) From 58ec7eac149a4504d4084519a97bb3ffa255595b Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 18 Feb 2025 05:16:01 +0800 Subject: [PATCH 22/90] re-inline the old file instead of loading it twice --- ghcide/session-loader/Development/IDE/Session.hs | 16 +++++++++++----- .../Development/IDE/Session/OrderedSet.hs | 14 +++++++++++--- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 1b19561c54..3ec7db2e6c 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -772,9 +772,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- at a time. Therefore the IORef contains the currently running cradle, if we try -- to get some more options then we wait for the currently running action to finish -- before attempting to do so. - let getOptionsWorker :: FilePath -> IO () - getOptionsWorker file = do - logWith recorder Debug (LogGetOptionsLoop file) + let getOptions :: FilePath -> IO () + getOptions file = do let ncfp = toNormalizedFilePath' file cachedHieYamlLocation <- atomically $ STM.lookup ncfp filesMap hieYaml <- cradleLoc file @@ -788,6 +787,14 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- delete file from pending files S.delete file pendingFileSet + let getOptionsLoop :: IO () + getOptionsLoop = do + -- Get the next file to load + absFile <- atomically $ S.readQueue pendingFileSet + logWith recorder Debug (LogGetOptionsLoop absFile) + getOptions absFile + getOptionsLoop + -- | Given a file, this function will return the HscEnv and the dependencies -- it would look up the cache first, if the cache is not available, it would -- submit a request to the getOptionsLoop to get the options for the file @@ -813,12 +820,11 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Nothing -> do -- if not ok, we need to reload the session atomically $ S.insert absFile pendingFileSet - -- line up the session to load - atomically $ writeTQueue que (getOptionsWorker absFile) lookupOrWaitCache absFile -- see Note [Serializing runs in separate thread] -- Start the getOptionsLoop if the queue is empty + liftIO $ atomically $ Extra.whenM (isEmptyTQueue que) $ writeTQueue que getOptionsLoop returnWithVersion $ \file -> do let absFile = toAbsolutePath file second Map.keys <$> lookupOrWaitCache absFile diff --git a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs index ff67abd8b1..a2b0a76565 100644 --- a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs +++ b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs @@ -1,6 +1,7 @@ module Development.IDE.Session.OrderedSet where -import 
Control.Concurrent.STM (STM, TQueue, newTQueueIO) +import Control.Concurrent.STM (STM, TQueue, flushTQueue, + newTQueueIO) import Control.Concurrent.STM.TQueue (readTQueue, writeTQueue) import Control.Monad (when) import Data.Hashable (Hashable) @@ -15,7 +16,14 @@ type OrderedSet a = (TQueue a, Set a) insert :: Hashable a => a -> OrderedSet a -> STM () insert a (que, s) = do (_, inserted) <- S.focus (Focus.testingIfInserts $ Focus.insert ()) a s - when inserted $ writeTQueue que a + -- if already in the set + -- update the position of the element in the queue + when (not inserted) $ do + items <- filter (==a) <$> flushTQueue que + mapM_ (writeTQueue que) items + return () + writeTQueue que a + -- when que $ writeTQueue que a newIO :: Hashable a => IO (OrderedSet a) newIO = do @@ -27,7 +35,7 @@ readQueue :: Hashable a => OrderedSet a -> STM a readQueue rs@(que, s) = do f <- readTQueue que b <- S.lookup f s - -- retry if the file is already in done + -- retry if no files are left in the queue if b then return f else readQueue rs lookup :: Hashable a => a -> OrderedSet a -> STM Bool From d9439637ec7f5ccee4491bb9fb396aef4d3a44f3 Mon Sep 17 00:00:00 2001 From: Patrick Date: Wed, 19 Feb 2025 01:05:49 +0800 Subject: [PATCH 23/90] update upload artifact action version --- .github/workflows/bench.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 659352e4e6..b9d6d49059 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -62,7 +62,7 @@ jobs: # change of the strategy may require changing the bootstrapping/run code steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: # By default, the `pull_request` event has a `GITHUB_SHA` env variable @@ -101,14 +101,14 @@ jobs: tar -czf cabal.tar.gz * - name: Upload workspace - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: workspace-${{ matrix.ghc }}-${{ matrix.os }} retention-days: 1 path: workspace.tar.gz - name: Upload .cabal - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }} retention-days: 1 @@ -134,13 +134,13 @@ jobs: enable-stack: false - name: Download cabal home - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }} path: . - name: Download workspace - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: workspace-${{ matrix.ghc }}-${{ matrix.os }} path: . 
@@ -165,7 +165,7 @@ jobs: run: find bench-results -name "*.csv" -or -name "*.svg" -or -name "*.html" | xargs tar -czf benchmark-artifacts.tar.gz - name: Archive benchmarking artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: bench-results-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }} path: benchmark-artifacts.tar.gz @@ -175,7 +175,7 @@ jobs: run: find bench-results -name "*.log" -or -name "*.hp" | xargs tar -czf benchmark-logs.tar.gz - name: Archive benchmark logs - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: bench-logs-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }} path: benchmark-logs.tar.gz From 5d015001976a1562b0f9ca612ab44a494656c081 Mon Sep 17 00:00:00 2001 From: Patrick Date: Wed, 26 Feb 2025 05:01:54 +0800 Subject: [PATCH 24/90] update hie-bios tag --- cabal.project | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cabal.project b/cabal.project index 2e32d6715f..b45f1ba86d 100644 --- a/cabal.project +++ b/cabal.project @@ -66,4 +66,4 @@ if impl(ghc >= 9.8.4) && impl(ghc < 9.8.5) source-repository-package type: git location: https://github.com/soulomoon/hie-bios.git - tag: 93582c21372af573e5103bad198777a3317a2df2 \ No newline at end of file + tag: 84febb04ba152b03fd42b551ffb2ea6e7506cf9b From 10a6f7e7c69dfc150aff34e0c6cebc37a127eca6 Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 27 Feb 2025 05:17:54 +0800 Subject: [PATCH 25/90] Update hie-bios tag to latest commit --- cabal.project | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cabal.project b/cabal.project index b45f1ba86d..54c46e6eca 100644 --- a/cabal.project +++ b/cabal.project @@ -66,4 +66,4 @@ if impl(ghc >= 9.8.4) && impl(ghc < 9.8.5) source-repository-package type: git location: https://github.com/soulomoon/hie-bios.git - tag: 84febb04ba152b03fd42b551ffb2ea6e7506cf9b + tag: 3351cfc5becee6a09df47df4772598fb2207b746 From 45b124137d7111274bcb06470ed855bb0377b8eb Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 3 Mar 2025 00:18:43 +0800 Subject: [PATCH 26/90] update hie-bios --- cabal.project | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cabal.project b/cabal.project index 54c46e6eca..794ccb4fb2 100644 --- a/cabal.project +++ b/cabal.project @@ -65,5 +65,5 @@ if impl(ghc >= 9.8.4) && impl(ghc < 9.8.5) source-repository-package type: git - location: https://github.com/soulomoon/hie-bios.git - tag: 3351cfc5becee6a09df47df4772598fb2207b746 + location: https://github.com/haskell/hie-bios + tag: bc502c94b891719f07e5ada9f6f59ca4ba8e08ff From 219db463049bfac6408f6cde3f9a0b3262a9e059 Mon Sep 17 00:00:00 2001 From: Patrick Date: Mon, 3 Mar 2025 01:19:42 +0800 Subject: [PATCH 27/90] update index-state to reflect the latest project state --- cabal.project | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cabal.project b/cabal.project index 794ccb4fb2..4c9d2b25e0 100644 --- a/cabal.project +++ b/cabal.project @@ -8,7 +8,7 @@ packages: ./hls-test-utils -index-state: 2024-12-02T00:00:00Z +index-state: 2025-03-02T16:10:12Z tests: True test-show-details: direct From 2f86db0b0abd5f86af42ec96a0b2e7f4da077a1f Mon Sep 17 00:00:00 2001 From: Patrick Date: Tue, 4 Mar 2025 00:13:35 +0800 Subject: [PATCH 28/90] update index-state to reflect the new date --- cabal.project | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cabal.project b/cabal.project index 4c9d2b25e0..794ccb4fb2 100644 --- a/cabal.project +++ b/cabal.project @@ -8,7 +8,7 @@ packages: 
./hls-test-utils -index-state: 2025-03-02T16:10:12Z +index-state: 2024-12-02T00:00:00Z tests: True test-show-details: direct From de98232569ab0104b5ad34a7fbf1e49316e63375 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 06:08:57 +0800 Subject: [PATCH 29/90] update fourmolu dependency version constraints --- haskell-language-server.cabal | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index dcbb546733..eece96f992 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -1497,7 +1497,7 @@ library hls-fourmolu-plugin build-depends: , base >=4.12 && <5 , filepath - , fourmolu ^>= 0.14 || ^>= 0.15 || ^>= 0.16 + , fourmolu ^>= 0.14 || ^>= 0.15 || >= 0.16 && < 0.16.2 , ghc-boot-th , ghcide == 2.9.0.1 , hls-plugin-api == 2.9.0.1 From f0a881d1a5b24ca648230e53ef63d28c14524a49 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 06:23:24 +0800 Subject: [PATCH 30/90] remove ListT from library dependencies --- ghcide/ghcide.cabal | 1 - 1 file changed, 1 deletion(-) diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal index 3b88a2024c..1468128d9a 100644 --- a/ghcide/ghcide.cabal +++ b/ghcide/ghcide.cabal @@ -111,7 +111,6 @@ library , unliftio-core , unordered-containers >=0.2.10.0 , vector - , ListT if os(windows) build-depends: Win32 From 14f6a3b93a693e1daf141a2f9172c32509fcb166 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 06:23:46 +0800 Subject: [PATCH 31/90] update hie-bios to a new tag --- cabal.project | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cabal.project b/cabal.project index 794ccb4fb2..66fa8a3ff8 100644 --- a/cabal.project +++ b/cabal.project @@ -66,4 +66,4 @@ if impl(ghc >= 9.8.4) && impl(ghc < 9.8.5) source-repository-package type: git location: https://github.com/haskell/hie-bios - tag: bc502c94b891719f07e5ada9f6f59ca4ba8e08ff + tag: 65df091a4464b43043661d70c1dcfed1ed1fa9a9 From 8b0e246a9be0f513813ac8728801ecd3b3a81873 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 06:24:01 +0800 Subject: [PATCH 32/90] update fourmolu dependency version constraints to allow 0.16 --- haskell-language-server.cabal | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index eece96f992..dcbb546733 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -1497,7 +1497,7 @@ library hls-fourmolu-plugin build-depends: , base >=4.12 && <5 , filepath - , fourmolu ^>= 0.14 || ^>= 0.15 || >= 0.16 && < 0.16.2 + , fourmolu ^>= 0.14 || ^>= 0.15 || ^>= 0.16 , ghc-boot-th , ghcide == 2.9.0.1 , hls-plugin-api == 2.9.0.1 From 2dd71c00f30032d30e339f250c6c334e5a978f29 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 06:34:50 +0800 Subject: [PATCH 33/90] add allow-newer constraint for Cabal-syntax --- cabal.project | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cabal.project b/cabal.project index 66fa8a3ff8..f46df91127 100644 --- a/cabal.project +++ b/cabal.project @@ -63,6 +63,8 @@ if impl(ghc >= 9.8.4) && impl(ghc < 9.8.5) constraints: ghc-lib-parser==9.8.4.20241130 +allow-newer: + Cabal-syntax source-repository-package type: git location: https://github.com/haskell/hie-bios From b8406d60b991a8d92435405579a77ff6f80bbf5b Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 06:37:53 +0800 Subject: [PATCH 34/90] remove allow-newer constraint for Cabal-syntax --- cabal.project | 2 -- 1 file changed, 2 deletions(-) diff --git 
a/cabal.project b/cabal.project index f46df91127..66fa8a3ff8 100644 --- a/cabal.project +++ b/cabal.project @@ -63,8 +63,6 @@ if impl(ghc >= 9.8.4) && impl(ghc < 9.8.5) constraints: ghc-lib-parser==9.8.4.20241130 -allow-newer: - Cabal-syntax source-repository-package type: git location: https://github.com/haskell/hie-bios From 5ea3d87b8ef711949a4fb73eab2472bc21dc19cd Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 06:50:27 +0800 Subject: [PATCH 35/90] bump actions/checkout and actions/upload-artifact to v3 --- .github/workflows/bench.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index b9d6d49059..659352e4e6 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -62,7 +62,7 @@ jobs: # change of the strategy may require changing the bootstrapping/run code steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 with: # By default, the `pull_request` event has a `GITHUB_SHA` env variable @@ -101,14 +101,14 @@ jobs: tar -czf cabal.tar.gz * - name: Upload workspace - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v3 with: name: workspace-${{ matrix.ghc }}-${{ matrix.os }} retention-days: 1 path: workspace.tar.gz - name: Upload .cabal - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v3 with: name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }} retention-days: 1 @@ -134,13 +134,13 @@ jobs: enable-stack: false - name: Download cabal home - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v3 with: name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }} path: . - name: Download workspace - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v3 with: name: workspace-${{ matrix.ghc }}-${{ matrix.os }} path: . @@ -165,7 +165,7 @@ jobs: run: find bench-results -name "*.csv" -or -name "*.svg" -or -name "*.html" | xargs tar -czf benchmark-artifacts.tar.gz - name: Archive benchmarking artifacts - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v3 with: name: bench-results-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }} path: benchmark-artifacts.tar.gz @@ -175,7 +175,7 @@ jobs: run: find bench-results -name "*.log" -or -name "*.hp" | xargs tar -czf benchmark-logs.tar.gz - name: Archive benchmark logs - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v3 with: name: bench-logs-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }} path: benchmark-logs.tar.gz From 3e0c27b1de952c84cd4f8598907be79d0be4d735 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 06:53:05 +0800 Subject: [PATCH 36/90] Revert "bump actions/checkout and actions/upload-artifact to v3" This reverts commit 5ea3d87b8ef711949a4fb73eab2472bc21dc19cd. 
--- .github/workflows/bench.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 659352e4e6..b9d6d49059 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -62,7 +62,7 @@ jobs: # change of the strategy may require changing the bootstrapping/run code steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: # By default, the `pull_request` event has a `GITHUB_SHA` env variable @@ -101,14 +101,14 @@ jobs: tar -czf cabal.tar.gz * - name: Upload workspace - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: workspace-${{ matrix.ghc }}-${{ matrix.os }} retention-days: 1 path: workspace.tar.gz - name: Upload .cabal - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }} retention-days: 1 @@ -134,13 +134,13 @@ jobs: enable-stack: false - name: Download cabal home - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: cabal-home-${{ matrix.ghc }}-${{ matrix.os }} path: . - name: Download workspace - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: workspace-${{ matrix.ghc }}-${{ matrix.os }} path: . @@ -165,7 +165,7 @@ jobs: run: find bench-results -name "*.csv" -or -name "*.svg" -or -name "*.html" | xargs tar -czf benchmark-artifacts.tar.gz - name: Archive benchmarking artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: bench-results-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }} path: benchmark-artifacts.tar.gz @@ -175,7 +175,7 @@ jobs: run: find bench-results -name "*.log" -or -name "*.hp" | xargs tar -czf benchmark-logs.tar.gz - name: Archive benchmark logs - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: bench-logs-${{ matrix.example }}-${{ runner.os }}-${{ matrix.ghc }} path: benchmark-logs.tar.gz From 8c27e3479bd4ba7fd699ba76396a8f4419d60ce6 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 7 Mar 2025 22:21:37 +0800 Subject: [PATCH 37/90] add allow-older constraint for optparse-applicative in cabal.project --- cabal.project | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cabal.project b/cabal.project index 66fa8a3ff8..efc8e3a895 100644 --- a/cabal.project +++ b/cabal.project @@ -67,3 +67,5 @@ source-repository-package type: git location: https://github.com/haskell/hie-bios tag: 65df091a4464b43043661d70c1dcfed1ed1fa9a9 +-- see https://github.com/haskell/haskell-language-server/pull/4445#issuecomment-2705787655 +allow-older: hie-bios:optparse-applicative From b0af63434ea35f4c40911a630f043c4eb51215cf Mon Sep 17 00:00:00 2001 From: Patrick Date: Thu, 24 Apr 2025 19:03:06 +0800 Subject: [PATCH 38/90] update hie-bios --- cabal.project | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cabal.project b/cabal.project index c69496e295..e54c7f4bfe 100644 --- a/cabal.project +++ b/cabal.project @@ -56,7 +56,7 @@ allow-newer: source-repository-package type: git location: https://github.com/haskell/hie-bios - tag: 65df091a4464b43043661d70c1dcfed1ed1fa9a9 + tag: e372a62b780b1314a35238a698a9e3813096b122 -- see https://github.com/haskell/haskell-language-server/pull/4445#issuecomment-2705787655 allow-older: hie-bios:optparse-applicative From 06fa5de52bd4b10e28bab42c9c076f409b356d43 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Sun, 27 Apr 2025 16:19:26 +0800 Subject: [PATCH 39/90] introduce SessionState --- 
.../session-loader/Development/IDE/Session.hs | 259 ++++++++++++------ 1 file changed, 173 insertions(+), 86 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index fd50fa5bc0..76d10c9d66 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -418,6 +418,125 @@ getHieDbLoc dir = do createDirectoryIfMissing True cDir pure (cDir db) +data SessionState = SessionState + { cradle_files :: !(IORef (HashSet FilePath)) + , error_loading_files :: !(IORef (HashSet FilePath)) + , hscEnvs :: !(Var HieMap) + , fileToFlags :: !(STM.Map (Maybe FilePath) (HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo))) + , filesMap :: !(STM.Map NormalizedFilePath (Maybe FilePath)) + , pendingFileSet :: !(S.OrderedSet FilePath) + , version :: !(Var Int) + } + +-- | Helper functions for SessionState management +-- These functions encapsulate common operations on the SessionState + +-- | Add a file to the set of files with errors during loading +addErrorLoadingFile :: SessionState -> FilePath -> IO () +addErrorLoadingFile state file = + atomicModifyIORef' (error_loading_files state) (\xs -> (Set.insert file xs, ())) + +addErrorLoadingFiles :: SessionState -> [FilePath] -> IO () +addErrorLoadingFiles = mapM_ . addErrorLoadingFile + +-- | Remove a file from the set of files with errors during loading +removeErrorLoadingFile :: SessionState -> FilePath -> IO () +removeErrorLoadingFile state file = + atomicModifyIORef' (error_loading_files state) (\xs -> (Set.delete file xs, ())) + +addCradleFiles :: SessionState -> HashSet FilePath -> IO () +addCradleFiles state files = + atomicModifyIORef' (cradle_files state) (\xs -> (files <> xs, ())) + +-- | Remove a file from the cradle files set +removeCradleFile :: SessionState -> FilePath -> IO () +removeCradleFile state file = + atomicModifyIORef' (cradle_files state) (\xs -> (Set.delete file xs, ())) + +-- | Clear error loading files and reset to empty set +clearErrorLoadingFiles :: SessionState -> IO () +clearErrorLoadingFiles state = + atomicModifyIORef' (error_loading_files state) (\_ -> (Set.empty, ())) + +-- | Clear cradle files and reset to empty set +clearCradleFiles :: SessionState -> IO () +clearCradleFiles state = + atomicModifyIORef' (cradle_files state) (\_ -> (Set.empty, ())) + +-- | Reset the file maps in the session state +resetFileMaps :: SessionState -> STM () +resetFileMaps state = do + STM.reset (filesMap state) + STM.reset (fileToFlags state) + +-- | Insert or update file flags for a specific hieYaml and normalized file path +insertFileFlags :: SessionState -> Maybe FilePath -> NormalizedFilePath -> (IdeResult HscEnvEq, DependencyInfo) -> STM () +insertFileFlags state hieYaml ncfp flags = + STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp flags)) hieYaml (fileToFlags state) + +-- | Insert a file mapping from normalized path to hieYaml location +insertFileMapping :: SessionState -> Maybe FilePath -> NormalizedFilePath -> STM () +insertFileMapping state hieYaml ncfp = + STM.insert hieYaml ncfp (filesMap state) + +-- | Remove a file from the pending file set +removeFromPending :: SessionState -> FilePath -> STM () +removeFromPending state file = + S.delete file (pendingFileSet state) + +-- | Add a file to the pending file set +addToPending :: SessionState -> FilePath -> STM () +addToPending state file = + S.insert file (pendingFileSet state) + +-- | Common pattern: Insert file flags, insert file 
mapping, and remove from pending +completeFileProcessing :: SessionState -> Maybe FilePath -> NormalizedFilePath -> FilePath -> (IdeResult HscEnvEq, DependencyInfo) -> IO () +completeFileProcessing state hieYaml ncfp file flags = do +-- remove cfp from pending files + addErrorLoadingFile state file + removeCradleFile state file + atomically $ do + insertFileFlags state hieYaml ncfp flags + insertFileMapping state hieYaml ncfp + removeFromPending state file + +-- | Insert multiple file mappings at once +insertAllFileMappings :: SessionState -> [(Maybe FilePath, NormalizedFilePath)] -> STM () +insertAllFileMappings state mappings = + mapM_ (\(yaml, path) -> insertFileMapping state yaml path) mappings + +-- | Increment the version counter +incrementVersion :: SessionState -> IO Int +incrementVersion state = modifyVar' (version state) succ + +-- | Get files from the pending file set +getPendingFiles :: SessionState -> IO (HashSet FilePath) +getPendingFiles state = atomically $ Set.fromList <$> S.toUnOrderedList (pendingFileSet state) + +-- | Handle errors during session loading by recording file as having error and removing from pending +handleSessionError :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> IO () +handleSessionError state hieYaml file e = do + dep <- getDependencyInfo $ maybe [] pure hieYaml + let ncfp = toNormalizedFilePath' file + let errorResult = (([renderPackageSetupException file e], Nothing), dep) + completeFileProcessing state hieYaml ncfp file errorResult + +-- | Get the set of extra files to load based on the current file path +-- If the current file is in error loading files, we fallback to single loading mode (empty set) +-- Otherwise, we remove error files from pending files and also exclude the current file +getExtraFilesToLoad :: SessionState -> FilePath -> IO [FilePath] +getExtraFilesToLoad state cfp = do + pendingFiles <- getPendingFiles state + errorFiles <- readIORef (error_loading_files state) + old_files <- readIORef (cradle_files state) + -- if the file is in error loading files, we fall back to single loading mode + return $ + Set.toList $ + if cfp `Set.member` errorFiles + then Set.empty + -- remove error files from pending files since error loading need to load one by one + else (Set.delete cfp $ pendingFiles `Set.difference` errorFiles) <> old_files + -- | Given a root directory, return a Shake 'Action' which setups an -- 'IdeGhcSession' given a file. -- Some of the many things this does: @@ -435,23 +554,20 @@ getHieDbLoc dir = do loadSessionWithOptions :: Recorder (WithPriority Log) -> SessionLoadingOptions -> FilePath -> TQueue (IO ()) -> IO (Action IdeGhcSession) loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let toAbsolutePath = toAbsolute rootDir -- see Note [Root Directory] - cradle_files <- newIORef (Set.fromList []) - error_loading_files <- newIORef (Set.fromList []) - -- Mapping from hie.yaml file to HscEnv, one per hie.yaml file - hscEnvs <- newVar Map.empty :: IO (Var HieMap) - -- Mapping from a Filepath to HscEnv - fileToFlags <- STM.newIO :: IO FlagsMap - -- Mapping from a Filepath to its 'hie.yaml' location. - -- Should hold the same Filepaths as 'fileToFlags', otherwise - -- they are inconsistent. So, everywhere you modify 'fileToFlags', - -- you have to modify 'filesMap' as well. 
- filesMap <- STM.newIO :: IO FilesMap - -- Pending files waiting to be loaded - pendingFileSet <- S.newIO :: IO (S.OrderedSet FilePath) - -- Version of the mappings above - version <- newVar 0 + + -- Initialize SessionState + sessionState <- SessionState + <$> newIORef (Set.fromList []) -- cradle_files + <*> newIORef (Set.fromList []) -- error_loading_files + <*> newVar Map.empty -- hscEnvs + <*> STM.newIO -- fileToFlags + <*> STM.newIO -- filesMap + <*> S.newIO -- pendingFileSet + <*> newVar 0 -- version + biosSessionLoadingVar <- newVar Nothing :: IO (Var (Maybe SessionLoadingPreferenceConfig)) - let returnWithVersion fun = IdeGhcSession fun <$> liftIO (readVar version) + let returnWithVersion fun = IdeGhcSession fun <$> liftIO (readVar (version sessionState)) + -- This caches the mapping from Mod.hs -> hie.yaml cradleLoc <- liftIO $ memoIO $ \v -> do res <- findCradle v @@ -466,7 +582,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do extras@ShakeExtras{restartShakeSession, ideNc, knownTargetsVar, lspEnv } <- getShakeExtras let invalidateShakeCache = do - void $ modifyVar' version succ + void $ incrementVersion sessionState return $ toNoFileKey GhcSessionIO IdeOptions{ optTesting = IdeTesting optTesting @@ -523,7 +639,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- or making a new one. The lookup returns the HscEnv and a list of -- information about other components loaded into the HscEnv -- (unitId, DynFlag, Targets) - modifyVar hscEnvs $ \m -> do + modifyVar (hscEnvs sessionState) $ \m -> do -- Just deps if there's already an HscEnv -- Nothing is it's the first time we are making an HscEnv let oldDeps = Map.lookup hieYaml m @@ -594,12 +710,11 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do ]) Nothing - let insertAll m xs = mapM_ (flip (uncurry STM.insert) m) xs - newLoaded = Set.fromList $ fromNormalizedFilePath <$> HM.keys this_flags_map + let newLoaded = Set.fromList $ fromNormalizedFilePath <$> HM.keys this_flags_map atomically $ do - STM.insert this_flags_map hieYaml fileToFlags - insertAll filesMap $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets - forM_ newLoaded $ flip S.delete pendingFileSet + STM.insert this_flags_map hieYaml (fileToFlags sessionState) + insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets + forM_ newLoaded $ flip S.delete (pendingFileSet sessionState) -- Typecheck all files in the project on startup checkProject <- getCheckProject @@ -635,19 +750,11 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) <> " (for " <> T.pack lfpLog <> ")" - pendingFiles <- Set.insert cfp . 
Set.fromList <$> (atomically $ S.toUnOrderedList pendingFileSet) - errorFiles <- readIORef error_loading_files - old_files <- readIORef cradle_files - -- if the file is in error loading files, we fall back to single loading mode - let extraToLoads = if cfp `Set.member` errorFiles - then Set.empty - -- remove error files from pending files since error loading need to load one by one - else Set.delete cfp $ pendingFiles `Set.difference` errorFiles - + extraToLoads <- getExtraFilesToLoad sessionState cfp eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ withTrace "Load cradle" $ \addTag -> do addTag "file" lfpLog - res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp (Set.toList $ Set.delete cfp $ extraToLoads <> old_files) + res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp extraToLoads addTag "result" (show res) return res @@ -663,51 +770,42 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do ((runTime, _):_) | compileTime == runTime -> do (_results, allNewLoaded, restart) <- session (hieYaml, ncfp, opts, libDir) + pendingFiles <- getPendingFiles sessionState let newLoaded = pendingFiles `Set.intersection` allNewLoaded -- log new loaded files logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded -- remove all new loaded file from error loading files - atomicModifyIORef' error_loading_files (\old -> (old `Set.difference` allNewLoaded, ())) - atomicModifyIORef' cradle_files (\xs -> (newLoaded <> xs,())) + mapM_ (removeErrorLoadingFile sessionState) (Set.toList allNewLoaded) + addCradleFiles sessionState newLoaded restart | otherwise -> do - -- delete cfp from pending files - atomically $ do - STM.focus (Focus.insertOrMerge HM.union - (HM.singleton ncfp (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing), mempty))) - hieYaml fileToFlags - STM.insert hieYaml ncfp filesMap - S.delete cfp pendingFileSet + -- Use the common pattern here: updateFileState + completeFileProcessing sessionState hieYaml ncfp cfp + (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing), mempty) -- Failure case, either a cradle error or the none cradle Left err -> do - let attemptToLoadFiles = (Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err) - `Set.difference` old_files - if (not $ null attemptToLoadFiles) - + -- what if the error to load file is one of old_files ? + let attemptToLoadFiles = Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err + old_files <- readIORef (cradle_files sessionState) + let errorToLoadNewFiles = attemptToLoadFiles `Set.difference` old_files + if not (null errorToLoadNewFiles) then do -- we are loading more files and failed, we need to retry - -- mark as less loaded files as failedLoadingFiles as possible -- limitation is that when we are loading files, and the dependencies of old_files -- are changed, and old_files are not valid anymore. -- but they will still be in the old_files, and will not move to error_loading_files. -- And make other files failed to load in batch mode. 
- let failedLoadingFiles = (Set.insert cfp attemptToLoadFiles) - atomicModifyIORef' error_loading_files (\xs -> (failedLoadingFiles <> xs,())) + addErrorLoadingFiles sessionState (Set.toList errorToLoadNewFiles) -- retry without other files logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles) consultCradle hieYaml cfp else do -- we are only loading this file and it failed - dep_info <- getDependencyInfo ((maybeToList hieYaml) ++ concatMap cradleErrorDependencies err) + dep_info <- getDependencyInfo (maybeToList hieYaml ++ concatMap cradleErrorDependencies err) let ncfp = toNormalizedFilePath' cfp let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) - -- remove cfp from pending files - atomicModifyIORef' error_loading_files (\xs -> (Set.insert cfp xs,())) - atomically $ do - STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp (res, dep_info))) hieYaml fileToFlags - STM.insert hieYaml ncfp filesMap - S.delete cfp pendingFileSet + completeFileProcessing sessionState hieYaml ncfp cfp (res, dep_info) let -- | We allow users to specify a loading strategy. @@ -736,40 +834,36 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do logWith recorder Info LogSessionLoadingChanged -- If the dependencies are out of date then clear both caches and start -- again. - atomically $ do - STM.reset filesMap - STM.reset fileToFlags + atomically $ resetFileMaps sessionState -- Don't even keep the name cache, we start from scratch here! - modifyVar_ hscEnvs (const (return Map.empty)) + modifyVar_ (hscEnvs sessionState) (const (return Map.empty)) -- cleanup error loading files and cradle files - atomicModifyIORef' error_loading_files (\_ -> (Set.empty,())) - atomicModifyIORef' cradle_files (\_ -> (Set.empty,())) + clearErrorLoadingFiles sessionState + clearCradleFiles sessionState - v <- atomically $ fromMaybe HM.empty <$> STM.lookup hieYaml fileToFlags - case HM.lookup (toNormalizedFilePath' file) v of + v <- atomically $ STM.lookup hieYaml (fileToFlags sessionState) + case v >>= HM.lookup (toNormalizedFilePath' file) of Just (_opts, old_di) -> do deps_ok <- checkDependencyInfo old_di - if (not deps_ok) + if not deps_ok then do -- if deps are old, we can try to load the error files again - atomicModifyIORef' error_loading_files (\xs -> (Set.delete file xs,())) - atomicModifyIORef' cradle_files (\xs -> (Set.delete file xs,())) + removeErrorLoadingFile sessionState file + removeCradleFile sessionState file -- If the dependencies are out of date then clear both caches and start -- again. - atomically $ do - STM.reset filesMap - STM.reset fileToFlags + atomically $ resetFileMaps sessionState -- Keep the same name cache - modifyVar_ hscEnvs (return . Map.adjust (const []) hieYaml ) + modifyVar_ (hscEnvs sessionState) (return . 
Map.adjust (const []) hieYaml) consultCradle hieYaml file -- if deps are ok, we can just remove the file from pending files - else atomically $ S.delete file pendingFileSet + else atomically $ removeFromPending sessionState file Nothing -> consultCradle hieYaml file let checkInCache ::NormalizedFilePath -> STM (Maybe (IdeResult HscEnvEq, DependencyInfo)) checkInCache ncfp = runMaybeT $ do - cachedHieYamlLocation <- MaybeT $ STM.lookup ncfp filesMap - m <- MaybeT $ STM.lookup cachedHieYamlLocation fileToFlags + cachedHieYamlLocation <- MaybeT $ STM.lookup ncfp (filesMap sessionState) + m <- MaybeT $ STM.lookup cachedHieYamlLocation (fileToFlags sessionState) MaybeT $ pure $ HM.lookup ncfp m -- The main function which gets options for a file. We only want one of these running @@ -779,22 +873,15 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let getOptions :: FilePath -> IO () getOptions file = do let ncfp = toNormalizedFilePath' file - cachedHieYamlLocation <- atomically $ STM.lookup ncfp filesMap + cachedHieYamlLocation <- atomically $ STM.lookup ncfp (filesMap sessionState) hieYaml <- cradleLoc file let hieLoc = join cachedHieYamlLocation <|> hieYaml - sessionOpts (hieLoc, file) `Safe.catch` \e -> do - dep <- getDependencyInfo $ maybe [] pure hieYaml - let errorResult = (([renderPackageSetupException file e], Nothing), dep) - atomically $ do - STM.focus (Focus.insertOrMerge HM.union (HM.singleton ncfp errorResult)) hieLoc fileToFlags - STM.insert hieYaml ncfp filesMap - -- delete file from pending files - S.delete file pendingFileSet + sessionOpts (hieLoc, file) `Safe.catch` handleSessionError sessionState hieLoc file let getOptionsLoop :: IO () getOptionsLoop = do -- Get the next file to load - absFile <- atomically $ S.readQueue pendingFileSet + absFile <- atomically $ S.readQueue (pendingFileSet sessionState) logWith recorder Debug (LogGetOptionsLoop absFile) getOptions absFile getOptionsLoop @@ -808,7 +895,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let ncfp = toNormalizedFilePath' absFile res <- atomically $ do -- wait until target file is not in pendingFiles - Extra.whenM (S.lookup absFile pendingFileSet) STM.retry + Extra.whenM (S.lookup absFile (pendingFileSet sessionState)) STM.retry -- check if in the cache checkInCache ncfp logWith recorder Debug $ LogLookupSessionCache absFile @@ -823,7 +910,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Just r -> return r Nothing -> do -- if not ok, we need to reload the session - atomically $ S.insert absFile pendingFileSet + atomically $ addToPending sessionState absFile lookupOrWaitCache absFile -- see Note [Serializing runs in separate thread] From 80d016094abba178ecff317b186bfe3c52517c43 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Sun, 27 Apr 2025 22:45:14 +0800 Subject: [PATCH 40/90] update hiebois --- cabal.project | 9 +----- ghcide/ghcide.cabal | 2 +- .../session-loader/Development/IDE/Session.hs | 32 +++++++++++-------- 3 files changed, 21 insertions(+), 22 deletions(-) diff --git a/cabal.project b/cabal.project index e54c7f4bfe..59f565677b 100644 --- a/cabal.project +++ b/cabal.project @@ -8,7 +8,7 @@ packages: ./hls-test-utils -index-state: 2025-04-19T07:34:07Z +index-state: 2025-04-26T07:34:07Z tests: True test-show-details: direct @@ -53,13 +53,6 @@ allow-newer: cabal-install-parsers:Cabal-syntax, -source-repository-package - type: git - location: https://github.com/haskell/hie-bios - tag: e372a62b780b1314a35238a698a9e3813096b122 
--- see https://github.com/haskell/haskell-language-server/pull/4445#issuecomment-2705787655 -allow-older: hie-bios:optparse-applicative - if impl(ghc >= 9.11) benchmarks: False allow-newer: diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal index 0ae7b15ce9..eed0ed5919 100644 --- a/ghcide/ghcide.cabal +++ b/ghcide/ghcide.cabal @@ -73,7 +73,7 @@ library , Glob , haddock-library >=1.8 && <1.12 , hashable - , hie-bios ^>=0.14.0 + , hie-bios ^>=0.15.0 , hie-compat ^>=0.3.0.0 , hiedb ^>= 0.6.0.2 , hls-graph == 2.10.0.0 diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 76d10c9d66..a153e15119 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -500,6 +500,23 @@ completeFileProcessing state hieYaml ncfp file flags = do insertFileMapping state hieYaml ncfp removeFromPending state file +-- | Handle successful loading by updating session state with the new file maps +updateSessionOnSuccess :: Recorder (WithPriority Log) -> SessionState -> Maybe FilePath -> HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo) -> [TargetDetails] -> IO () +updateSessionOnSuccess recorder state hieYaml this_flags_map all_targets = do + let newLoaded = Set.fromList $ fromNormalizedFilePath <$> HM.keys this_flags_map + atomically $ do + STM.insert this_flags_map hieYaml (fileToFlags state) + insertAllFileMappings state $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets + forM_ newLoaded $ flip S.delete (pendingFileSet state) + pendingFiles <- getPendingFiles state + let newLoaded = pendingFiles `Set.intersection` newLoaded + -- log new loaded files + logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded + -- remove all new loaded file from error loading files + mapM_ (removeErrorLoadingFile state) (Set.toList newLoaded) + addCradleFiles state newLoaded + return () + -- | Insert multiple file mappings at once insertAllFileMappings :: SessionState -> [(Maybe FilePath, NormalizedFilePath)] -> STM () insertAllFileMappings state mappings = @@ -711,11 +728,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Nothing let newLoaded = Set.fromList $ fromNormalizedFilePath <$> HM.keys this_flags_map - atomically $ do - STM.insert this_flags_map hieYaml (fileToFlags sessionState) - insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets - forM_ newLoaded $ flip S.delete (pendingFileSet sessionState) - + updateSessionOnSuccess recorder sessionState hieYaml this_flags_map all_targets -- Typecheck all files in the project on startup checkProject <- getCheckProject -- The VFS doesn't change on cradle edits, re-use the old one. 
@@ -769,14 +782,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) | compileTime == runTime -> do - (_results, allNewLoaded, restart) <- session (hieYaml, ncfp, opts, libDir) - pendingFiles <- getPendingFiles sessionState - let newLoaded = pendingFiles `Set.intersection` allNewLoaded - -- log new loaded files - logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded - -- remove all new loaded file from error loading files - mapM_ (removeErrorLoadingFile sessionState) (Set.toList allNewLoaded) - addCradleFiles sessionState newLoaded + (_results, _allNewLoaded, restart) <- session (hieYaml, ncfp, opts, libDir) restart | otherwise -> do -- Use the common pattern here: updateFileState From 24269f6e085d1edda2c2d5c3131c3e4282ab760e Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 28 Apr 2025 18:00:58 +0800 Subject: [PATCH 41/90] revert --- .../session-loader/Development/IDE/Session.hs | 32 ++++++++----------- 1 file changed, 13 insertions(+), 19 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index a153e15119..76d10c9d66 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -500,23 +500,6 @@ completeFileProcessing state hieYaml ncfp file flags = do insertFileMapping state hieYaml ncfp removeFromPending state file --- | Handle successful loading by updating session state with the new file maps -updateSessionOnSuccess :: Recorder (WithPriority Log) -> SessionState -> Maybe FilePath -> HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo) -> [TargetDetails] -> IO () -updateSessionOnSuccess recorder state hieYaml this_flags_map all_targets = do - let newLoaded = Set.fromList $ fromNormalizedFilePath <$> HM.keys this_flags_map - atomically $ do - STM.insert this_flags_map hieYaml (fileToFlags state) - insertAllFileMappings state $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets - forM_ newLoaded $ flip S.delete (pendingFileSet state) - pendingFiles <- getPendingFiles state - let newLoaded = pendingFiles `Set.intersection` newLoaded - -- log new loaded files - logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded - -- remove all new loaded file from error loading files - mapM_ (removeErrorLoadingFile state) (Set.toList newLoaded) - addCradleFiles state newLoaded - return () - -- | Insert multiple file mappings at once insertAllFileMappings :: SessionState -> [(Maybe FilePath, NormalizedFilePath)] -> STM () insertAllFileMappings state mappings = @@ -728,7 +711,11 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Nothing let newLoaded = Set.fromList $ fromNormalizedFilePath <$> HM.keys this_flags_map - updateSessionOnSuccess recorder sessionState hieYaml this_flags_map all_targets + atomically $ do + STM.insert this_flags_map hieYaml (fileToFlags sessionState) + insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets + forM_ newLoaded $ flip S.delete (pendingFileSet sessionState) + -- Typecheck all files in the project on startup checkProject <- getCheckProject -- The VFS doesn't change on cradle edits, re-use the old one. 
@@ -782,7 +769,14 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) | compileTime == runTime -> do - (_results, _allNewLoaded, restart) <- session (hieYaml, ncfp, opts, libDir) + (_results, allNewLoaded, restart) <- session (hieYaml, ncfp, opts, libDir) + pendingFiles <- getPendingFiles sessionState + let newLoaded = pendingFiles `Set.intersection` allNewLoaded + -- log new loaded files + logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded + -- remove all new loaded file from error loading files + mapM_ (removeErrorLoadingFile sessionState) (Set.toList allNewLoaded) + addCradleFiles sessionState newLoaded restart | otherwise -> do -- Use the common pattern here: updateFileState From 98999f55b674cfcd5cd649f180f9f45366b8e5dc Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 28 Apr 2025 19:20:10 +0800 Subject: [PATCH 42/90] restart the shake if cabal file changed --- .../session-loader/Development/IDE/Session.hs | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 76d10c9d66..96c1016399 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -840,6 +840,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- cleanup error loading files and cradle files clearErrorLoadingFiles sessionState clearCradleFiles sessionState + cacheKey <- invalidateShakeCache + restartShakeSession VFSUnmodified "didSessionLoadingPreferenceConfigChange" [] (return [cacheKey]) v <- atomically $ STM.lookup hieYaml (fileToFlags sessionState) case v >>= HM.lookup (toNormalizedFilePath' file) of @@ -870,20 +872,17 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- at a time. Therefore the IORef contains the currently running cradle, if we try -- to get some more options then we wait for the currently running action to finish -- before attempting to do so. 
- let getOptions :: FilePath -> IO () - getOptions file = do - let ncfp = toNormalizedFilePath' file - cachedHieYamlLocation <- atomically $ STM.lookup ncfp (filesMap sessionState) - hieYaml <- cradleLoc file - let hieLoc = join cachedHieYamlLocation <|> hieYaml - sessionOpts (hieLoc, file) `Safe.catch` handleSessionError sessionState hieLoc file let getOptionsLoop :: IO () getOptionsLoop = do -- Get the next file to load - absFile <- atomically $ S.readQueue (pendingFileSet sessionState) - logWith recorder Debug (LogGetOptionsLoop absFile) - getOptions absFile + file <- atomically $ S.readQueue (pendingFileSet sessionState) + logWith recorder Debug (LogGetOptionsLoop file) + let ncfp = toNormalizedFilePath' file + cachedHieYamlLocation <- join <$> atomically (STM.lookup ncfp (filesMap sessionState)) + hieYaml <- cradleLoc file + let hieLoc = cachedHieYamlLocation <|> hieYaml + sessionOpts (hieLoc, file) `Safe.catch` handleSessionError sessionState hieLoc file getOptionsLoop -- | Given a file, this function will return the HscEnv and the dependencies From 21dd23314329a989dd5069817c51549a3f761888 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 28 Apr 2025 23:30:05 +0800 Subject: [PATCH 43/90] better error handling in session loader --- .../session-loader/Development/IDE/Session.hs | 65 +++++++++---------- 1 file changed, 29 insertions(+), 36 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 96c1016399..738af0944a 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -489,16 +489,6 @@ addToPending :: SessionState -> FilePath -> STM () addToPending state file = S.insert file (pendingFileSet state) --- | Common pattern: Insert file flags, insert file mapping, and remove from pending -completeFileProcessing :: SessionState -> Maybe FilePath -> NormalizedFilePath -> FilePath -> (IdeResult HscEnvEq, DependencyInfo) -> IO () -completeFileProcessing state hieYaml ncfp file flags = do --- remove cfp from pending files - addErrorLoadingFile state file - removeCradleFile state file - atomically $ do - insertFileFlags state hieYaml ncfp flags - insertFileMapping state hieYaml ncfp - removeFromPending state file -- | Insert multiple file mappings at once insertAllFileMappings :: SessionState -> [(Maybe FilePath, NormalizedFilePath)] -> STM () @@ -516,10 +506,20 @@ getPendingFiles state = atomically $ Set.fromList <$> S.toUnOrderedList (pending -- | Handle errors during session loading by recording file as having error and removing from pending handleSessionError :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> IO () handleSessionError state hieYaml file e = do - dep <- getDependencyInfo $ maybe [] pure hieYaml + handleFileProcessingError state hieYaml file [renderPackageSetupException file e] mempty + +-- | Common pattern: Insert file flags, insert file mapping, and remove from pending +handleFileProcessingError :: SessionState -> Maybe FilePath -> FilePath -> [FileDiagnostic] -> [FilePath] -> IO () +handleFileProcessingError state hieYaml file diags extraDepFiles = do + addErrorLoadingFile state file + removeCradleFile state file + dep <- getDependencyInfo $ maybeToList hieYaml <> extraDepFiles let ncfp = toNormalizedFilePath' file - let errorResult = (([renderPackageSetupException file e], Nothing), dep) - completeFileProcessing state hieYaml ncfp file errorResult + let flags = ((diags, Nothing), dep) + atomically $ do + 
insertFileFlags state hieYaml ncfp flags + insertFileMapping state hieYaml ncfp + removeFromPending state file -- | Get the set of extra files to load based on the current file path -- If the current file is in error loading files, we fallback to single loading mode (empty set) @@ -679,8 +679,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do pure (Map.insert hieYaml (NE.toList all_deps) m, (new,old)) - let session :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) - -> IO ((IdeResult HscEnvEq,DependencyInfo), HashSet FilePath, IO ()) + let session :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) -> IO () session args@(hieYaml, _cfp, _opts, _libDir) = do (new_deps, old_deps) <- packageSetup args @@ -695,7 +694,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let flags_map' = HM.fromList (concatMap toFlagsMap all_targets') all_targets' = concat all_target_details this_dep_info <- getDependencyInfo $ maybeToList hieYaml - let (all_targets, this_flags_map, this_options) + let (all_targets, this_flags_map, _this_options) = case HM.lookup _cfp flags_map' of Just this -> (all_targets', flags_map', this) Nothing -> (this_target_details : all_targets', HM.insert _cfp this_flags flags_map', this_flags) @@ -710,17 +709,24 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do ]) Nothing - let newLoaded = Set.fromList $ fromNormalizedFilePath <$> HM.keys this_flags_map + pendingFiles <- getPendingFiles sessionState + -- this_flags_map might contains files not in pendingFiles, take the intersection + let newLoaded = pendingFiles `Set.intersection` Set.fromList (fromNormalizedFilePath <$> HM.keys this_flags_map) atomically $ do STM.insert this_flags_map hieYaml (fileToFlags sessionState) insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets forM_ newLoaded $ flip S.delete (pendingFileSet sessionState) + logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded + -- remove all new loaded file from error loading files + mapM_ (removeErrorLoadingFile sessionState) (Set.toList newLoaded) + addCradleFiles sessionState newLoaded -- Typecheck all files in the project on startup checkProject <- getCheckProject + -- The VFS doesn't change on cradle edits, re-use the old one. -- Invalidate all the existing GhcSession build nodes by restarting the Shake session - let restart = restartShakeSession VFSUnmodified "new component" [] $ do + restartShakeSession VFSUnmodified "new component" [] $ do keys2 <- invalidateShakeCache keys1 <- extendKnownTargets all_targets unless (null new_deps || not checkProject) $ do @@ -734,7 +740,6 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) return [keys1, keys2] - return (this_options, newLoaded, restart) let consultCradle :: Maybe FilePath -> FilePath -> IO () consultCradle hieYaml cfp = do @@ -759,29 +764,19 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do return res logWith recorder Debug $ LogSessionLoadingResult eopts + let ncfp = toNormalizedFilePath' cfp case eopts of -- The cradle gave us some options so get to work turning them -- into and HscEnv. 
Right (opts, libDir, version) -> do - let ncfp = toNormalizedFilePath' cfp let compileTime = fullCompilerVersion case reverse $ readP_to_S parseVersion version of [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) - | compileTime == runTime -> do - (_results, allNewLoaded, restart) <- session (hieYaml, ncfp, opts, libDir) - pendingFiles <- getPendingFiles sessionState - let newLoaded = pendingFiles `Set.intersection` allNewLoaded - -- log new loaded files - logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded - -- remove all new loaded file from error loading files - mapM_ (removeErrorLoadingFile sessionState) (Set.toList allNewLoaded) - addCradleFiles sessionState newLoaded - restart + | compileTime == runTime -> session (hieYaml, ncfp, opts, libDir) | otherwise -> do -- Use the common pattern here: updateFileState - completeFileProcessing sessionState hieYaml ncfp cfp - (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing), mempty) + handleFileProcessingError sessionState hieYaml cfp [renderPackageSetupException cfp GhcVersionMismatch{..}] mempty -- Failure case, either a cradle error or the none cradle Left err -> do -- what if the error to load file is one of old_files ? @@ -802,10 +797,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do consultCradle hieYaml cfp else do -- we are only loading this file and it failed - dep_info <- getDependencyInfo (maybeToList hieYaml ++ concatMap cradleErrorDependencies err) - let ncfp = toNormalizedFilePath' cfp - let res = (map (\err' -> renderCradleError err' cradle ncfp) err, Nothing) - completeFileProcessing sessionState hieYaml ncfp cfp (res, dep_info) + let res = map (\err' -> renderCradleError err' cradle ncfp) err + handleFileProcessingError sessionState hieYaml cfp res $ concatMap cradleErrorDependencies err let -- | We allow users to specify a loading strategy. From f140a2afc3a19f1e56babc1dd6e9a4a5b627ea7a Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 28 Apr 2025 23:37:10 +0800 Subject: [PATCH 44/90] refactor error handling in loadSessionWithOptions to improve clarity and logic --- ghcide/session-loader/Development/IDE/Session.hs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 738af0944a..caa8bec577 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -782,8 +782,8 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- what if the error to load file is one of old_files ? let attemptToLoadFiles = Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err old_files <- readIORef (cradle_files sessionState) - let errorToLoadNewFiles = attemptToLoadFiles `Set.difference` old_files - if not (null errorToLoadNewFiles) + let errorToLoadNewFiles = cfp : Set.toList (attemptToLoadFiles `Set.difference` old_files) + if length errorToLoadNewFiles > 1 then do -- we are loading more files and failed, we need to retry -- mark as less loaded files as failedLoadingFiles as possible @@ -791,7 +791,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- are changed, and old_files are not valid anymore. -- but they will still be in the old_files, and will not move to error_loading_files. -- And make other files failed to load in batch mode. 
- addErrorLoadingFiles sessionState (Set.toList errorToLoadNewFiles) + addErrorLoadingFiles sessionState errorToLoadNewFiles -- retry without other files logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles) consultCradle hieYaml cfp From e339c1d3e4869aebd104cc9b0dadb80aba03ae13 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 28 Apr 2025 23:54:37 +0800 Subject: [PATCH 45/90] refactor SessionState management for improved batch loading logic --- .../session-loader/Development/IDE/Session.hs | 93 ++++++++++++------- 1 file changed, 58 insertions(+), 35 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index caa8bec577..3f20e93fc1 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -418,14 +418,33 @@ getHieDbLoc dir = do createDirectoryIfMissing True cDir pure (cDir db) +{- Note [SessionState and batch load] +SessionState manages the state for batch loading files in the session loader. + +- When a new file needs to be loaded, it is added to the pendingFiles set. +- The loader processes files from pendingFiles, attempting to load them in batches. +- If a file is already in failedFiles, it is loaded individually (single-file mode). +- Otherwise, the loader tries to load as many files as possible together (batch mode). + +On success: + - All successfully loaded files are removed from pendingFiles and failedFiles, + and added to loadedFiles. + +On failure: + - If loading a single file fails, it is added to failedFiles and removed from loadedFiles and pendingFiles. + - If batch loading fails, all files attempted are added to failedFiles. + +This approach ensures efficient batch loading while isolating problematic files for individual handling. +-} + data SessionState = SessionState - { cradle_files :: !(IORef (HashSet FilePath)) - , error_loading_files :: !(IORef (HashSet FilePath)) - , hscEnvs :: !(Var HieMap) - , fileToFlags :: !(STM.Map (Maybe FilePath) (HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo))) - , filesMap :: !(STM.Map NormalizedFilePath (Maybe FilePath)) - , pendingFileSet :: !(S.OrderedSet FilePath) - , version :: !(Var Int) + { loadedFiles :: !(IORef (HashSet FilePath)), + failedFiles :: !(IORef (HashSet FilePath)), + pendingFiles :: !(S.OrderedSet FilePath), + hscEnvs :: !(Var HieMap), + fileToFlags :: !(STM.Map (Maybe FilePath) (HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo))), + filesMap :: !(STM.Map NormalizedFilePath (Maybe FilePath)), + version :: !(Var Int) } -- | Helper functions for SessionState management @@ -434,7 +453,7 @@ data SessionState = SessionState -- | Add a file to the set of files with errors during loading addErrorLoadingFile :: SessionState -> FilePath -> IO () addErrorLoadingFile state file = - atomicModifyIORef' (error_loading_files state) (\xs -> (Set.insert file xs, ())) + atomicModifyIORef' (failedFiles state) (\xs -> (Set.insert file xs, ())) addErrorLoadingFiles :: SessionState -> [FilePath] -> IO () addErrorLoadingFiles = mapM_ . addErrorLoadingFile @@ -442,26 +461,26 @@ addErrorLoadingFiles = mapM_ . 
addErrorLoadingFile -- | Remove a file from the set of files with errors during loading removeErrorLoadingFile :: SessionState -> FilePath -> IO () removeErrorLoadingFile state file = - atomicModifyIORef' (error_loading_files state) (\xs -> (Set.delete file xs, ())) + atomicModifyIORef' (failedFiles state) (\xs -> (Set.delete file xs, ())) addCradleFiles :: SessionState -> HashSet FilePath -> IO () addCradleFiles state files = - atomicModifyIORef' (cradle_files state) (\xs -> (files <> xs, ())) + atomicModifyIORef' (loadedFiles state) (\xs -> (files <> xs, ())) -- | Remove a file from the cradle files set removeCradleFile :: SessionState -> FilePath -> IO () removeCradleFile state file = - atomicModifyIORef' (cradle_files state) (\xs -> (Set.delete file xs, ())) + atomicModifyIORef' (loadedFiles state) (\xs -> (Set.delete file xs, ())) -- | Clear error loading files and reset to empty set clearErrorLoadingFiles :: SessionState -> IO () clearErrorLoadingFiles state = - atomicModifyIORef' (error_loading_files state) (\_ -> (Set.empty, ())) + atomicModifyIORef' (failedFiles state) (\_ -> (Set.empty, ())) -- | Clear cradle files and reset to empty set clearCradleFiles :: SessionState -> IO () clearCradleFiles state = - atomicModifyIORef' (cradle_files state) (\_ -> (Set.empty, ())) + atomicModifyIORef' (loadedFiles state) (\_ -> (Set.empty, ())) -- | Reset the file maps in the session state resetFileMaps :: SessionState -> STM () @@ -482,12 +501,12 @@ insertFileMapping state hieYaml ncfp = -- | Remove a file from the pending file set removeFromPending :: SessionState -> FilePath -> STM () removeFromPending state file = - S.delete file (pendingFileSet state) + S.delete file (pendingFiles state) -- | Add a file to the pending file set addToPending :: SessionState -> FilePath -> STM () addToPending state file = - S.insert file (pendingFileSet state) + S.insert file (pendingFiles state) -- | Insert multiple file mappings at once @@ -501,7 +520,7 @@ incrementVersion state = modifyVar' (version state) succ -- | Get files from the pending file set getPendingFiles :: SessionState -> IO (HashSet FilePath) -getPendingFiles state = atomically $ Set.fromList <$> S.toUnOrderedList (pendingFileSet state) +getPendingFiles state = atomically $ Set.fromList <$> S.toUnOrderedList (pendingFiles state) -- | Handle errors during session loading by recording file as having error and removing from pending handleSessionError :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> IO () @@ -527,8 +546,8 @@ handleFileProcessingError state hieYaml file diags extraDepFiles = do getExtraFilesToLoad :: SessionState -> FilePath -> IO [FilePath] getExtraFilesToLoad state cfp = do pendingFiles <- getPendingFiles state - errorFiles <- readIORef (error_loading_files state) - old_files <- readIORef (cradle_files state) + errorFiles <- readIORef (failedFiles state) + old_files <- readIORef (loadedFiles state) -- if the file is in error loading files, we fall back to single loading mode return $ Set.toList $ @@ -537,6 +556,19 @@ getExtraFilesToLoad state cfp = do -- remove error files from pending files since error loading need to load one by one else (Set.delete cfp $ pendingFiles `Set.difference` errorFiles) <> old_files +newSessionState :: IO SessionState +newSessionState = do + -- Initialize SessionState + sessionState <- SessionState + <$> newIORef (Set.fromList []) -- loadedFiles + <*> newIORef (Set.fromList []) -- failedFiles + <*> S.newIO -- pendingFiles + <*> newVar Map.empty -- hscEnvs + <*> STM.newIO 
-- fileToFlags + <*> STM.newIO -- filesMap + <*> newVar 0 -- version + return sessionState + -- | Given a root directory, return a Shake 'Action' which setups an -- 'IdeGhcSession' given a file. -- Some of the many things this does: @@ -555,16 +587,7 @@ loadSessionWithOptions :: Recorder (WithPriority Log) -> SessionLoadingOptions - loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let toAbsolutePath = toAbsolute rootDir -- see Note [Root Directory] - -- Initialize SessionState - sessionState <- SessionState - <$> newIORef (Set.fromList []) -- cradle_files - <*> newIORef (Set.fromList []) -- error_loading_files - <*> newVar Map.empty -- hscEnvs - <*> STM.newIO -- fileToFlags - <*> STM.newIO -- filesMap - <*> S.newIO -- pendingFileSet - <*> newVar 0 -- version - + sessionState <- newSessionState biosSessionLoadingVar <- newVar Nothing :: IO (Var (Maybe SessionLoadingPreferenceConfig)) let returnWithVersion fun = IdeGhcSession fun <$> liftIO (readVar (version sessionState)) @@ -709,13 +732,13 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do ]) Nothing - pendingFiles <- getPendingFiles sessionState + pendings <- getPendingFiles sessionState -- this_flags_map might contains files not in pendingFiles, take the intersection - let newLoaded = pendingFiles `Set.intersection` Set.fromList (fromNormalizedFilePath <$> HM.keys this_flags_map) + let newLoaded = pendings `Set.intersection` Set.fromList (fromNormalizedFilePath <$> HM.keys this_flags_map) atomically $ do STM.insert this_flags_map hieYaml (fileToFlags sessionState) insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets - forM_ newLoaded $ flip S.delete (pendingFileSet sessionState) + forM_ newLoaded $ flip S.delete (pendingFiles sessionState) logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded -- remove all new loaded file from error loading files @@ -781,7 +804,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Left err -> do -- what if the error to load file is one of old_files ? let attemptToLoadFiles = Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err - old_files <- readIORef (cradle_files sessionState) + old_files <- readIORef (loadedFiles sessionState) let errorToLoadNewFiles = cfp : Set.toList (attemptToLoadFiles `Set.difference` old_files) if length errorToLoadNewFiles > 1 then do @@ -789,7 +812,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- mark as less loaded files as failedLoadingFiles as possible -- limitation is that when we are loading files, and the dependencies of old_files -- are changed, and old_files are not valid anymore. - -- but they will still be in the old_files, and will not move to error_loading_files. + -- but they will still be in the old_files, and will not move to failedFiles. -- And make other files failed to load in batch mode. 
addErrorLoadingFiles sessionState errorToLoadNewFiles -- retry without other files @@ -869,7 +892,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let getOptionsLoop :: IO () getOptionsLoop = do -- Get the next file to load - file <- atomically $ S.readQueue (pendingFileSet sessionState) + file <- atomically $ S.readQueue (pendingFiles sessionState) logWith recorder Debug (LogGetOptionsLoop file) let ncfp = toNormalizedFilePath' file cachedHieYamlLocation <- join <$> atomically (STM.lookup ncfp (filesMap sessionState)) @@ -887,7 +910,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let ncfp = toNormalizedFilePath' absFile res <- atomically $ do -- wait until target file is not in pendingFiles - Extra.whenM (S.lookup absFile (pendingFileSet sessionState)) STM.retry + Extra.whenM (S.lookup absFile (pendingFiles sessionState)) STM.retry -- check if in the cache checkInCache ncfp logWith recorder Debug $ LogLookupSessionCache absFile From 1425289cc8fa2ece6b6382e2fb56f76a9eb692d7 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Tue, 29 Apr 2025 00:13:18 +0800 Subject: [PATCH 46/90] refactor session loading error handling for improved clarity and separation of concerns --- .../session-loader/Development/IDE/Session.hs | 39 ++++++++++++------- 1 file changed, 25 insertions(+), 14 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 3f20e93fc1..7f10528e86 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -122,7 +122,6 @@ import Control.Concurrent.STM (STM) import qualified Control.Monad.STM as STM import qualified Development.IDE.Session.OrderedSet as S import qualified Focus -import GHC.Data.Bag import GHC.Driver.Env (hsc_all_home_unit_ids) import GHC.Driver.Errors.Types import GHC.Types.Error (errMsgDiagnostic, @@ -423,20 +422,37 @@ SessionState manages the state for batch loading files in the session loader. - When a new file needs to be loaded, it is added to the pendingFiles set. - The loader processes files from pendingFiles, attempting to load them in batches. -- If a file is already in failedFiles, it is loaded individually (single-file mode). -- Otherwise, the loader tries to load as many files as possible together (batch mode). +- (SBL1) If a file is already in failedFiles, it is loaded individually (single-file mode). +- (SBL2) Otherwise, the loader tries to load as many files as possible together (batch mode). On success: - - All successfully loaded files are removed from pendingFiles and failedFiles, + - (SBL3) All successfully loaded files are removed from pendingFiles and failedFiles, and added to loadedFiles. On failure: - - If loading a single file fails, it is added to failedFiles and removed from loadedFiles and pendingFiles. - - If batch loading fails, all files attempted are added to failedFiles. + - (SBL4) If loading a single file fails, it is added to failedFiles and removed from loadedFiles and pendingFiles. + - (SBL5) If batch loading fails, all files attempted are added to failedFiles. This approach ensures efficient batch loading while isolating problematic files for individual handling. 
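As a purely illustrative walk-through (the module names here are hypothetical, not taken from this patch):
suppose A.hs, B.hs and C.hs are pending and B.hs is already in failedFiles.

  start:      pendingFiles = {A.hs, B.hs, C.hs}   failedFiles = {B.hs}   loadedFiles = {}
  load A.hs:  A.hs and C.hs are attempted together (SBL2); B.hs will later be loaded on its own (SBL1)
  success:    pendingFiles = {B.hs}               failedFiles = {B.hs}   loadedFiles = {A.hs, C.hs}   (SBL3)
  failure:    failedFiles = {A.hs, B.hs, C.hs}, and A.hs/C.hs are retried in single-file mode
              (SBL5, then SBL4 if such a retry also fails)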
-} +handleLoadingSucc :: SessionState -> HashSet FilePath -> IO () +handleLoadingSucc sessionState files = do + atomically $ forM_ (Set.toList files) $ flip S.delete (pendingFiles sessionState) + mapM_ (removeErrorLoadingFile sessionState) (Set.toList files) + addCradleFiles sessionState files + +handleLoadingFailureBatch :: SessionState -> [FilePath] -> IO () +handleLoadingFailureBatch sessionState files = do + addErrorLoadingFiles sessionState files + +handleLoadingFailureSingle :: SessionState -> FilePath -> IO () +handleLoadingFailureSingle sessionState file = do + addErrorLoadingFile sessionState file + removeErrorLoadingFile sessionState file + atomically $ S.delete file (pendingFiles sessionState) + removeCradleFile sessionState file + data SessionState = SessionState { loadedFiles :: !(IORef (HashSet FilePath)), failedFiles :: !(IORef (HashSet FilePath)), @@ -530,15 +546,13 @@ handleSessionError state hieYaml file e = do -- | Common pattern: Insert file flags, insert file mapping, and remove from pending handleFileProcessingError :: SessionState -> Maybe FilePath -> FilePath -> [FileDiagnostic] -> [FilePath] -> IO () handleFileProcessingError state hieYaml file diags extraDepFiles = do - addErrorLoadingFile state file - removeCradleFile state file dep <- getDependencyInfo $ maybeToList hieYaml <> extraDepFiles let ncfp = toNormalizedFilePath' file let flags = ((diags, Nothing), dep) + handleLoadingFailureSingle state file atomically $ do insertFileFlags state hieYaml ncfp flags insertFileMapping state hieYaml ncfp - removeFromPending state file -- | Get the set of extra files to load based on the current file path -- If the current file is in error loading files, we fallback to single loading mode (empty set) @@ -738,12 +752,9 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do atomically $ do STM.insert this_flags_map hieYaml (fileToFlags sessionState) insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets - forM_ newLoaded $ flip S.delete (pendingFiles sessionState) logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded - -- remove all new loaded file from error loading files - mapM_ (removeErrorLoadingFile sessionState) (Set.toList newLoaded) - addCradleFiles sessionState newLoaded + handleLoadingSucc sessionState newLoaded -- Typecheck all files in the project on startup checkProject <- getCheckProject @@ -814,7 +825,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- are changed, and old_files are not valid anymore. -- but they will still be in the old_files, and will not move to failedFiles. -- And make other files failed to load in batch mode. 
- addErrorLoadingFiles sessionState errorToLoadNewFiles + handleLoadingFailureBatch sessionState errorToLoadNewFiles -- retry without other files logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles) consultCradle hieYaml cfp From de6eb9cefebdf27a5487d6251c79b8132a449c79 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Thu, 8 May 2025 16:39:37 +0800 Subject: [PATCH 47/90] cleanup --- ghcide/session-loader/Development/IDE/Session.hs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 7f10528e86..483487c552 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -454,13 +454,13 @@ handleLoadingFailureSingle sessionState file = do removeCradleFile sessionState file data SessionState = SessionState - { loadedFiles :: !(IORef (HashSet FilePath)), - failedFiles :: !(IORef (HashSet FilePath)), + { loadedFiles :: !(IORef (HashSet FilePath)), + failedFiles :: !(IORef (HashSet FilePath)), pendingFiles :: !(S.OrderedSet FilePath), - hscEnvs :: !(Var HieMap), - fileToFlags :: !(STM.Map (Maybe FilePath) (HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo))), - filesMap :: !(STM.Map NormalizedFilePath (Maybe FilePath)), - version :: !(Var Int) + hscEnvs :: !(Var HieMap), + fileToFlags :: !FlagsMap, + filesMap :: !FilesMap, + version :: !(Var Int) } -- | Helper functions for SessionState management From c9926d43536cf576521d19dd86d600427e2e2aba Mon Sep 17 00:00:00 2001 From: soulomoon Date: Thu, 8 May 2025 16:56:00 +0800 Subject: [PATCH 48/90] fix --- .../session-loader/Development/IDE/Session.hs | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 7b574a492f..597d7cffaf 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -893,13 +893,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do checkInCache ncfp = runMaybeT $ do cachedHieYamlLocation <- MaybeT $ STM.lookup ncfp (filesMap sessionState) m <- MaybeT $ STM.lookup cachedHieYamlLocation (fileToFlags sessionState) - -- Each one of deps will be registered as a FileSystemWatcher in the GhcSession action - -- so that we can get a workspace/didChangeWatchedFiles notification when a dep changes. - -- The GlobPattern of a FileSystemWatcher can be absolute or relative. - -- We use the absolute one because it is supported by more LSP clients. - -- Here we make sure deps are absolute and later we use those absolute deps as GlobPattern. - let absolutePathsCradleDeps (eq, deps) = (eq, fmap toAbsolutePath deps) - MaybeT $ pure $ absolutePathsCradleDeps <$> HM.lookup ncfp m + MaybeT $ pure $ HM.lookup ncfp m -- The main function which gets options for a file. We only want one of these running -- at a time. 
Therefore the IORef contains the currently running cradle, if we try @@ -947,9 +941,16 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- see Note [Serializing runs in separate thread] -- Start the getOptionsLoop if the queue is empty liftIO $ atomically $ Extra.whenM (isEmptyTQueue que) $ writeTQueue que getOptionsLoop + + -- Each one of deps will be registered as a FileSystemWatcher in the GhcSession action + -- so that we can get a workspace/didChangeWatchedFiles notification when a dep changes. + -- The GlobPattern of a FileSystemWatcher can be absolute or relative. + -- We use the absolute one because it is supported by more LSP clients. + -- Here we make sure deps are absolute and later we use those absolute deps as GlobPattern. + let absolutePathsCradleDeps (eq, deps) = (eq, fmap toAbsolutePath $ Map.keys deps) returnWithVersion $ \file -> do let absFile = toAbsolutePath file - second Map.keys <$> lookupOrWaitCache absFile + absolutePathsCradleDeps <$> lookupOrWaitCache absFile -- | Run the specific cradle on a specific FilePath via hie-bios. -- This then builds dependencies or whatever based on the cradle, gets the From 48a46d1d084eb295383ebc040a8d68c46556edd9 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Thu, 8 May 2025 17:27:50 +0800 Subject: [PATCH 49/90] add sessionLoadingPreferenceConfig var to SessionState --- .../session-loader/Development/IDE/Session.hs | 47 ++++++++++--------- 1 file changed, 24 insertions(+), 23 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 597d7cffaf..87edfc0513 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -78,7 +78,8 @@ import Ide.Logger (Pretty (pretty), nest, toCologActionWithPrio, vcat, viaShow, (<+>)) -import Ide.Types (SessionLoadingPreferenceConfig (..), +import Ide.Types (Config, + SessionLoadingPreferenceConfig (..), sessionLoading) import Language.LSP.Protocol.Message import Language.LSP.Server @@ -460,7 +461,8 @@ data SessionState = SessionState hscEnvs :: !(Var HieMap), fileToFlags :: !FlagsMap, filesMap :: !FilesMap, - version :: !(Var Int) + version :: !(Var Int), + sessionLoadingPreferenceConfig :: !(Var (Maybe SessionLoadingPreferenceConfig)) } -- | Helper functions for SessionState management @@ -570,6 +572,24 @@ getExtraFilesToLoad state cfp = do -- remove error files from pending files since error loading need to load one by one else (Set.delete cfp $ pendingFiles `Set.difference` errorFiles) <> old_files +-- | We allow users to specify a loading strategy. +-- Check whether this config was changed since the last time we have loaded +-- a session. +-- +-- If the loading configuration changed, we likely should restart the session +-- in its entirety. 
+didSessionLoadingPreferenceConfigChange :: SessionState -> Config -> IO Bool +didSessionLoadingPreferenceConfigChange s clientConfig = do + let biosSessionLoadingVar = sessionLoadingPreferenceConfig s + mLoadingConfig <- readVar biosSessionLoadingVar + case mLoadingConfig of + Nothing -> do + writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) + pure False + Just loadingConfig -> do + writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) + pure (loadingConfig /= sessionLoading clientConfig) + newSessionState :: IO SessionState newSessionState = do -- Initialize SessionState @@ -581,6 +601,7 @@ newSessionState = do <*> STM.newIO -- fileToFlags <*> STM.newIO -- filesMap <*> newVar 0 -- version + <*> newVar Nothing -- sessionLoadingPreferenceConfig return sessionState -- | Given a root directory, return a Shake 'Action' which setups an @@ -602,7 +623,6 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let toAbsolutePath = toAbsolute rootDir -- see Note [Root Directory] sessionState <- newSessionState - biosSessionLoadingVar <- newVar Nothing :: IO (Var (Maybe SessionLoadingPreferenceConfig)) let returnWithVersion fun = IdeGhcSession fun <$> liftIO (readVar (version sessionState)) -- This caches the mapping from Mod.hs -> hie.yaml @@ -833,31 +853,12 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do -- we are only loading this file and it failed let res = map (\err' -> renderCradleError err' cradle ncfp) err handleFileProcessingError sessionState hieYaml cfp res $ concatMap cradleErrorDependencies err - - let - -- | We allow users to specify a loading strategy. - -- Check whether this config was changed since the last time we have loaded - -- a session. - -- - -- If the loading configuration changed, we likely should restart the session - -- in its entirety. - didSessionLoadingPreferenceConfigChange :: IO Bool - didSessionLoadingPreferenceConfigChange = do - mLoadingConfig <- readVar biosSessionLoadingVar - case mLoadingConfig of - Nothing -> do - writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) - pure False - Just loadingConfig -> do - writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) - pure (loadingConfig /= sessionLoading clientConfig) - -- This caches the mapping from hie.yaml + Mod.hs -> [String] -- Returns the Ghc session and the cradle dependencies let sessionOpts :: (Maybe FilePath, FilePath) -> IO () sessionOpts (hieYaml, file) = do - Extra.whenM didSessionLoadingPreferenceConfigChange $ do + Extra.whenM (didSessionLoadingPreferenceConfigChange sessionState clientConfig) $ do logWith recorder Info LogSessionLoadingChanged -- If the dependencies are out of date then clear both caches and start -- again. 
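The check introduced above behaves like a small "store and compare" latch: the first call only records the
client's sessionLoading preference and reports no change, while every later call reports a change exactly when
the recorded preference differs from the current one, which is what sessionOpts uses to clear the caches and
restart the session only when the user actually flips the loading strategy. A minimal standalone sketch of that
behaviour, using a plain IORef instead of the Var used in the patch (the function name and the example values
are illustrative only, not part of the real API):

    import Data.IORef

    -- Store the latest observed value and report whether it differs from the
    -- previously stored one; the very first observation never counts as a change.
    latchChanged :: Eq a => IORef (Maybe a) -> a -> IO Bool
    latchChanged ref new =
      atomicModifyIORef' ref $ \old -> (Just new, maybe False (/= new) old)

    -- ghci> ref <- newIORef (Nothing :: Maybe String)
    -- ghci> latchChanged ref "single"   -- False (first observation)
    -- ghci> latchChanged ref "single"   -- False (unchanged)
    -- ghci> latchChanged ref "multi"    -- True  (preference changed)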
From 702e36752cd63c91c97d4cffbe3332c11cae9881 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Wed, 14 May 2025 18:56:12 +0800 Subject: [PATCH 50/90] refactor SessionState to use Var instead of IORef for loaded and failed files --- .../session-loader/Development/IDE/Session.hs | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 87edfc0513..5d34423c6c 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -455,8 +455,8 @@ handleLoadingFailureSingle sessionState file = do removeCradleFile sessionState file data SessionState = SessionState - { loadedFiles :: !(IORef (HashSet FilePath)), - failedFiles :: !(IORef (HashSet FilePath)), + { loadedFiles :: !(Var (HashSet FilePath)), + failedFiles :: !(Var (HashSet FilePath)), pendingFiles :: !(S.OrderedSet FilePath), hscEnvs :: !(Var HieMap), fileToFlags :: !FlagsMap, @@ -471,7 +471,7 @@ data SessionState = SessionState -- | Add a file to the set of files with errors during loading addErrorLoadingFile :: SessionState -> FilePath -> IO () addErrorLoadingFile state file = - atomicModifyIORef' (failedFiles state) (\xs -> (Set.insert file xs, ())) + modifyVar_' (failedFiles state) (\xs -> return $ Set.insert file xs) addErrorLoadingFiles :: SessionState -> [FilePath] -> IO () addErrorLoadingFiles = mapM_ . addErrorLoadingFile @@ -479,26 +479,26 @@ addErrorLoadingFiles = mapM_ . addErrorLoadingFile -- | Remove a file from the set of files with errors during loading removeErrorLoadingFile :: SessionState -> FilePath -> IO () removeErrorLoadingFile state file = - atomicModifyIORef' (failedFiles state) (\xs -> (Set.delete file xs, ())) + modifyVar_' (failedFiles state) (\xs -> return $ Set.delete file xs) addCradleFiles :: SessionState -> HashSet FilePath -> IO () addCradleFiles state files = - atomicModifyIORef' (loadedFiles state) (\xs -> (files <> xs, ())) + modifyVar_' (loadedFiles state) (\xs -> return $ files <> xs) -- | Remove a file from the cradle files set removeCradleFile :: SessionState -> FilePath -> IO () removeCradleFile state file = - atomicModifyIORef' (loadedFiles state) (\xs -> (Set.delete file xs, ())) + modifyVar_' (loadedFiles state) (\xs -> return $ Set.delete file xs) -- | Clear error loading files and reset to empty set clearErrorLoadingFiles :: SessionState -> IO () clearErrorLoadingFiles state = - atomicModifyIORef' (failedFiles state) (\_ -> (Set.empty, ())) + modifyVar_' (failedFiles state) (const $ return Set.empty) -- | Clear cradle files and reset to empty set clearCradleFiles :: SessionState -> IO () clearCradleFiles state = - atomicModifyIORef' (loadedFiles state) (\_ -> (Set.empty, ())) + modifyVar_' (loadedFiles state) (const $ return Set.empty) -- | Reset the file maps in the session state resetFileMaps :: SessionState -> STM () @@ -562,8 +562,8 @@ handleFileProcessingError state hieYaml file diags extraDepFiles = do getExtraFilesToLoad :: SessionState -> FilePath -> IO [FilePath] getExtraFilesToLoad state cfp = do pendingFiles <- getPendingFiles state - errorFiles <- readIORef (failedFiles state) - old_files <- readIORef (loadedFiles state) + errorFiles <- readVar (failedFiles state) + old_files <- readVar (loadedFiles state) -- if the file is in error loading files, we fall back to single loading mode return $ Set.toList $ @@ -594,8 +594,8 @@ newSessionState :: IO SessionState newSessionState = do -- Initialize 
SessionState sessionState <- SessionState - <$> newIORef (Set.fromList []) -- loadedFiles - <*> newIORef (Set.fromList []) -- failedFiles + <$> newVar (Set.fromList []) -- loadedFiles + <*> newVar (Set.fromList []) -- failedFiles <*> S.newIO -- pendingFiles <*> newVar Map.empty -- hscEnvs <*> STM.newIO -- fileToFlags @@ -835,7 +835,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do Left err -> do -- what if the error to load file is one of old_files ? let attemptToLoadFiles = Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err - old_files <- readIORef (loadedFiles sessionState) + old_files <- readVar (loadedFiles sessionState) let errorToLoadNewFiles = cfp : Set.toList (attemptToLoadFiles `Set.difference` old_files) if length errorToLoadNewFiles > 1 then do From 09213d333342398b46aba48f12764c05cd490926 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 19 May 2025 22:24:59 +0800 Subject: [PATCH 51/90] simplified --- .../session-loader/Development/IDE/Session.hs | 54 +++++++++---------- 1 file changed, 25 insertions(+), 29 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 5d34423c6c..42290e87e5 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -437,16 +437,26 @@ On failure: This approach ensures efficient batch loading while isolating problematic files for individual handling. -} -handleLoadingSucc :: SessionState -> HashSet FilePath -> IO () -handleLoadingSucc sessionState files = do - atomically $ forM_ (Set.toList files) $ flip S.delete (pendingFiles sessionState) - mapM_ (removeErrorLoadingFile sessionState) (Set.toList files) - addCradleFiles sessionState files +-- SBL3 +handleLoadingSuccBatch :: Foldable t => Recorder (WithPriority Log) -> SessionState -> Maybe FilePath -> HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo) -> t TargetDetails -> IO () +handleLoadingSuccBatch recorder sessionState hieYaml this_flags_map all_targets = do + pendings <- getPendingFiles sessionState + -- this_flags_map might contains files not in pendingFiles, take the intersection + let newLoaded = pendings `Set.intersection` Set.fromList (fromNormalizedFilePath <$> HM.keys this_flags_map) + atomically $ do + STM.insert this_flags_map hieYaml (fileToFlags sessionState) + insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets + logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded + atomically $ forM_ (Set.toList newLoaded) $ flip S.delete (pendingFiles sessionState) + mapM_ (removeErrorLoadingFile sessionState) (Set.toList newLoaded) + addCradleFiles sessionState newLoaded +-- SBL5 handleLoadingFailureBatch :: SessionState -> [FilePath] -> IO () handleLoadingFailureBatch sessionState files = do - addErrorLoadingFiles sessionState files + mapM_ (addErrorLoadingFile sessionState) files +-- SBL4 handleLoadingFailureSingle :: SessionState -> FilePath -> IO () handleLoadingFailureSingle sessionState file = do addErrorLoadingFile sessionState file @@ -473,9 +483,6 @@ addErrorLoadingFile :: SessionState -> FilePath -> IO () addErrorLoadingFile state file = modifyVar_' (failedFiles state) (\xs -> return $ Set.insert file xs) -addErrorLoadingFiles :: SessionState -> [FilePath] -> IO () -addErrorLoadingFiles = mapM_ . 
addErrorLoadingFile - -- | Remove a file from the set of files with errors during loading removeErrorLoadingFile :: SessionState -> FilePath -> IO () removeErrorLoadingFile state file = @@ -526,7 +533,6 @@ addToPending :: SessionState -> FilePath -> STM () addToPending state file = S.insert file (pendingFiles state) - -- | Insert multiple file mappings at once insertAllFileMappings :: SessionState -> [(Maybe FilePath, NormalizedFilePath)] -> STM () insertAllFileMappings state mappings = @@ -541,13 +547,13 @@ getPendingFiles :: SessionState -> IO (HashSet FilePath) getPendingFiles state = atomically $ Set.fromList <$> S.toUnOrderedList (pendingFiles state) -- | Handle errors during session loading by recording file as having error and removing from pending -handleSessionError :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> IO () -handleSessionError state hieYaml file e = do - handleFileProcessingError state hieYaml file [renderPackageSetupException file e] mempty +handleSingleFileProcessingError' :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> IO () +handleSingleFileProcessingError' state hieYaml file e = do + handleSingleFileProcessingError state hieYaml file [renderPackageSetupException file e] mempty -- | Common pattern: Insert file flags, insert file mapping, and remove from pending -handleFileProcessingError :: SessionState -> Maybe FilePath -> FilePath -> [FileDiagnostic] -> [FilePath] -> IO () -handleFileProcessingError state hieYaml file diags extraDepFiles = do +handleSingleFileProcessingError :: SessionState -> Maybe FilePath -> FilePath -> [FileDiagnostic] -> [FilePath] -> IO () +handleSingleFileProcessingError state hieYaml file diags extraDepFiles = do dep <- getDependencyInfo $ maybeToList hieYaml <> extraDepFiles let ncfp = toNormalizedFilePath' file let flags = ((diags, Nothing), dep) @@ -766,15 +772,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do ]) Nothing - pendings <- getPendingFiles sessionState - -- this_flags_map might contains files not in pendingFiles, take the intersection - let newLoaded = pendings `Set.intersection` Set.fromList (fromNormalizedFilePath <$> HM.keys this_flags_map) - atomically $ do - STM.insert this_flags_map hieYaml (fileToFlags sessionState) - insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets - - logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded - handleLoadingSucc sessionState newLoaded + handleLoadingSuccBatch recorder sessionState hieYaml this_flags_map all_targets -- Typecheck all files in the project on startup checkProject <- getCheckProject @@ -828,9 +826,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) | compileTime == runTime -> session (hieYaml, ncfp, opts, libDir) - | otherwise -> do - -- Use the common pattern here: updateFileState - handleFileProcessingError sessionState hieYaml cfp [renderPackageSetupException cfp GhcVersionMismatch{..}] mempty + | otherwise -> handleSingleFileProcessingError' sessionState hieYaml cfp (GhcVersionMismatch{..}) -- Failure case, either a cradle error or the none cradle Left err -> do -- what if the error to load file is one of old_files ? 
@@ -852,7 +848,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do else do -- we are only loading this file and it failed let res = map (\err' -> renderCradleError err' cradle ncfp) err - handleFileProcessingError sessionState hieYaml cfp res $ concatMap cradleErrorDependencies err + handleSingleFileProcessingError sessionState hieYaml cfp res $ concatMap cradleErrorDependencies err -- This caches the mapping from hie.yaml + Mod.hs -> [String] -- Returns the Ghc session and the cradle dependencies let sessionOpts :: (Maybe FilePath, FilePath) @@ -909,7 +905,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do cachedHieYamlLocation <- join <$> atomically (STM.lookup ncfp (filesMap sessionState)) hieYaml <- cradleLoc file let hieLoc = cachedHieYamlLocation <|> hieYaml - sessionOpts (hieLoc, file) `Safe.catch` handleSessionError sessionState hieLoc file + sessionOpts (hieLoc, file) `Safe.catch` handleSingleFileProcessingError' sessionState hieLoc file getOptionsLoop -- | Given a file, this function will return the HscEnv and the dependencies From 256f83434a64018c55a4edddb2ca5dc6a905a564 Mon Sep 17 00:00:00 2001 From: Sean Gillespie Date: Fri, 20 Jun 2025 04:15:23 -0400 Subject: [PATCH 52/90] CI: Fix hls-eval-plugin tests for GHC-9.10 (#4638) --- .../test/testdata/TPropertyError.ghc910.expected.hs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc910.expected.hs b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc910.expected.hs index e3208e37f5..87fbda03f8 100644 --- a/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc910.expected.hs +++ b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc910.expected.hs @@ -10,4 +10,8 @@ module TProperty where -- errorEmptyList, called at libraries/ghc-internal/src/GHC/Internal/List.hs:96:11 in ghc-internal:GHC.Internal.List -- badHead, called at libraries/ghc-internal/src/GHC/Internal/List.hs:90:28 in ghc-internal:GHC.Internal.List -- head, called at :1:27 in interactive:Ghci2 +-- HasCallStack backtrace: +-- collectBacktraces, called at libraries/ghc-internal/src/GHC/Internal/Exception.hs:92:13 in ghc-internal:GHC.Internal.Exception +-- toExceptionWithBacktrace, called at libraries/ghc-internal/src/GHC/Internal/Exception.hs:128:3 in ghc-internal:GHC.Internal.Exception +-- -- [] From 0a9b1cb3ed772e52904e6b5ed6e6f2b2134dfb03 Mon Sep 17 00:00:00 2001 From: Jan Hrcek <2716069+jhrcek@users.noreply.github.com> Date: Fri, 20 Jun 2025 12:31:37 +0200 Subject: [PATCH 53/90] Fix renaming data constructors with fields (resolves #2915, resolves #4083) (#4635) * Prevent renaming record fields whenever record constructor is renamed * wip * WAP * Update stack yamls, add RecordWildcard test * Looks like RecordWildcards renaming is only broken on GHC 9.6 and 9.8 * Consolidate comment, undo whitespace changes --------- Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- cabal.project | 2 +- ghcide/ghcide.cabal | 2 +- haskell-language-server.cabal | 4 +-- .../src/Ide/Plugin/Rename.hs | 33 ++++++++++++++----- plugins/hls-rename-plugin/test/Main.hs | 7 +++- .../DataConstructorWithFields.expected.hs | 14 ++++++++ .../testdata/DataConstructorWithFields.hs | 14 ++++++++ ...uctorWithFieldsRecordWildcards.expected.hs | 5 +++ ...ataConstructorWithFieldsRecordWildcards.hs | 5 +++ stack-lts22.yaml | 2 +- stack.yaml | 2 +- 11 files changed, 75 insertions(+), 15 deletions(-) create mode 100644 
plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.expected.hs create mode 100644 plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.hs create mode 100644 plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.expected.hs create mode 100644 plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.hs diff --git a/cabal.project b/cabal.project index 3d43dff2f4..92954ec729 100644 --- a/cabal.project +++ b/cabal.project @@ -8,7 +8,7 @@ packages: ./hls-test-utils -index-state: 2025-06-07T14:57:40Z +index-state: 2025-06-16T09:44:13Z tests: True test-show-details: direct diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal index 4d4b481c14..416e389f2f 100644 --- a/ghcide/ghcide.cabal +++ b/ghcide/ghcide.cabal @@ -75,7 +75,7 @@ library , hashable , hie-bios ^>=0.15.0 , hie-compat ^>=0.3.0.0 - , hiedb ^>= 0.6.0.2 + , hiedb ^>= 0.7.0.0 , hls-graph == 2.11.0.0 , hls-plugin-api == 2.11.0.0 , implicit-hie >= 0.1.4.0 && < 0.1.5 diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index f49c619ec1..42e8d11b60 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -407,7 +407,7 @@ library hls-call-hierarchy-plugin , containers , extra , ghcide == 2.11.0.0 - , hiedb ^>= 0.6.0.2 + , hiedb ^>= 0.7.0.0 , hls-plugin-api == 2.11.0.0 , lens , lsp >=2.7 @@ -594,7 +594,7 @@ library hls-rename-plugin , containers , ghcide == 2.11.0.0 , hashable - , hiedb ^>= 0.6.0.2 + , hiedb ^>= 0.7.0.0 , hie-compat , hls-plugin-api == 2.11.0.0 , haskell-language-server:hls-refactor-plugin diff --git a/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs b/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs index 7cc1122982..2fdbee3ebc 100644 --- a/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs +++ b/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs @@ -25,7 +25,6 @@ import Data.List.NonEmpty (NonEmpty ((:|)), import qualified Data.Map as M import Data.Maybe import Data.Mod.Word -import qualified Data.Set as S import qualified Data.Text as T import Development.IDE (Recorder, WithPriority, usePropertyAction) @@ -42,7 +41,9 @@ import qualified Development.IDE.GHC.ExactPrint as E import Development.IDE.Plugin.CodeAction import Development.IDE.Spans.AtPoint import Development.IDE.Types.Location +import HieDb ((:.) (..)) import HieDb.Query +import HieDb.Types (RefRow (refIsGenerated)) import Ide.Plugin.Error import Ide.Plugin.Properties import Ide.PluginUtils @@ -196,6 +197,8 @@ refsAtName state nfp name = do dbRefs <- case nameModule_maybe name of Nothing -> pure [] Just mod -> liftIO $ mapMaybe rowToLoc <$> withHieDb (\hieDb -> + -- See Note [Generated references] + filter (\(refRow HieDb.:. _) -> refIsGenerated refRow) <$> findReferences hieDb True @@ -230,15 +233,29 @@ handleGetHieAst state nfp = -- which is bad (see https://github.com/haskell/haskell-language-server/issues/3799) fmap removeGenerated $ runActionE "Rename.GetHieAst" state $ useE GetHieAst nfp --- | We don't want to rename in code generated by GHC as this gives false positives. --- So we restrict the HIE file to remove all the generated code. +{- Note [Generated references] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +GHC inserts `Use`s of record constructor everywhere where its record selectors are used, +which leads to record fields being renamed whenever corresponding constructor is renamed. +see https://github.com/haskell/haskell-language-server/issues/2915 +To work around this, we filter out compiler-generated references. 
+-} removeGenerated :: HieAstResult -> HieAstResult -removeGenerated HAR{..} = HAR{hieAst = go hieAst,..} +removeGenerated HAR{..} = + HAR{hieAst = sourceOnlyAsts, refMap = sourceOnlyRefMap, ..} where - go :: HieASTs a -> HieASTs a - go hf = - HieASTs (fmap goAst (getAsts hf)) - goAst (Node nsi sp xs) = Node (SourcedNodeInfo $ M.restrictKeys (getSourcedNodeInfo nsi) (S.singleton SourceInfo)) sp (map goAst xs) + goAsts :: HieASTs a -> HieASTs a + goAsts (HieASTs asts) = HieASTs (fmap goAst asts) + + goAst :: HieAST a -> HieAST a + goAst (Node (SourcedNodeInfo sniMap) sp children) = + let sourceOnlyNodeInfos = SourcedNodeInfo $ M.delete GeneratedInfo sniMap + in Node sourceOnlyNodeInfos sp $ map goAst children + + sourceOnlyAsts = goAsts hieAst + -- Also need to regenerate the RefMap, because the one in HAR + -- is generated from HieASTs containing GeneratedInfo + sourceOnlyRefMap = generateReferencesMap $ getAsts sourceOnlyAsts collectWith :: (Hashable a, Eq b) => (a -> b) -> HashSet a -> [(b, HashSet a)] collectWith f = map (\(a :| as) -> (f a, HS.fromList (a:as))) . groupWith f . HS.toList diff --git a/plugins/hls-rename-plugin/test/Main.hs b/plugins/hls-rename-plugin/test/Main.hs index 5f7fb818ff..b935e6563f 100644 --- a/plugins/hls-rename-plugin/test/Main.hs +++ b/plugins/hls-rename-plugin/test/Main.hs @@ -24,6 +24,11 @@ tests :: TestTree tests = testGroup "Rename" [ goldenWithRename "Data constructor" "DataConstructor" $ \doc -> rename doc (Position 0 15) "Op" + , goldenWithRename "Data constructor with fields" "DataConstructorWithFields" $ \doc -> + rename doc (Position 1 13) "FooRenamed" + , knownBrokenForGhcVersions [GHC96, GHC98] "renaming Constructor{..} with RecordWildcard removes .." $ + goldenWithRename "Data constructor with fields" "DataConstructorWithFieldsRecordWildcards" $ \doc -> + rename doc (Position 1 13) "FooRenamed" , goldenWithRename "Exported function" "ExportedFunction" $ \doc -> rename doc (Position 2 1) "quux" , goldenWithRename "Field Puns" "FieldPuns" $ \doc -> @@ -113,7 +118,7 @@ goldenWithRename title path act = goldenWithHaskellDoc (def { plugins = M.fromList [("rename", def { plcConfig = "crossModule" .= True })] }) renamePlugin title testDataDir path "expected" "hs" act -renameExpectError :: (TResponseError Method_TextDocumentRename) -> TextDocumentIdentifier -> Position -> Text -> Session () +renameExpectError :: TResponseError Method_TextDocumentRename -> TextDocumentIdentifier -> Position -> Text -> Session () renameExpectError expectedError doc pos newName = do let params = RenameParams Nothing doc pos newName rsp <- request SMethod_TextDocumentRename params diff --git a/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.expected.hs b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.expected.hs new file mode 100644 index 0000000000..5fc38c7f01 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.expected.hs @@ -0,0 +1,14 @@ +{-# LANGUAGE NamedFieldPuns #-} +data Foo = FooRenamed { a :: Int, b :: Bool } + +foo1 :: Foo +foo1 = FooRenamed { a = 1, b = True } + +foo2 :: Foo +foo2 = FooRenamed 1 True + +fun1 :: Foo -> Int +fun1 FooRenamed {a} = a + +fun2 :: Foo -> Int +fun2 FooRenamed {a = i} = i diff --git a/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.hs b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.hs new file mode 100644 index 0000000000..abd8031096 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFields.hs @@ -0,0 
+1,14 @@ +{-# LANGUAGE NamedFieldPuns #-} +data Foo = Foo { a :: Int, b :: Bool } + +foo1 :: Foo +foo1 = Foo { a = 1, b = True } + +foo2 :: Foo +foo2 = Foo 1 True + +fun1 :: Foo -> Int +fun1 Foo {a} = a + +fun2 :: Foo -> Int +fun2 Foo {a = i} = i diff --git a/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.expected.hs b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.expected.hs new file mode 100644 index 0000000000..b5dd83cecb --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.expected.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE RecordWildCards #-} +data Foo = FooRenamed { a :: Int, b :: Bool } + +fun :: Foo -> Int +fun FooRenamed {..} = a diff --git a/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.hs b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.hs new file mode 100644 index 0000000000..8e624b0816 --- /dev/null +++ b/plugins/hls-rename-plugin/test/testdata/DataConstructorWithFieldsRecordWildcards.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE RecordWildCards #-} +data Foo = Foo { a :: Int, b :: Bool } + +fun :: Foo -> Int +fun Foo {..} = a diff --git a/stack-lts22.yaml b/stack-lts22.yaml index 7306295a8a..63efc35f30 100644 --- a/stack-lts22.yaml +++ b/stack-lts22.yaml @@ -21,7 +21,7 @@ allow-newer-deps: extra-deps: - Diff-0.5 - floskell-0.11.1 - - hiedb-0.6.0.2 + - hiedb-0.7.0.0 - hie-bios-0.15.0 - implicit-hie-0.1.4.0 - lsp-2.7.0.0 diff --git a/stack.yaml b/stack.yaml index ba89370091..f6dd73d66a 100644 --- a/stack.yaml +++ b/stack.yaml @@ -22,7 +22,7 @@ allow-newer-deps: extra-deps: - floskell-0.11.1 - - hiedb-0.6.0.2 + - hiedb-0.7.0.0 - implicit-hie-0.1.4.0 - hie-bios-0.15.0 - hw-fingertree-0.1.2.1 From f768db08e2b737604bc72b854602a0ec244ce85d Mon Sep 17 00:00:00 2001 From: fendor Date: Sat, 21 Jun 2025 13:06:55 +0200 Subject: [PATCH 54/90] Extract top-level functions for session initialisation The session initialisation has too many implicit dependencies. To break these apart, we extract local functions and turn them into top-level definition with all parameters explicitly given. This commit only makes sure session initialisation functions are promoted to top-level definitions and tries to simplify them. The top-level definitions are lacking type signatures to make it easier to change them, but we plan to add them back. 
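For reviewers, a minimal sketch of the shape this refactor moves toward (illustrative only: the record and function names below are simplified placeholders, not the API introduced by this patch). The point is that the Shake-side hooks and the loader state become ordinary arguments instead of values captured in a closure, so each helper can be read, typed and tested on its own.

    -- Illustrative sketch, not this patch's code.
    module SketchSessionRefactor where

    import Control.Concurrent.STM

    -- Shake-side hooks the loader needs, bundled into a record instead of
    -- being captured implicitly by a 'let'-bound closure.
    data ShakeHooks = ShakeHooks
      { restartSession  :: String -> IO ()
      , invalidateCache :: IO ()
      }

    -- State owned by the loader, also passed explicitly.
    newtype LoaderState = LoaderState { pendingQueue :: TQueue FilePath }

    -- A formerly local helper becomes a top-level function whose
    -- dependencies are all visible in its parameter list.
    optionsLoop :: ShakeHooks -> LoaderState -> (FilePath -> IO ()) -> IO ()
    optionsLoop hooks st loadOne = do
      file <- atomically $ readTQueue (pendingQueue st)
      loadOne file
      invalidateCache hooks
      restartSession hooks ("reloaded session for " <> file)
      optionsLoop hooks st loadOne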
--- .../session-loader/Development/IDE/Session.hs | 641 +++++++++--------- 1 file changed, 333 insertions(+), 308 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 6060f5ca05..045bdcbc54 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -27,7 +27,7 @@ import Control.Monad.Extra as Extra import Control.Monad.IO.Class import Control.Monad.Trans.Maybe (MaybeT (MaybeT, runMaybeT)) import qualified Crypto.Hash.SHA1 as H -import Data.Aeson hiding (Error) +import Data.Aeson hiding (Error, Key) import Data.Bifunctor import qualified Data.ByteString.Base16 as B16 import qualified Data.ByteString.Char8 as B @@ -59,7 +59,7 @@ import qualified Development.IDE.GHC.Compat.Core as GHC import Development.IDE.GHC.Compat.Env hiding (Logger) import Development.IDE.GHC.Compat.Units (UnitId) import Development.IDE.GHC.Util -import Development.IDE.Graph (Action) +import Development.IDE.Graph (Action, Key) import qualified Development.IDE.Session.Implicit as GhcIde import Development.IDE.Types.Diagnostics import Development.IDE.Types.Exports @@ -438,27 +438,27 @@ This approach ensures efficient batch loading while isolating problematic files -} -- SBL3 -handleLoadingSuccBatch :: Foldable t => Recorder (WithPriority Log) -> SessionState -> Maybe FilePath -> HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo) -> t TargetDetails -> IO () -handleLoadingSuccBatch recorder sessionState hieYaml this_flags_map all_targets = do - pendings <- getPendingFiles sessionState - -- this_flags_map might contains files not in pendingFiles, take the intersection - let newLoaded = pendings `Set.intersection` Set.fromList (fromNormalizedFilePath <$> HM.keys this_flags_map) - atomically $ do - STM.insert this_flags_map hieYaml (fileToFlags sessionState) - insertAllFileMappings sessionState $ map ((hieYaml,) . fst) $ concatMap toFlagsMap all_targets - logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded - atomically $ forM_ (Set.toList newLoaded) $ flip S.delete (pendingFiles sessionState) - mapM_ (removeErrorLoadingFile sessionState) (Set.toList newLoaded) - addCradleFiles sessionState newLoaded +handleBatchLoadSuccess :: Foldable t => Recorder (WithPriority Log) -> SessionState -> Maybe FilePath -> HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo) -> t TargetDetails -> IO () +handleBatchLoadSuccess recorder sessionState hieYaml this_flags_map all_targets = do + pendings <- getPendingFiles sessionState + -- this_flags_map might contains files not in pendingFiles, take the intersection + let newLoaded = pendings `Set.intersection` Set.fromList (fromNormalizedFilePath <$> HM.keys this_flags_map) + atomically $ do + STM.insert this_flags_map hieYaml (fileToFlags sessionState) + insertAllFileMappings sessionState $ map ((hieYaml,) . 
fst) $ concatMap toFlagsMap all_targets + logWith recorder Info $ LogSessionNewLoadedFiles $ Set.toList newLoaded + atomically $ forM_ (Set.toList newLoaded) $ flip S.delete (pendingFiles sessionState) + mapM_ (removeErrorLoadingFile sessionState) (Set.toList newLoaded) + addCradleFiles sessionState newLoaded -- SBL5 -handleLoadingFailureBatch :: SessionState -> [FilePath] -> IO () -handleLoadingFailureBatch sessionState files = do +handleBatchLoadFailure :: SessionState -> [FilePath] -> IO () +handleBatchLoadFailure sessionState files = do mapM_ (addErrorLoadingFile sessionState) files -- SBL4 -handleLoadingFailureSingle :: SessionState -> FilePath -> IO () -handleLoadingFailureSingle sessionState file = do +handleSingleLoadFailure :: SessionState -> FilePath -> IO () +handleSingleLoadFailure sessionState file = do addErrorLoadingFile sessionState file removeErrorLoadingFile sessionState file atomically $ S.delete file (pendingFiles sessionState) @@ -557,7 +557,7 @@ handleSingleFileProcessingError state hieYaml file diags extraDepFiles = do dep <- getDependencyInfo $ maybeToList hieYaml <> extraDepFiles let ncfp = toNormalizedFilePath' file let flags = ((diags, Nothing), dep) - handleLoadingFailureSingle state file + handleSingleLoadFailure state file atomically $ do insertFileFlags state hieYaml ncfp flags insertFileMapping state hieYaml ncfp @@ -642,302 +642,29 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do return $ do clientConfig <- getClientConfigAction - extras@ShakeExtras{restartShakeSession, ideNc, knownTargetsVar, lspEnv + extras@ShakeExtras{ideNc, knownTargetsVar } <- getShakeExtras let invalidateShakeCache = do void $ incrementVersion sessionState return $ toNoFileKey GhcSessionIO - IdeOptions{ optTesting = IdeTesting optTesting - , optCheckProject = getCheckProject - , optExtensions - } <- getIdeOptions - - -- populate the knownTargetsVar with all the - -- files in the project so that `knownFiles` can learn about them and - -- we can generate a complete module graph - let extendKnownTargets newTargets = do - knownTargets <- concatForM newTargets $ \TargetDetails{..} -> - case targetTarget of - TargetFile f -> do - -- If a target file has multiple possible locations, then we - -- assume they are all separate file targets. - -- This happens with '.hs-boot' files if they are in the root directory of the project. - -- GHC reports options such as '-i. A' as 'TargetFile A.hs' instead of 'TargetModule A'. - -- In 'fromTargetId', we dutifully look for '.hs-boot' files and add them to the - -- targetLocations of the TargetDetails. Then we add everything to the 'knownTargetsVar'. - -- However, when we look for a 'Foo.hs-boot' file in 'FindImports.hs', we look for either - -- - -- * TargetFile Foo.hs-boot - -- * TargetModule Foo - -- - -- If we don't generate a TargetFile for each potential location, we will only have - -- 'TargetFile Foo.hs' in the 'knownTargetsVar', thus not find 'TargetFile Foo.hs-boot' - -- and also not find 'TargetModule Foo'. - fs <- filterM (IO.doesFileExist . fromNormalizedFilePath) targetLocations - pure $ map (\fp -> (TargetFile fp, Set.singleton fp)) (nubOrd (f:fs)) - TargetModule _ -> do - found <- filterM (IO.doesFileExist . 
fromNormalizedFilePath) targetLocations - return [(targetTarget, Set.fromList found)] - hasUpdate <- atomically $ do - known <- readTVar knownTargetsVar - let known' = flip mapHashed known $ \k -> unionKnownTargets k (mkKnownTargets knownTargets) - hasUpdate = if known /= known' then Just (unhashed known') else Nothing - writeTVar knownTargetsVar known' - pure hasUpdate - for_ hasUpdate $ \x -> - logWith recorder Debug $ LogKnownFilesUpdated (targetMap x) - return $ toNoFileKey GetKnownTargets - - -- Create a new HscEnv from a hieYaml root and a set of options - let packageSetup :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) - -> IO ([ComponentInfo], [ComponentInfo]) - packageSetup (hieYaml, cfp, opts, libDir) = do - -- Parse DynFlags for the newly discovered component - hscEnv <- emptyHscEnv ideNc libDir - newTargetDfs <- evalGhcEnv hscEnv $ setOptions cfp opts (hsc_dflags hscEnv) rootDir - let deps = componentDependencies opts ++ maybeToList hieYaml - dep_info <- getDependencyInfo deps - -- Now lookup to see whether we are combining with an existing HscEnv - -- or making a new one. The lookup returns the HscEnv and a list of - -- information about other components loaded into the HscEnv - -- (unitId, DynFlag, Targets) - modifyVar (hscEnvs sessionState) $ \m -> do - -- Just deps if there's already an HscEnv - -- Nothing is it's the first time we are making an HscEnv - let oldDeps = Map.lookup hieYaml m - let -- Add the raw information about this component to the list - -- We will modify the unitId and DynFlags used for - -- compilation but these are the true source of - -- information. - new_deps = fmap (\(df, targets) -> RawComponentInfo (homeUnitId_ df) df targets cfp opts dep_info) newTargetDfs - all_deps = new_deps `NE.appendList` fromMaybe [] oldDeps - -- Get all the unit-ids for things in this component - _inplace = map rawComponentUnitId $ NE.toList all_deps - - all_deps' <- forM all_deps $ \RawComponentInfo{..} -> do - let prefix = show rawComponentUnitId - -- See Note [Avoiding bad interface files] - let cacheDirOpts = componentOptions opts - cacheDirs <- liftIO $ getCacheDirs prefix cacheDirOpts - processed_df <- setCacheDirs recorder cacheDirs rawComponentDynFlags - -- The final component information, mostly the same but the DynFlags don't - -- contain any packages which are also loaded - -- into the same component. - pure $ ComponentInfo - { componentUnitId = rawComponentUnitId - , componentDynFlags = processed_df - , componentTargets = rawComponentTargets - , componentFP = rawComponentFP - , componentCOptions = rawComponentCOptions - , componentDependencyInfo = rawComponentDependencyInfo - } - -- Modify the map so the hieYaml now maps to the newly updated - -- ComponentInfos - -- Returns - -- . The information for the new component which caused this cache miss - -- . 
The modified information (without -inplace flags) for - -- existing packages - let (new,old) = NE.splitAt (NE.length new_deps) all_deps' - pure (Map.insert hieYaml (NE.toList all_deps) m, (new,old)) - - - let session :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) -> IO () - session args@(hieYaml, _cfp, _opts, _libDir) = do - (new_deps, old_deps) <- packageSetup args - - -- For each component, now make a new HscEnvEq which contains the - -- HscEnv for the hie.yaml file but the DynFlags for that component - -- For GHC's supporting multi component sessions, we create a shared - -- HscEnv but set the active component accordingly - hscEnv <- emptyHscEnv ideNc _libDir - let new_cache = newComponentCache recorder optExtensions _cfp hscEnv - all_target_details <- new_cache old_deps new_deps - - let flags_map' = HM.fromList (concatMap toFlagsMap all_targets') - all_targets' = concat all_target_details - this_dep_info <- getDependencyInfo $ maybeToList hieYaml - let (all_targets, this_flags_map, _this_options) - = case HM.lookup _cfp flags_map' of - Just this -> (all_targets', flags_map', this) - Nothing -> (this_target_details : all_targets', HM.insert _cfp this_flags flags_map', this_flags) - where - this_target_details = TargetDetails (TargetFile _cfp) this_error_env this_dep_info [_cfp] - this_flags = (this_error_env, this_dep_info) - this_error_env = ([this_error], Nothing) - this_error = ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) _cfp - (T.unlines - [ "No cradle target found. Is this file listed in the targets of your cradle?" - , "If you are using a .cabal file, please ensure that this module is listed in either the exposed-modules or other-modules section" - ]) - Nothing - - handleLoadingSuccBatch recorder sessionState hieYaml this_flags_map all_targets - -- Typecheck all files in the project on startup - checkProject <- getCheckProject - - -- The VFS doesn't change on cradle edits, re-use the old one. - -- Invalidate all the existing GhcSession build nodes by restarting the Shake session - restartShakeSession VFSUnmodified "new component" [] $ do - keys2 <- invalidateShakeCache - keys1 <- extendKnownTargets all_targets - unless (null new_deps || not checkProject) $ do - cfps' <- liftIO $ filterM (IO.doesFileExist . 
fromNormalizedFilePath) (concatMap targetLocations all_targets) - void $ shakeEnqueue extras $ mkDelayedAction "InitialLoad" Debug $ void $ do - mmt <- uses GetModificationTime cfps' - let cs_exist = catMaybes (zipWith (<$) cfps' mmt) - modIfaces <- uses GetModIface cs_exist - -- update exports map - shakeExtras <- getShakeExtras - let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces - liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) - return [keys1, keys2] - - let consultCradle :: Maybe FilePath -> FilePath -> IO () - consultCradle hieYaml cfp = do - let lfpLog = makeRelative rootDir cfp - logWith recorder Info $ LogCradlePath lfpLog - when (isNothing hieYaml) $ - logWith recorder Warning $ LogCradleNotFound lfpLog - cradle <- loadCradle recorder hieYaml rootDir - when optTesting $ mRunLspT lspEnv $ - sendNotification (SMethod_CustomMethod (Proxy @"ghcide/cradle/loaded")) (toJSON cfp) - - -- Display a user friendly progress message here: They probably don't know what a cradle is - let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) - <> " (for " <> T.pack lfpLog <> ")" - - extraToLoads <- getExtraFilesToLoad sessionState cfp - eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ - withTrace "Load cradle" $ \addTag -> do - addTag "file" lfpLog - res <- cradleToOptsAndLibDir recorder (sessionLoading clientConfig) cradle cfp extraToLoads - addTag "result" (show res) - return res - - logWith recorder Debug $ LogSessionLoadingResult eopts - let ncfp = toNormalizedFilePath' cfp - case eopts of - -- The cradle gave us some options so get to work turning them - -- into and HscEnv. - Right (opts, libDir, version) -> do - let compileTime = fullCompilerVersion - case reverse $ readP_to_S parseVersion version of - [] -> error $ "GHC version could not be parsed: " <> version - ((runTime, _):_) - | compileTime == runTime -> session (hieYaml, ncfp, opts, libDir) - | otherwise -> handleSingleFileProcessingError' sessionState hieYaml cfp (GhcVersionMismatch{..}) - -- Failure case, either a cradle error or the none cradle - Left err -> do - -- what if the error to load file is one of old_files ? - let attemptToLoadFiles = Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err - old_files <- readVar (loadedFiles sessionState) - let errorToLoadNewFiles = cfp : Set.toList (attemptToLoadFiles `Set.difference` old_files) - if length errorToLoadNewFiles > 1 - then do - -- we are loading more files and failed, we need to retry - -- mark as less loaded files as failedLoadingFiles as possible - -- limitation is that when we are loading files, and the dependencies of old_files - -- are changed, and old_files are not valid anymore. - -- but they will still be in the old_files, and will not move to failedFiles. - -- And make other files failed to load in batch mode. 
- handleLoadingFailureBatch sessionState errorToLoadNewFiles - -- retry without other files - logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles) - consultCradle hieYaml cfp - else do - -- we are only loading this file and it failed - let res = map (\err' -> renderCradleError err' cradle ncfp) err - handleSingleFileProcessingError sessionState hieYaml cfp res $ concatMap cradleErrorDependencies err - -- This caches the mapping from hie.yaml + Mod.hs -> [String] - -- Returns the Ghc session and the cradle dependencies - let sessionOpts :: (Maybe FilePath, FilePath) - -> IO () - sessionOpts (hieYaml, file) = do - Extra.whenM (didSessionLoadingPreferenceConfigChange sessionState clientConfig) $ do - logWith recorder Info LogSessionLoadingChanged - -- If the dependencies are out of date then clear both caches and start - -- again. - atomically $ resetFileMaps sessionState - -- Don't even keep the name cache, we start from scratch here! - modifyVar_ (hscEnvs sessionState) (const (return Map.empty)) - -- cleanup error loading files and cradle files - clearErrorLoadingFiles sessionState - clearCradleFiles sessionState - cacheKey <- invalidateShakeCache - restartShakeSession VFSUnmodified "didSessionLoadingPreferenceConfigChange" [] (return [cacheKey]) - - v <- atomically $ STM.lookup hieYaml (fileToFlags sessionState) - case v >>= HM.lookup (toNormalizedFilePath' file) of - Just (_opts, old_di) -> do - deps_ok <- checkDependencyInfo old_di - if not deps_ok - then do - -- if deps are old, we can try to load the error files again - removeErrorLoadingFile sessionState file - removeCradleFile sessionState file - -- If the dependencies are out of date then clear both caches and start - -- again. - atomically $ resetFileMaps sessionState - -- Keep the same name cache - modifyVar_ (hscEnvs sessionState) (return . Map.adjust (const []) hieYaml) - consultCradle hieYaml file - -- if deps are ok, we can just remove the file from pending files - else atomically $ removeFromPending sessionState file - Nothing -> consultCradle hieYaml file - - let checkInCache ::NormalizedFilePath -> STM (Maybe (IdeResult HscEnvEq, DependencyInfo)) - checkInCache ncfp = runMaybeT $ do - cachedHieYamlLocation <- MaybeT $ STM.lookup ncfp (filesMap sessionState) - m <- MaybeT $ STM.lookup cachedHieYamlLocation (fileToFlags sessionState) - MaybeT $ pure $ HM.lookup ncfp m - - -- The main function which gets options for a file. We only want one of these running - -- at a time. Therefore the IORef contains the currently running cradle, if we try - -- to get some more options then we wait for the currently running action to finish - -- before attempting to do so. 
- let getOptionsLoop :: IO () - getOptionsLoop = do - -- Get the next file to load - file <- atomically $ S.readQueue (pendingFiles sessionState) - logWith recorder Debug (LogGetOptionsLoop file) - let ncfp = toNormalizedFilePath' file - cachedHieYamlLocation <- join <$> atomically (STM.lookup ncfp (filesMap sessionState)) - hieYaml <- cradleLoc file - let hieLoc = cachedHieYamlLocation <|> hieYaml - sessionOpts (hieLoc, file) `Safe.catch` handleSingleFileProcessingError' sessionState hieLoc file - getOptionsLoop - - -- | Given a file, this function will return the HscEnv and the dependencies - -- it would look up the cache first, if the cache is not available, it would - -- submit a request to the getOptionsLoop to get the options for the file - -- and wait until the options are available - let lookupOrWaitCache :: FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) - lookupOrWaitCache absFile = do - let ncfp = toNormalizedFilePath' absFile - res <- atomically $ do - -- wait until target file is not in pendingFiles - Extra.whenM (S.lookup absFile (pendingFiles sessionState)) STM.retry - -- check if in the cache - checkInCache ncfp - logWith recorder Debug $ LogLookupSessionCache absFile - updateDateRes <- case res of - Just r -> do - depOk <- checkDependencyInfo (snd r) - if depOk - then return $ Just r - else return Nothing - _ -> return Nothing - case updateDateRes of - Just r -> return r - Nothing -> do - -- if not ok, we need to reload the session - atomically $ addToPending sessionState absFile - lookupOrWaitCache absFile + ideOptions <- getIdeOptions -- see Note [Serializing runs in separate thread] -- Start the getOptionsLoop if the queue is empty - liftIO $ atomically $ Extra.whenM (isEmptyTQueue que) $ writeTQueue que getOptionsLoop + liftIO $ atomically $ Extra.whenM (isEmptyTQueue que) $ do + let newSessionLoadingOptions = SessionLoadingOptions + { findCradle = cradleLoc + , .. + } + sessionShake = SessionShake + { restartSession = restartShakeSession extras + , invalidateCache = invalidateShakeCache + , enqueueActions = shakeEnqueue extras + , lspContext = lspEnv extras + } + + writeTQueue que (getOptionsLoop recorder sessionShake sessionState newSessionLoadingOptions ideOptions clientConfig knownTargetsVar rootDir ideNc) -- Each one of deps will be registered as a FileSystemWatcher in the GhcSession action -- so that we can get a workspace/didChangeWatchedFiles notification when a dep changes. 
@@ -947,7 +674,305 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do let absolutePathsCradleDeps (eq, deps) = (eq, fmap toAbsolutePath $ Map.keys deps) returnWithVersion $ \file -> do let absFile = toAbsolutePath file - absolutePathsCradleDeps <$> lookupOrWaitCache absFile + absolutePathsCradleDeps <$> lookupOrWaitCache recorder sessionState absFile + +-- | Given a file, this function will return the HscEnv and the dependencies +-- it would look up the cache first, if the cache is not available, it would +-- submit a request to the getOptionsLoop to get the options for the file +-- and wait until the options are available +lookupOrWaitCache :: Recorder (WithPriority Log) -> SessionState -> FilePath -> IO (IdeResult HscEnvEq, DependencyInfo) +lookupOrWaitCache recorder sessionState absFile = do + let ncfp = toNormalizedFilePath' absFile + res <- atomically $ do + -- wait until target file is not in pendingFiles + Extra.whenM (S.lookup absFile (pendingFiles sessionState)) STM.retry + -- check if in the cache + checkInCache sessionState ncfp + logWith recorder Debug $ LogLookupSessionCache absFile + updateDateRes <- case res of + Just r -> do + depOk <- checkDependencyInfo (snd r) + if depOk + then return $ Just r + else return Nothing + _ -> return Nothing + case updateDateRes of + Just r -> return r + Nothing -> do + -- if not ok, we need to reload the session + atomically $ addToPending sessionState absFile + lookupOrWaitCache recorder sessionState absFile + +checkInCache :: SessionState -> NormalizedFilePath -> STM (Maybe (IdeResult HscEnvEq, DependencyInfo)) +checkInCache sessionState ncfp = runMaybeT $ do + cachedHieYamlLocation <- MaybeT $ STM.lookup ncfp (filesMap sessionState) + m <- MaybeT $ STM.lookup cachedHieYamlLocation (fileToFlags sessionState) + MaybeT $ pure $ HM.lookup ncfp m + +data SessionShake = SessionShake + { restartSession :: VFSModified -> String -> [DelayedAction ()] -> IO [Key] -> IO () + , invalidateCache :: IO Key + , enqueueActions :: DelayedAction () -> IO (IO ()) + , lspContext :: Maybe (LanguageContextEnv Config) + } + +-- The main function which gets options for a file. We only want one of these running +-- at a time. Therefore the IORef contains the currently running cradle, if we try +-- to get some more options then we wait for the currently running action to finish +-- before attempting to do so. 
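+-- Files to load are read one at a time from the 'pendingFiles' queue in
+-- 'sessionState', so only one cradle load is in flight at a time. All other
+-- dependencies (recorder, Shake hooks, loading options, IDE options, client
+-- config, known targets, root directory, name cache) are passed explicitly.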
+getOptionsLoop recorder sessionShake sessionState sessionLoadingOptions ideOptions clientConfig knownTargetsVar rootDir ideNc = do + -- Get the next file to load + file <- atomically $ S.readQueue (pendingFiles sessionState) + logWith recorder Debug (LogGetOptionsLoop file) + let ncfp = toNormalizedFilePath' file + cachedHieYamlLocation <- join <$> atomically (STM.lookup ncfp (filesMap sessionState)) + hieYaml <- findCradle sessionLoadingOptions file + let hieLoc = cachedHieYamlLocation <|> hieYaml + sessionOpts recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc (hieLoc, file) + `Safe.catch` handleSingleFileProcessingError' sessionState hieLoc file + getOptionsLoop recorder sessionShake sessionState sessionLoadingOptions ideOptions clientConfig knownTargetsVar rootDir ideNc + +-- | This caches the mapping from hie.yaml + Mod.hs -> [String] +-- Returns the Ghc session and the cradle dependencies +sessionOpts recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc (hieYaml, file) = do + Extra.whenM (didSessionLoadingPreferenceConfigChange sessionState clientConfig) $ do + logWith recorder Info LogSessionLoadingChanged + -- If the dependencies are out of date then clear both caches and start + -- again. + atomically $ resetFileMaps sessionState + -- Don't even keep the name cache, we start from scratch here! + modifyVar_ (hscEnvs sessionState) (const (return Map.empty)) + -- cleanup error loading files and cradle files + clearErrorLoadingFiles sessionState + clearCradleFiles sessionState + cacheKey <- invalidateCache sessionShake + restartSession sessionShake VFSUnmodified "didSessionLoadingPreferenceConfigChange" [] (return [cacheKey]) + + v <- atomically $ STM.lookup hieYaml (fileToFlags sessionState) + case v >>= HM.lookup (toNormalizedFilePath' file) of + Just (_opts, old_di) -> do + deps_ok <- checkDependencyInfo old_di + if not deps_ok + then do + -- if deps are old, we can try to load the error files again + removeErrorLoadingFile sessionState file + removeCradleFile sessionState file + -- If the dependencies are out of date then clear both caches and start + -- again. + atomically $ resetFileMaps sessionState + -- Keep the same name cache + modifyVar_ (hscEnvs sessionState) (return . Map.adjust (const []) hieYaml) + consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc hieYaml file + -- if deps are ok, we can just remove the file from pending files + else atomically $ removeFromPending sessionState file + Nothing -> consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc hieYaml file + +consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc hieYaml cfp = do + (cradle, eopts) <- loadCradleWithNotifications recorder (optTesting ideOptions) + (lspContext sessionShake) sessionState (sessionLoading clientConfig) + (loadCradle sessionLoadingOptions) + rootDir hieYaml cfp + logWith recorder Debug $ LogSessionLoadingResult eopts + let ncfp = toNormalizedFilePath' cfp + case eopts of + -- The cradle gave us some options so get to work turning them + -- into and HscEnv. 
+ Right (opts, libDir, version) -> do + let compileTime = fullCompilerVersion + case reverse $ readP_to_S parseVersion version of + [] -> error $ "GHC version could not be parsed: " <> version + ((runTime, _):_) + | compileTime == runTime -> session recorder sessionShake sessionState ideOptions sessionLoadingOptions knownTargetsVar rootDir ideNc (hieYaml, ncfp, opts, libDir) + | otherwise -> handleSingleFileProcessingError' sessionState hieYaml cfp (GhcVersionMismatch{..}) + -- Failure case, either a cradle error or the none cradle + Left err -> do + -- what if the error to load file is one of old_files ? + let attemptToLoadFiles = Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err + old_files <- readVar (loadedFiles sessionState) + let errorToLoadNewFiles = cfp : Set.toList (attemptToLoadFiles `Set.difference` old_files) + if length errorToLoadNewFiles > 1 + then do + -- we are loading more files and failed, we need to retry + -- mark as less loaded files as failedLoadingFiles as possible + -- limitation is that when we are loading files, and the dependencies of old_files + -- are changed, and old_files are not valid anymore. + -- but they will still be in the old_files, and will not move to failedFiles. + -- And make other files failed to load in batch mode. + handleBatchLoadFailure sessionState errorToLoadNewFiles + -- retry without other files + logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles) + consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc hieYaml cfp + else do + -- we are only loading this file and it failed + let res = map (\err' -> renderCradleError err' cradle ncfp) err + handleSingleFileProcessingError sessionState hieYaml cfp res $ concatMap cradleErrorDependencies err + +session recorder sessionShake sessionState ideOptions sessionLoadingOptions knownTargetsVar rootDir ideNc (hieYaml, cfp, opts, libDir) = do + let initEmptyHscEnv = emptyHscEnv ideNc libDir + (new_deps, old_deps) <- packageSetup recorder sessionState rootDir (getCacheDirs sessionLoadingOptions) initEmptyHscEnv (hieYaml, cfp, opts) + + -- For each component, now make a new HscEnvEq which contains the + -- HscEnv for the hie.yaml file but the DynFlags for that component + -- For GHC's supporting multi component sessions, we create a shared + -- HscEnv but set the active component accordingly + hscEnv <- initEmptyHscEnv + let new_cache = newComponentCache recorder (optExtensions ideOptions) cfp hscEnv + all_target_details <- new_cache old_deps new_deps + (all_targets, this_flags_map) <- addErrorTargetIfUnknown all_target_details hieYaml cfp + + handleBatchLoadSuccess recorder sessionState hieYaml this_flags_map all_targets + -- Typecheck all files in the project on startup + loadKnownTargets recorder sessionShake (optCheckProject ideOptions) knownTargetsVar new_deps all_targets + +-- | Create a new HscEnv from a hieYaml root and a set of options +packageSetup recorder sessionState rootDir getCacheDirs newEmptyHscEnv (hieYaml, cfp, opts) = do + -- Parse DynFlags for the newly discovered component + hscEnv <- newEmptyHscEnv + newTargetDfs <- evalGhcEnv hscEnv $ setOptions cfp opts (hsc_dflags hscEnv) rootDir + let deps = componentDependencies opts ++ maybeToList hieYaml + dep_info <- getDependencyInfo deps + -- Now lookup to see whether we are combining with an existing HscEnv + -- or making a new one. 
The lookup returns the HscEnv and a list of + -- information about other components loaded into the HscEnv + -- (unitId, DynFlag, Targets) + modifyVar (hscEnvs sessionState) $ + addComponentInfo recorder getCacheDirs dep_info newTargetDfs (hieYaml, cfp, opts) + +addComponentInfo recorder getCacheDirs dep_info newDynFlags (hieYaml, cfp, opts) m = do + -- Just deps if there's already an HscEnv + -- Nothing is it's the first time we are making an HscEnv + let oldDeps = Map.lookup hieYaml m + let -- Add the raw information about this component to the list + -- We will modify the unitId and DynFlags used for + -- compilation but these are the true source of + -- information. + new_deps = fmap (\(df, targets) -> RawComponentInfo (homeUnitId_ df) df targets cfp opts dep_info) newDynFlags + all_deps = new_deps `NE.appendList` fromMaybe [] oldDeps + -- Get all the unit-ids for things in this component + + all_deps' <- forM all_deps $ \RawComponentInfo{..} -> do + let prefix = show rawComponentUnitId + -- See Note [Avoiding bad interface files] + let cacheDirOpts = componentOptions opts + cacheDirs <- liftIO $ getCacheDirs prefix cacheDirOpts + processed_df <- setCacheDirs recorder cacheDirs rawComponentDynFlags + -- The final component information, mostly the same but the DynFlags don't + -- contain any packages which are also loaded + -- into the same component. + pure $ ComponentInfo + { componentUnitId = rawComponentUnitId + , componentDynFlags = processed_df + , componentTargets = rawComponentTargets + , componentFP = rawComponentFP + , componentCOptions = rawComponentCOptions + , componentDependencyInfo = rawComponentDependencyInfo + } + -- Modify the map so the hieYaml now maps to the newly updated + -- ComponentInfos + -- Returns + -- . The information for the new component which caused this cache miss + -- . The modified information (without -inplace flags) for + -- existing packages + let (new,old) = NE.splitAt (NE.length new_deps) all_deps' + pure (Map.insert hieYaml (NE.toList all_deps) m, (new,old)) + +addErrorTargetIfUnknown all_target_details hieYaml cfp = do + let flags_map' = HM.fromList (concatMap toFlagsMap all_targets') + all_targets' = concat all_target_details + this_dep_info <- getDependencyInfo $ maybeToList hieYaml + let (all_targets, this_flags_map) = case HM.lookup cfp flags_map' of + Just _ -> (all_targets', flags_map') + Nothing -> (this_target_details : all_targets', HM.insert cfp this_flags flags_map') + where + this_target_details = TargetDetails (TargetFile cfp) this_error_env this_dep_info [cfp] + this_flags = (this_error_env, this_dep_info) + this_error_env = ([this_error], Nothing) + this_error = ideErrorWithSource (Just "cradle") (Just DiagnosticSeverity_Error) cfp + (T.unlines + [ "No cradle target found. Is this file listed in the targets of your cradle?" + , "If you are using a .cabal file, please ensure that this module is listed in either the exposed-modules or other-modules section" + ]) + Nothing + pure (all_targets, this_flags_map) + +-- | Populate the knownTargetsVar with all the +-- files in the project so that `knownFiles` can learn about them and +-- we can generate a complete module graph +extendKnownTargets recorder knownTargetsVar newTargets = do + knownTargets <- concatForM newTargets $ \TargetDetails{..} -> + case targetTarget of + TargetFile f -> do + -- If a target file has multiple possible locations, then we + -- assume they are all separate file targets. 
+ -- This happens with '.hs-boot' files if they are in the root directory of the project. + -- GHC reports options such as '-i. A' as 'TargetFile A.hs' instead of 'TargetModule A'. + -- In 'fromTargetId', we dutifully look for '.hs-boot' files and add them to the + -- targetLocations of the TargetDetails. Then we add everything to the 'knownTargetsVar'. + -- However, when we look for a 'Foo.hs-boot' file in 'FindImports.hs', we look for either + -- + -- * TargetFile Foo.hs-boot + -- * TargetModule Foo + -- + -- If we don't generate a TargetFile for each potential location, we will only have + -- 'TargetFile Foo.hs' in the 'knownTargetsVar', thus not find 'TargetFile Foo.hs-boot' + -- and also not find 'TargetModule Foo'. + fs <- filterM (IO.doesFileExist . fromNormalizedFilePath) targetLocations + pure $ map (\fp -> (TargetFile fp, Set.singleton fp)) (nubOrd (f:fs)) + TargetModule _ -> do + found <- filterM (IO.doesFileExist . fromNormalizedFilePath) targetLocations + return [(targetTarget, Set.fromList found)] + hasUpdate <- atomically $ do + known <- readTVar knownTargetsVar + let known' = flip mapHashed known $ \k -> unionKnownTargets k (mkKnownTargets knownTargets) + hasUpdate = if known /= known' then Just (unhashed known') else Nothing + writeTVar knownTargetsVar known' + pure hasUpdate + for_ hasUpdate $ \x -> + logWith recorder Debug $ LogKnownFilesUpdated (targetMap x) + return $ toNoFileKey GetKnownTargets + +loadKnownTargets recorder sessionShake getCheckProject knownTargetsVar new_deps targets = do + checkProject <- getCheckProject + + -- The VFS doesn't change on cradle edits, re-use the old one. + -- Invalidate all the existing GhcSession build nodes by restarting the Shake session + restartSession sessionShake VFSUnmodified "new component" [] $ do + keys2 <- invalidateCache sessionShake + keys1 <- extendKnownTargets recorder knownTargetsVar targets + unless (null new_deps || not checkProject) $ do + cfps' <- liftIO $ filterM (IO.doesFileExist . 
fromNormalizedFilePath) (concatMap targetLocations targets) + void $ enqueueActions sessionShake $ mkDelayedAction "InitialLoad" Debug $ void $ do + mmt <- uses GetModificationTime cfps' + let cs_exist = catMaybes (zipWith (<$) cfps' mmt) + modIfaces <- uses GetModIface cs_exist + -- update exports map + shakeExtras <- getShakeExtras + let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces + liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) + return [keys1, keys2] + +loadCradleWithNotifications recorder (IdeTesting isTesting) lspEnv sessionState sessionPref loadCradle rootDir hieYaml cfp= do + let lfpLog = makeRelative rootDir cfp + logWith recorder Info $ LogCradlePath lfpLog + when (isNothing hieYaml) $ + logWith recorder Warning $ LogCradleNotFound lfpLog + cradle <- loadCradle recorder hieYaml rootDir + when (isTesting) $ mRunLspT lspEnv $ + sendNotification (SMethod_CustomMethod (Proxy @"ghcide/cradle/loaded")) (toJSON cfp) + + -- Display a user friendly progress message here: They probably don't know what a cradle is + let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) + <> " (for " <> T.pack lfpLog <> ")" + + extraToLoads <- getExtraFilesToLoad sessionState cfp + eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ + withTrace "Load cradle" $ \addTag -> do + addTag "file" lfpLog + res <- cradleToOptsAndLibDir recorder sessionPref cradle cfp extraToLoads + addTag "result" (show res) + return res + pure (cradle, eopts) + -- | Run the specific cradle on a specific FilePath via hie-bios. -- This then builds dependencies or whatever based on the cradle, gets the From 8d962704b2dede9391a2ef4a85a9fd00580d54bc Mon Sep 17 00:00:00 2001 From: Sean Gillespie Date: Mon, 23 Jun 2025 08:57:46 -0400 Subject: [PATCH 55/90] Migrate change-type-signature-plugin to use structured diagnostics (#4632) * Migrate change-type-signature-plugin to use structured diagnostics * Refactor: Turn some getter functions into Lenses/Treversals * fix: Use updated traversal for error messages _TcRnMessage -> _TcRnMessageWithCtx * Refactor: Extract additional Prisms/Lenses into a common module --- .../src/Development/IDE/GHC/Compat/Error.hs | 43 +++- haskell-language-server.cabal | 3 + .../src/Ide/Plugin/ChangeTypeSignature.hs | 191 +++++++++++++----- .../test/Main.hs | 60 ++---- .../test/testdata/TExpectedActual.txt | 8 + .../test/testdata/TLocalBinding.txt | 8 + .../test/testdata/TLocalBindingShadow1.txt | 4 + .../test/testdata/TLocalBindingShadow2.txt | 9 + .../test/testdata/TRigidType.txt | 5 + .../test/testdata/TRigidType2.txt | 6 + .../test/testdata/error1.txt | 6 - .../test/testdata/error2.txt | 6 - .../test/testdata/error3.txt | 10 - .../test/testdata/error4.txt | 19 -- .../test/testdata/error5.txt | 15 -- src/HlsPlugins.hs | 2 +- 16 files changed, 240 insertions(+), 155 deletions(-) create mode 100644 plugins/hls-change-type-signature-plugin/test/testdata/TExpectedActual.txt create mode 100644 plugins/hls-change-type-signature-plugin/test/testdata/TLocalBinding.txt create mode 100644 plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow1.txt create mode 100644 plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow2.txt create mode 100644 plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.txt create mode 100644 plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.txt delete mode 100644 
plugins/hls-change-type-signature-plugin/test/testdata/error1.txt delete mode 100644 plugins/hls-change-type-signature-plugin/test/testdata/error2.txt delete mode 100644 plugins/hls-change-type-signature-plugin/test/testdata/error3.txt delete mode 100644 plugins/hls-change-type-signature-plugin/test/testdata/error4.txt delete mode 100644 plugins/hls-change-type-signature-plugin/test/testdata/error5.txt diff --git a/ghcide/src/Development/IDE/GHC/Compat/Error.hs b/ghcide/src/Development/IDE/GHC/Compat/Error.hs index 0255886726..01abbf1a66 100644 --- a/ghcide/src/Development/IDE/GHC/Compat/Error.hs +++ b/ghcide/src/Development/IDE/GHC/Compat/Error.hs @@ -17,16 +17,24 @@ module Development.IDE.GHC.Compat.Error ( DriverMessage (..), -- * General Diagnostics Diagnostic(..), - -- * Prisms for error selection + -- * Prisms and lenses for error selection _TcRnMessage, _TcRnMessageWithCtx, _GhcPsMessage, _GhcDsMessage, _GhcDriverMessage, _TcRnMissingSignature, + _TcRnSolverReport, + _TcRnMessageWithInfo, + reportContextL, + reportContentL, + _MismatchMessage, + _TypeEqMismatchActual, + _TypeEqMismatchExpected, ) where import Control.Lens +import Development.IDE.GHC.Compat (Type) import GHC.Driver.Errors.Types import GHC.HsToCore.Errors.Types import GHC.Tc.Errors.Types @@ -82,3 +90,36 @@ msgEnvelopeErrorL :: Lens' (MsgEnvelope e) e msgEnvelopeErrorL = lens errMsgDiagnostic (\envelope e -> envelope { errMsgDiagnostic = e } ) makePrisms ''TcRnMessage + +makeLensesWith + (lensRules & lensField .~ mappingNamer (pure . (++ "L"))) + ''SolverReportWithCtxt + +-- | Focus 'MismatchMsg' from 'TcSolverReportMsg'. Currently, 'MismatchMsg' can be +-- extracted from 'CannotUnifyVariable' and 'Mismatch' constructors. +_MismatchMessage :: Traversal' TcSolverReportMsg MismatchMsg +_MismatchMessage focus (Mismatch msg t a c) = (\msg' -> Mismatch msg' t a c) <$> focus msg +_MismatchMessage focus (CannotUnifyVariable msg a) = flip CannotUnifyVariable a <$> focus msg +_MismatchMessage _ report = pure report + +-- | Focus 'teq_mismatch_expected' from 'TypeEqMismatch'. +_TypeEqMismatchExpected :: Traversal' MismatchMsg Type +#if MIN_VERSION_ghc(9,12,0) +_TypeEqMismatchExpected focus mismatch@(TypeEqMismatch _ _ _ expected _ _ _) = + (\expected' -> mismatch { teq_mismatch_expected = expected' }) <$> focus expected +#else +_TypeEqMismatchExpected focus mismatch@(TypeEqMismatch _ _ _ _ expected _ _ _) = + (\expected' -> mismatch { teq_mismatch_expected = expected' }) <$> focus expected +#endif +_TypeEqMismatchExpected _ mismatch = pure mismatch + +-- | Focus 'teq_mismatch_actual' from 'TypeEqMismatch'. 
+_TypeEqMismatchActual :: Traversal' MismatchMsg Type +#if MIN_VERSION_ghc(9,12,0) +_TypeEqMismatchActual focus mismatch@(TypeEqMismatch _ _ _ _ actual _ _) = + (\actual' -> mismatch { teq_mismatch_actual = actual' }) <$> focus actual +#else +_TypeEqMismatchActual focus mismatch@(TypeEqMismatch _ _ _ _ _ actual _ _) = + (\actual' -> mismatch { teq_mismatch_expected = actual' }) <$> focus actual +#endif +_TypeEqMismatchActual _ mismatch = pure mismatch diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index 42e8d11b60..ec397952cb 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -1173,12 +1173,14 @@ library hls-change-type-signature-plugin build-depends: , ghcide == 2.11.0.0 , hls-plugin-api == 2.11.0.0 + , lens , lsp-types , regex-tdfa , syb , text , transformers , containers + , ghc default-extensions: DataKinds ExplicitNamespaces @@ -1196,6 +1198,7 @@ test-suite hls-change-type-signature-plugin-tests build-depends: , filepath , haskell-language-server:hls-change-type-signature-plugin + , hls-plugin-api , hls-test-utils == 2.11.0.0 , regex-tdfa , text diff --git a/plugins/hls-change-type-signature-plugin/src/Ide/Plugin/ChangeTypeSignature.hs b/plugins/hls-change-type-signature-plugin/src/Ide/Plugin/ChangeTypeSignature.hs index df776e6d15..8b8b7e7d3a 100644 --- a/plugins/hls-change-type-signature-plugin/src/Ide/Plugin/ChangeTypeSignature.hs +++ b/plugins/hls-change-type-signature-plugin/src/Ide/Plugin/ChangeTypeSignature.hs @@ -1,47 +1,93 @@ +{-# LANGUAGE CPP #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE ViewPatterns #-} -- | An HLS plugin to provide code actions to change type signatures module Ide.Plugin.ChangeTypeSignature (descriptor -- * For Unit Tests + , Log(..) , errorMessageRegexes ) where -import Control.Monad (guard) -import Control.Monad.IO.Class (MonadIO) -import Control.Monad.Trans.Except (ExceptT) -import Data.Foldable (asum) -import qualified Data.Map as Map -import Data.Maybe (mapMaybe) -import Data.Text (Text) -import qualified Data.Text as T -import Development.IDE (realSrcSpanToRange) +import Control.Lens +import Control.Monad (guard) +import Control.Monad.IO.Class (MonadIO) +import Control.Monad.Trans.Class (MonadTrans (lift)) +import Control.Monad.Trans.Except (ExceptT (..)) +import Control.Monad.Trans.Maybe (MaybeT (..), hoistMaybe) +import Data.Foldable (asum) +import qualified Data.Map as Map +import Data.Maybe (catMaybes) +import Data.Text (Text) +import qualified Data.Text as T +import Development.IDE (FileDiagnostic, + IdeState (..), Pretty (..), + Priority (..), Recorder, + WithPriority, + fdLspDiagnosticL, + fdStructuredMessageL, + logWith, realSrcSpanToRange) import Development.IDE.Core.PluginUtils -import Development.IDE.Core.RuleTypes (GetParsedModule (GetParsedModule)) -import Development.IDE.Core.Service (IdeState) -import Development.IDE.GHC.Compat -import Development.IDE.GHC.Util (printOutputable) -import Generics.SYB (extQ, something) -import Ide.Plugin.Error (PluginError, - getNormalizedFilePathE) -import Ide.Types (PluginDescriptor (..), - PluginId (PluginId), - PluginMethodHandler, - defaultPluginDescriptor, - mkPluginHandler) +import Development.IDE.Core.RuleTypes (GetParsedModule (GetParsedModule)) +import Development.IDE.GHC.Compat hiding (vcat) +import Development.IDE.GHC.Compat.Error (_MismatchMessage, + _TcRnMessageWithCtx, + _TcRnMessageWithInfo, + _TcRnSolverReport, + _TypeEqMismatchActual, + _TypeEqMismatchExpected, + msgEnvelopeErrorL, + reportContentL) +import 
Development.IDE.GHC.Util (printOutputable) +import Development.IDE.Types.Diagnostics (_SomeStructuredMessage) +import Generics.SYB (extQ, something) +import GHC.Tc.Errors.Types (ErrInfo (..), + TcRnMessageDetailed (..)) +import qualified Ide.Logger as Logger +import Ide.Plugin.Error (PluginError, + getNormalizedFilePathE) +import Ide.Types (Config, HandlerM, + PluginDescriptor (..), + PluginId (PluginId), + PluginMethodHandler, + defaultPluginDescriptor, + mkPluginHandler) import Language.LSP.Protocol.Message import Language.LSP.Protocol.Types -import Text.Regex.TDFA ((=~)) - -descriptor :: PluginId -> PluginDescriptor IdeState -descriptor plId = (defaultPluginDescriptor plId "Provides a code action to change the type signature of a binding if it is wrong") - { pluginHandlers = mkPluginHandler SMethod_TextDocumentCodeAction (codeActionHandler plId) } - -codeActionHandler :: PluginId -> PluginMethodHandler IdeState 'Method_TextDocumentCodeAction -codeActionHandler plId ideState _ CodeActionParams {_textDocument = TextDocumentIdentifier uri, _context = CodeActionContext diags _ _} = do - nfp <- getNormalizedFilePathE uri - decls <- getDecls plId ideState nfp - let actions = mapMaybe (generateAction plId uri decls) diags - pure $ InL actions +import Text.Regex.TDFA ((=~)) + +data Log + = LogErrInfoCtxt ErrInfo + | LogFindSigLocFailure DeclName + +instance Pretty Log where + pretty = \case + LogErrInfoCtxt (ErrInfo ctxt suppl) -> + Logger.vcat [fromSDoc ctxt, fromSDoc suppl] + LogFindSigLocFailure name -> + pretty ("Lookup signature location failure: " <> name) + where + fromSDoc = pretty . printOutputable + +descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState +descriptor recorder plId = + (defaultPluginDescriptor plId "Provides a code action to change the type signature of a binding if it is wrong") + { pluginHandlers = mkPluginHandler SMethod_TextDocumentCodeAction (codeActionHandler recorder plId) + } + +codeActionHandler + :: Recorder (WithPriority Log) + -> PluginId + -> PluginMethodHandler IdeState 'Method_TextDocumentCodeAction +codeActionHandler recorder plId ideState _ CodeActionParams{_textDocument, _range} = do + let TextDocumentIdentifier uri = _textDocument + nfp <- getNormalizedFilePathE uri + decls <- getDecls plId ideState nfp + + activeDiagnosticsInRange (shakeExtras ideState) nfp _range >>= \case + Nothing -> pure (InL []) + Just fileDiags -> do + actions <- lift $ mapM (generateAction recorder plId uri decls) fileDiags + pure (InL (catMaybes actions)) getDecls :: MonadIO m => PluginId -> IdeState -> NormalizedFilePath -> ExceptT PluginError m [LHsDecl GhcPs] getDecls (PluginId changeTypeSignatureId) state = @@ -67,39 +113,74 @@ data ChangeSignature = ChangeSignature { -- | the location of the declaration signature , declSrcSpan :: RealSrcSpan -- | the diagnostic to solve - , diagnostic :: Diagnostic + , diagnostic :: FileDiagnostic } -- | Create a CodeAction from a Diagnostic -generateAction :: PluginId -> Uri -> [LHsDecl GhcPs] -> Diagnostic -> Maybe (Command |? CodeAction) -generateAction plId uri decls diag = changeSigToCodeAction plId uri <$> diagnosticToChangeSig decls diag +generateAction + :: Recorder (WithPriority Log) + -> PluginId + -> Uri + -> [LHsDecl GhcPs] + -> FileDiagnostic + -> HandlerM Config (Maybe (Command |? 
CodeAction)) +generateAction recorder plId uri decls fileDiag = do + changeSig <- diagnosticToChangeSig recorder decls fileDiag + pure $ + changeSigToCodeAction plId uri <$> changeSig -- | Convert a diagnostic into a ChangeSignature and add the proper SrcSpan -diagnosticToChangeSig :: [LHsDecl GhcPs] -> Diagnostic -> Maybe ChangeSignature -diagnosticToChangeSig decls diagnostic = do - -- regex match on the GHC Error Message - (expectedType, actualType, declName) <- matchingDiagnostic diagnostic - -- Find the definition and it's location - declSrcSpan <- findSigLocOfStringDecl decls expectedType (T.unpack declName) - pure $ ChangeSignature{..} - +diagnosticToChangeSig + :: Recorder (WithPriority Log) + -> [LHsDecl GhcPs] + -> FileDiagnostic + -> HandlerM Config (Maybe ChangeSignature) +diagnosticToChangeSig recorder decls diagnostic = runMaybeT $ do + -- Extract expected, actual, and extra error info + (expectedType, actualType, errInfo) <- hoistMaybe $ do + msg <- diagnostic ^. fdStructuredMessageL ^? _SomeStructuredMessage + tcRnMsg <- msg ^. msgEnvelopeErrorL ^? _TcRnMessageWithCtx + (_, TcRnMessageDetailed errInfo tcRnMsg') <- tcRnMsg ^? _TcRnMessageWithInfo + solverReport <- tcRnMsg' ^? _TcRnSolverReport . _1 . reportContentL + mismatch <- solverReport ^? _MismatchMessage + expectedType <- mismatch ^? _TypeEqMismatchExpected + actualType <- mismatch ^? _TypeEqMismatchActual + + pure (showType expectedType, showType actualType, errInfo) + + logWith recorder Debug (LogErrInfoCtxt errInfo) + + -- Extract the declName from the extra error text + declName <- hoistMaybe (matchingDiagnostic errInfo) + + -- Look up location of declName. If it fails, log it + declSrcSpan <- + case findSigLocOfStringDecl decls expectedType (T.unpack declName) of + Just x -> pure x + Nothing -> do + logWith recorder Debug (LogFindSigLocFailure declName) + hoistMaybe Nothing + + pure ChangeSignature{..} + where + showType :: Type -> Text + showType = T.pack . showSDocUnsafe . pprTidiedType -- | If a diagnostic has the proper message create a ChangeSignature from it -matchingDiagnostic :: Diagnostic -> Maybe (ExpectedSig, ActualSig, DeclName) -matchingDiagnostic Diagnostic{_message} = asum $ map (unwrapMatch . (=~) _message) errorMessageRegexes +matchingDiagnostic :: ErrInfo -> Maybe DeclName +matchingDiagnostic ErrInfo{errInfoContext} = + asum $ map (unwrapMatch . (=~) errInfoTxt) errorMessageRegexes where - unwrapMatch :: (Text, Text, Text, [Text]) -> Maybe (ExpectedSig, ActualSig, DeclName) - -- due to using (.|\n) in regex we have to drop the erroneous, but necessary ("." 
doesn't match newlines), match - unwrapMatch (_, _, _, [expect, actual, _, name]) = Just (expect, actual, name) - unwrapMatch _ = Nothing + unwrapMatch :: (Text, Text, Text, [Text]) -> Maybe DeclName + unwrapMatch (_, _, _, [name]) = Just name + unwrapMatch _ = Nothing + + errInfoTxt = printOutputable errInfoContext -- | List of regexes that match various Error Messages errorMessageRegexes :: [Text] errorMessageRegexes = [ -- be sure to add new Error Messages Regexes at the bottom to not fail any existing tests - "Expected type: (.+)\n +Actual type: (.+)\n(.|\n)+In an equation for ‘(.+)’" - , "Couldn't match expected type ‘(.+)’ with actual type ‘(.+)’\n(.|\n)+In an equation for ‘(.+)’" - -- GHC >9.2 version of the first error regex - , "Expected: (.+)\n +Actual: (.+)\n(.|\n)+In an equation for ‘(.+)’" + "In an equation for ‘(.+)’:" ] -- | Given a String with the name of a declaration, GHC's "Expected Type", find the declaration that matches @@ -147,7 +228,7 @@ changeSigToCodeAction :: PluginId -> Uri -> ChangeSignature -> Command |? CodeAc changeSigToCodeAction (PluginId changeTypeSignatureId) uri ChangeSignature{..} = InR CodeAction { _title = mkChangeSigTitle declName actualType , _kind = Just (CodeActionKind_Custom ("quickfix." <> changeTypeSignatureId)) - , _diagnostics = Just [diagnostic] + , _diagnostics = Just [diagnostic ^. fdLspDiagnosticL ] , _isPreferred = Nothing , _disabled = Nothing , _edit = Just $ mkChangeSigEdit uri declSrcSpan (mkNewSignature declName actualType) diff --git a/plugins/hls-change-type-signature-plugin/test/Main.hs b/plugins/hls-change-type-signature-plugin/test/Main.hs index cd1b152c0b..72a2ab780e 100644 --- a/plugins/hls-change-type-signature-plugin/test/Main.hs +++ b/plugins/hls-change-type-signature-plugin/test/Main.hs @@ -5,7 +5,7 @@ import Data.Either (rights) import Data.Text (Text) import qualified Data.Text as T import qualified Data.Text.IO as TIO -import Ide.Plugin.ChangeTypeSignature (errorMessageRegexes) +import Ide.Plugin.ChangeTypeSignature (Log (..), errorMessageRegexes) import qualified Ide.Plugin.ChangeTypeSignature as ChangeTypeSignature import System.FilePath ((<.>), ()) import Test.Hls (CodeAction (..), Command, @@ -21,8 +21,7 @@ import Test.Hls (CodeAction (..), Command, getCodeActions, goldenWithHaskellDoc, knownBrokenForGhcVersions, - liftIO, - mkPluginTestDescriptor', + liftIO, mkPluginTestDescriptor, openDoc, runSessionWithServer, testCase, testGroup, toEither, type (|?), waitForBuildQueue, @@ -32,16 +31,19 @@ import Text.Regex.TDFA ((=~)) main :: IO () main = defaultTestRunner test -changeTypeSignaturePlugin :: PluginTestDescriptor () -changeTypeSignaturePlugin = mkPluginTestDescriptor' ChangeTypeSignature.descriptor "changeTypeSignature" +changeTypeSignaturePlugin :: PluginTestDescriptor Log +changeTypeSignaturePlugin = + mkPluginTestDescriptor + ChangeTypeSignature.descriptor + "changeTypeSignature" test :: TestTree test = testGroup "changeTypeSignature" [ testRegexes , codeActionTest "TExpectedActual" 4 11 - , knownBrokenForGhcVersions [GHC96 .. GHC912] "Error Message in 9.2+ does not provide enough info" $ + , knownBrokenForGhcVersions [GHC96 .. 
GHC912] "Error Message in 9.6+ does not provide enough info" $ codeActionTest "TRigidType" 4 14 - , codeActionTest "TRigidType2" 4 6 + , codeActionTest "TRigidType2" 4 8 , codeActionTest "TLocalBinding" 7 22 , codeActionTest "TLocalBindingShadow1" 11 8 , codeActionTest "TLocalBindingShadow2" 7 22 @@ -50,43 +52,17 @@ test = testGroup "changeTypeSignature" [ testRegexes :: TestTree testRegexes = testGroup "Regex Testing" [ - testRegexOne - , testRegexTwo - , testRegex921One - ] - -testRegexOne :: TestTree -testRegexOne = testGroup "Regex One" [ - regexTest "error1.txt" regex True - , regexTest "error2.txt" regex True - , regexTest "error3.txt" regex False - , regexTest "error4.txt" regex True - , regexTest "error5.txt" regex True + regexTest "TExpectedActual.txt" regex True + , regexTest "TLocalBinding.txt" regex True + , regexTest "TLocalBindingShadow1.txt" regex True + , regexTest "TLocalBindingShadow2.txt" regex True + -- Error message from GHC currently does not not provide enough info + , regexTest "TRigidType.txt" regex False + , regexTest "TRigidType2.txt" regex True ] where regex = errorMessageRegexes !! 0 -testRegexTwo :: TestTree -testRegexTwo = testGroup "Regex Two" [ - regexTest "error1.txt" regex False - , regexTest "error2.txt" regex False - , regexTest "error3.txt" regex True - , regexTest "error4.txt" regex False - , regexTest "error5.txt" regex False - ] - where - regex = errorMessageRegexes !! 1 - --- test ghc-9.2 error message regex -testRegex921One :: TestTree -testRegex921One = testGroup "Regex One" [ - regexTest "ghc921-error1.txt" regex True - , regexTest "ghc921-error2.txt" regex True - , regexTest "ghc921-error3.txt" regex True - ] - where - regex = errorMessageRegexes !! 2 - testDataDir :: FilePath testDataDir = "plugins" "hls-change-type-signature-plugin" "test" "testdata" @@ -123,8 +99,8 @@ regexTest :: FilePath -> Text -> Bool -> TestTree regexTest fp regex shouldPass = testCase fp $ do msg <- TIO.readFile (testDataDir fp) case (msg =~ regex :: (Text, Text, Text, [Text]), shouldPass) of - ((_, _, _, [_, _, _, _]), True) -> pure () - ((_, _, _, [_, _, _, _]), False) -> assertFailure $ "Unexpected match: " <> fp <> " with " <> T.unpack regex + ((_, _, _, [_]), True) -> pure () + ((_, _, _, [_]), False) -> assertFailure $ "Unexpected match: " <> fp <> " with " <> T.unpack regex (_, True) -> assertFailure $ "Failed to match: " <> fp <> " with " <> T.unpack regex (_, False) -> pure () diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TExpectedActual.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TExpectedActual.txt new file mode 100644 index 0000000000..6a8246a921 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TExpectedActual.txt @@ -0,0 +1,8 @@ +In the expression: go +In an equation for ‘fullSig’: +fullSig + = go + where + go = head . 
reverse + + diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBinding.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBinding.txt new file mode 100644 index 0000000000..3f31dc48b9 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBinding.txt @@ -0,0 +1,8 @@ +Probable cause: ‘forM’ is applied to too few arguments +In the expression: forM +In an equation for ‘test’: test = forM +In the expression: + let + test :: Int -> Int + test = forM + in x + 1 diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow1.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow1.txt new file mode 100644 index 0000000000..ef782e8aec --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow1.txt @@ -0,0 +1,4 @@ +Probable cause: ‘forM’ is applied to too few arguments +In the expression: forM +In an equation for ‘test’: test = forM + diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow2.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow2.txt new file mode 100644 index 0000000000..bea2526eb9 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TLocalBindingShadow2.txt @@ -0,0 +1,9 @@ +Probable cause: ‘forM’ is applied to too few arguments +In the expression: forM +In an equation for ‘test’: test = forM +In the expression: + let + test :: Int -> Int + test = forM + in test x [GHC-83865] + diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.txt new file mode 100644 index 0000000000..f9e78c97ae --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.txt @@ -0,0 +1,5 @@ +In the expression: go . head . reverse +Relevant bindings include + test :: a -> Int + (bound at /home/sgillespie/dev/haskell/haskell-language-server/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType.hs:4:1) [GHC-25897] + diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.txt b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.txt new file mode 100644 index 0000000000..343129a942 --- /dev/null +++ b/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.txt @@ -0,0 +1,6 @@ +In the expression: head +In an equation for ‘test’: test = head +Relevant bindings include + test :: a -> Int + (bound at /home/sgillespie/dev/haskell/haskell-language-server/plugins/hls-change-type-signature-plugin/test/testdata/TRigidType2.hs:4:1) [GHC-25897] + diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/error1.txt b/plugins/hls-change-type-signature-plugin/test/testdata/error1.txt deleted file mode 100644 index 37f0aa4a81..0000000000 --- a/plugins/hls-change-type-signature-plugin/test/testdata/error1.txt +++ /dev/null @@ -1,6 +0,0 @@ - • Couldn't match type ‘Int’ - with ‘Data.HashSet.Internal.HashSet Int’ - Expected type: Int -> Int - Actual type: Data.HashSet.Internal.HashSet Int -> Int - • In the expression: head . toList - In an equation for ‘test’: test = head . 
toList diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/error2.txt b/plugins/hls-change-type-signature-plugin/test/testdata/error2.txt deleted file mode 100644 index 497f8350a5..0000000000 --- a/plugins/hls-change-type-signature-plugin/test/testdata/error2.txt +++ /dev/null @@ -1,6 +0,0 @@ - • Couldn't match type ‘b0 -> t0 a0 -> b0’ with ‘Int’ - Expected type: Int -> Int - Actual type: (b0 -> a0 -> b0) -> b0 -> t0 a0 -> b0 - • Probable cause: ‘foldl’ is applied to too few arguments - In the expression: foldl - In an equation for ‘test’: test = foldl diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/error3.txt b/plugins/hls-change-type-signature-plugin/test/testdata/error3.txt deleted file mode 100644 index 0cbddad7c4..0000000000 --- a/plugins/hls-change-type-signature-plugin/test/testdata/error3.txt +++ /dev/null @@ -1,10 +0,0 @@ - • Couldn't match expected type ‘Int’ with actual type ‘[Int]’ - • In the expression: map (+ x) [1, 2, 3] - In an equation for ‘test’: - test x - = map (+ x) [1, 2, 3] - where - go = head . reverse - | -152 | test x = map (+ x) [1,2,3] - | ^^^^^^^^^^^^^^^^^ diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/error4.txt b/plugins/hls-change-type-signature-plugin/test/testdata/error4.txt deleted file mode 100644 index 323cf7d4db..0000000000 --- a/plugins/hls-change-type-signature-plugin/test/testdata/error4.txt +++ /dev/null @@ -1,19 +0,0 @@ - • Couldn't match type ‘a’ with ‘[[Int]]’ - ‘a’ is a rigid type variable bound by - the type signature for: - test :: forall a. Ord a => a -> Int - at src/Ide/Plugin/ChangeTypeSignature.hs:154:1-25 - Expected type: a -> Int - Actual type: [[Int]] -> Int - • In the expression: go . head . reverse - In an equation for ‘test’: - test - = go . head . reverse - where - go = head . reverse - • Relevant bindings include - test :: a -> Int - (bound at src/Ide/Plugin/ChangeTypeSignature.hs:155:1) - | -155 | test = go . head . 
reverse - | ^^^^^^^^^^^^^^^^^^^ diff --git a/plugins/hls-change-type-signature-plugin/test/testdata/error5.txt b/plugins/hls-change-type-signature-plugin/test/testdata/error5.txt deleted file mode 100644 index a7a5d9a20b..0000000000 --- a/plugins/hls-change-type-signature-plugin/test/testdata/error5.txt +++ /dev/null @@ -1,15 +0,0 @@ - • Couldn't match type ‘(a0 -> m0 b0) -> m0 (t0 b0)’ with ‘Int’ - Expected type: Int -> Int - Actual type: t0 a0 -> (a0 -> m0 b0) -> m0 (t0 b0) - • Probable cause: ‘forM’ is applied to too few arguments - In the expression: forM - In an equation for ‘test’: test = forM - In an equation for ‘implicit’: - implicit - = return OpTEmpty - where - test :: Int -> Int - test = forM - | -82 | test = forM - | ^^^^ diff --git a/src/HlsPlugins.hs b/src/HlsPlugins.hs index 87a1af7392..4c135fc48b 100644 --- a/src/HlsPlugins.hs +++ b/src/HlsPlugins.hs @@ -224,7 +224,7 @@ idePlugins recorder = pluginDescToIdePlugins allPlugins let pId = "codeRange" in CodeRange.descriptor (pluginRecorder pId) pId: #endif #if hls_changeTypeSignature - ChangeTypeSignature.descriptor "changeTypeSignature" : + let pId = "changeTypeSignature" in ChangeTypeSignature.descriptor (pluginRecorder pId) pId : #endif #if hls_gadt GADT.descriptor "gadt" : From f43d81105608930d132fa149e0746f01f46599a6 Mon Sep 17 00:00:00 2001 From: patrick Date: Tue, 24 Jun 2025 05:48:12 +0800 Subject: [PATCH 56/90] bump up hiedb version (#4640) --- ghcide/session-loader/Development/IDE/Session.hs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 78bfb798af..1bcec71181 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -223,7 +223,7 @@ instance Pretty Log where -- | Bump this version number when making changes to the format of the data stored in hiedb hiedbDataVersion :: String -hiedbDataVersion = "1" +hiedbDataVersion = "2" data CacheDirs = CacheDirs { hiCacheDir, hieCacheDir, oCacheDir :: Maybe FilePath} @@ -956,6 +956,8 @@ CallStack (from HasCallStack): expectJust, called at compiler\\typecheck\\FamInst.hs:461:30 in ghc:FamInst ``` +and many more. + To mitigate this, we set the cache directory for each component dependent on the components of the current `HscEnv`, additionally to the component options of the respective components. From 5d221b9e8396ef71d1d72cfb2c866c8066cec5ad Mon Sep 17 00:00:00 2001 From: patrick Date: Thu, 26 Jun 2025 04:28:54 +0800 Subject: [PATCH 57/90] Fix reference fields gives too many results (#4641) References to record fields gives too many results This commit adds tests for references to record fields and updates the symbol retrieval logic to ensure that references to record fields are handled correctly. The changes is small: - The `getNamesAtPoint` function in `AtPoint.hs` now only searches for `Name` that are in the source node from `HieAst`. 
Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- ghcide-test/data/references/Fields.hs | 18 +++++++++++++++++ ghcide-test/data/references/Main.hs | 5 ++++- ghcide-test/data/references/hie.yaml | 2 +- ghcide-test/exe/ReferenceTests.hs | 22 +++++++++++++++++++++ ghcide/src/Development/IDE/Spans/AtPoint.hs | 2 +- 5 files changed, 46 insertions(+), 3 deletions(-) create mode 100644 ghcide-test/data/references/Fields.hs diff --git a/ghcide-test/data/references/Fields.hs b/ghcide-test/data/references/Fields.hs new file mode 100644 index 0000000000..1b935f31c9 --- /dev/null +++ b/ghcide-test/data/references/Fields.hs @@ -0,0 +1,18 @@ +{-# LANGUAGE RecordWildCards #-} +module Fields where + +data Foo = MkFoo + { + barr :: String, + bazz :: String + } + +fooUse0 :: Foo -> String +fooUse0 MkFoo{barr} = "5" + +fooUse1 :: Foo -> String +fooUse1 MkFoo{..} = "6" + +fooUse2 :: String -> String -> Foo +fooUse2 bar baz = + MkFoo{..} diff --git a/ghcide-test/data/references/Main.hs b/ghcide-test/data/references/Main.hs index 4a976f3fd0..aae14355d4 100644 --- a/ghcide-test/data/references/Main.hs +++ b/ghcide-test/data/references/Main.hs @@ -1,7 +1,7 @@ module Main where import References - +import Fields main :: IO () main = return () @@ -12,3 +12,6 @@ b = a + 1 acc :: Account acc = Savings + +fooUse3 :: String -> String -> Foo +fooUse3 bar baz = MkFoo{barr = bar, bazz = baz} diff --git a/ghcide-test/data/references/hie.yaml b/ghcide-test/data/references/hie.yaml index db42bad0c0..9e68765ba1 100644 --- a/ghcide-test/data/references/hie.yaml +++ b/ghcide-test/data/references/hie.yaml @@ -1 +1 @@ -cradle: {direct: {arguments: ["Main","OtherModule","OtherOtherModule","References"]}} +cradle: {direct: {arguments: ["Main","OtherModule","OtherOtherModule","References", "Fields"]}} diff --git a/ghcide-test/exe/ReferenceTests.hs b/ghcide-test/exe/ReferenceTests.hs index cdbf8e472d..758506e54d 100644 --- a/ghcide-test/exe/ReferenceTests.hs +++ b/ghcide-test/exe/ReferenceTests.hs @@ -156,6 +156,28 @@ tests = testGroup "references" , ("References.hs", 16, 0) ] ] + -- Fields.hs does not depend on Main.hs + -- so we can only find references in Fields.hs + , testGroup "references to record fields" + [ referenceTest "references record fields in the same file" + ("Fields.hs", 5, 4) + YesIncludeDeclaration + [ ("Fields.hs", 5, 4) + , ("Fields.hs", 10, 14) + , ("Fields.hs", 13, 14) + ] + + -- Main.hs depends on Fields.hs, so we can find references + -- from Main.hs to Fields.hs + , referenceTest "references record fields cross modules" + ("Main.hs", 16, 24) + YesIncludeDeclaration + [ ("Fields.hs", 5, 4) + , ("Fields.hs", 10, 14) + , ("Fields.hs", 13, 14) + , ("Main.hs", 16, 24) + ] + ] ] -- | When we ask for all references to symbol "foo", should the declaration "foo diff --git a/ghcide/src/Development/IDE/Spans/AtPoint.hs b/ghcide/src/Development/IDE/Spans/AtPoint.hs index a577cae32e..16b4f65b11 100644 --- a/ghcide/src/Development/IDE/Spans/AtPoint.hs +++ b/ghcide/src/Development/IDE/Spans/AtPoint.hs @@ -113,7 +113,7 @@ foiReferencesAtPoint file pos (FOIReferences asts) = getNamesAtPoint :: HieASTs a -> Position -> PositionMapping -> [Name] getNamesAtPoint hf pos mapping = - concat $ pointCommand hf posFile (rights . M.keys . getNodeIds) + concat $ pointCommand hf posFile (rights . M.keys . 
getSourceNodeIds) where posFile = fromMaybe pos $ fromCurrentPosition mapping pos From 29b2ecb53c41edd95d8d28e5a16a293b26acb50f Mon Sep 17 00:00:00 2001 From: Sean Gillespie Date: Tue, 1 Jul 2025 15:43:48 -0400 Subject: [PATCH 58/90] Fix build for GHC 9.10.2 (#4644) The constructor for `TypeEqMismatch` changed at 9.10.2 (not at 9.12 as I previously thought) --- ghcide/src/Development/IDE/GHC/Compat/Error.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ghcide/src/Development/IDE/GHC/Compat/Error.hs b/ghcide/src/Development/IDE/GHC/Compat/Error.hs index 01abbf1a66..e4fb9c26b4 100644 --- a/ghcide/src/Development/IDE/GHC/Compat/Error.hs +++ b/ghcide/src/Development/IDE/GHC/Compat/Error.hs @@ -104,7 +104,7 @@ _MismatchMessage _ report = pure report -- | Focus 'teq_mismatch_expected' from 'TypeEqMismatch'. _TypeEqMismatchExpected :: Traversal' MismatchMsg Type -#if MIN_VERSION_ghc(9,12,0) +#if MIN_VERSION_ghc(9,10,2) _TypeEqMismatchExpected focus mismatch@(TypeEqMismatch _ _ _ expected _ _ _) = (\expected' -> mismatch { teq_mismatch_expected = expected' }) <$> focus expected #else @@ -115,7 +115,7 @@ _TypeEqMismatchExpected _ mismatch = pure mismatch -- | Focus 'teq_mismatch_actual' from 'TypeEqMismatch'. _TypeEqMismatchActual :: Traversal' MismatchMsg Type -#if MIN_VERSION_ghc(9,12,0) +#if MIN_VERSION_ghc(9,10,2) _TypeEqMismatchActual focus mismatch@(TypeEqMismatch _ _ _ _ actual _ _) = (\actual' -> mismatch { teq_mismatch_actual = actual' }) <$> focus actual #else From d4fbc2c339e16b60df12788b2d272bd0884640ff Mon Sep 17 00:00:00 2001 From: fendor Date: Fri, 4 Jul 2025 18:42:25 +0200 Subject: [PATCH 59/90] Remove unused _removeInplacePackages function --- .../session-loader/Development/IDE/Session.hs | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 045bdcbc54..f6ebe43481 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -1324,24 +1324,6 @@ getDependencyInfo fs = Map.fromList <$> mapM do_one fs do_one :: FilePath -> IO (FilePath, Maybe UTCTime) do_one fp = (fp,) . eitherToMaybe <$> safeTryIO (getModificationTime fp) --- | This function removes all the -package flags which refer to packages we --- are going to deal with ourselves. For example, if a executable depends --- on a library component, then this function will remove the library flag --- from the package flags for the executable --- --- There are several places in GHC (for example the call to hptInstances in --- tcRnImports) which assume that all modules in the HPT have the same unit --- ID. Therefore we create a fake one and give them all the same unit id. -_removeInplacePackages --Only used in ghc < 9.4 - :: UnitId -- ^ fake uid to use for our internal component - -> [UnitId] - -> DynFlags - -> (DynFlags, [UnitId]) -_removeInplacePackages fake_uid us df = (setHomeUnitId_ fake_uid $ - df { packageFlags = ps }, uids) - where - (uids, ps) = Compat.filterInplaceUnits us (packageFlags df) - -- | Memoize an IO function, with the characteristics: -- -- * If multiple people ask for a result simultaneously, make sure you only compute it once. 
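As a rough usage sketch of the two traversals patched above (not part of any patch in this series; the helper name and the exact modules providing Type and MismatchMsg are assumptions), a caller can recover both sides of a type mismatch with plain lens previews, which is essentially the shape the change-type-signature plugin relies on:

-- Minimal sketch, assuming the lens package plus the compat traversals shown in the patches above.
import Control.Lens ((^?))
import Development.IDE.GHC.Compat.Error (_TypeEqMismatchActual, _TypeEqMismatchExpected)
import GHC.Core.TyCo.Rep (Type)           -- assumed source of Type
import GHC.Tc.Errors.Types (MismatchMsg)  -- assumed source of MismatchMsg

-- Hypothetical helper: yields Nothing when the message is not a TypeEqMismatch.
expectedAndActual :: MismatchMsg -> Maybe (Type, Type)
expectedAndActual msg =
  (,)
    <$> msg ^? _TypeEqMismatchExpected
    <*> msg ^? _TypeEqMismatchActual
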
From ff807c335960be11df8c8d08271a2c6e73c628c1 Mon Sep 17 00:00:00 2001 From: fendor Date: Fri, 4 Jul 2025 19:37:36 +0200 Subject: [PATCH 60/90] Introduce SessionM for bundling read-only variables --- .../session-loader/Development/IDE/Session.hs | 207 +++++++++++------- 1 file changed, 131 insertions(+), 76 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index f6ebe43481..abeb9cb5d1 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -28,7 +28,6 @@ import Control.Monad.IO.Class import Control.Monad.Trans.Maybe (MaybeT (MaybeT, runMaybeT)) import qualified Crypto.Hash.SHA1 as H import Data.Aeson hiding (Error, Key) -import Data.Bifunctor import qualified Data.ByteString.Base16 as B16 import qualified Data.ByteString.Char8 as B import Data.Default @@ -36,7 +35,6 @@ import Data.Either.Extra import Data.Function import Data.Hashable hiding (hash) import qualified Data.HashMap.Strict as HM -import Data.IORef import Data.List import Data.List.Extra as L import Data.List.NonEmpty (NonEmpty (..)) @@ -119,7 +117,7 @@ import qualified System.Random as Random import System.Random (RandomGen) import Text.ParserCombinators.ReadP (readP_to_S) -import Control.Concurrent.STM (STM) +import Control.Concurrent.STM (STM, TVar) import qualified Control.Monad.STM as STM import qualified Development.IDE.Session.OrderedSet as S import qualified Focus @@ -129,6 +127,7 @@ import GHC.Types.Error (errMsgDiagnostic, singleMessage) import GHC.Unit.State import qualified StmContainers.Map as STM +import Control.Monad.Trans.Reader #if MIN_VERSION_ghc(9,13,0) import GHC.Driver.Make (checkHomeUnitsClosed) @@ -479,33 +478,33 @@ data SessionState = SessionState -- These functions encapsulate common operations on the SessionState -- | Add a file to the set of files with errors during loading -addErrorLoadingFile :: SessionState -> FilePath -> IO () +addErrorLoadingFile :: MonadIO m => SessionState -> FilePath -> m () addErrorLoadingFile state file = - modifyVar_' (failedFiles state) (\xs -> return $ Set.insert file xs) + liftIO $ modifyVar_' (failedFiles state) (\xs -> return $ Set.insert file xs) -- | Remove a file from the set of files with errors during loading -removeErrorLoadingFile :: SessionState -> FilePath -> IO () +removeErrorLoadingFile :: MonadIO m => SessionState -> FilePath -> m () removeErrorLoadingFile state file = - modifyVar_' (failedFiles state) (\xs -> return $ Set.delete file xs) + liftIO $ modifyVar_' (failedFiles state) (\xs -> return $ Set.delete file xs) -addCradleFiles :: SessionState -> HashSet FilePath -> IO () +addCradleFiles :: MonadIO m => SessionState -> HashSet FilePath -> m () addCradleFiles state files = - modifyVar_' (loadedFiles state) (\xs -> return $ files <> xs) + liftIO $ modifyVar_' (loadedFiles state) (\xs -> return $ files <> xs) -- | Remove a file from the cradle files set -removeCradleFile :: SessionState -> FilePath -> IO () +removeCradleFile :: MonadIO m => SessionState -> FilePath -> m () removeCradleFile state file = - modifyVar_' (loadedFiles state) (\xs -> return $ Set.delete file xs) + liftIO $ modifyVar_' (loadedFiles state) (\xs -> return $ Set.delete file xs) -- | Clear error loading files and reset to empty set -clearErrorLoadingFiles :: SessionState -> IO () +clearErrorLoadingFiles :: MonadIO m => SessionState -> m () clearErrorLoadingFiles state = - modifyVar_' (failedFiles state) (const $ return Set.empty) + liftIO $ 
modifyVar_' (failedFiles state) (const $ return Set.empty) -- | Clear cradle files and reset to empty set -clearCradleFiles :: SessionState -> IO () +clearCradleFiles :: MonadIO m => SessionState -> m () clearCradleFiles state = - modifyVar_' (loadedFiles state) (const $ return Set.empty) + liftIO $ modifyVar_' (loadedFiles state) (const $ return Set.empty) -- | Reset the file maps in the session state resetFileMaps :: SessionState -> STM () @@ -547,13 +546,13 @@ getPendingFiles :: SessionState -> IO (HashSet FilePath) getPendingFiles state = atomically $ Set.fromList <$> S.toUnOrderedList (pendingFiles state) -- | Handle errors during session loading by recording file as having error and removing from pending -handleSingleFileProcessingError' :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> IO () +handleSingleFileProcessingError' :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> SessionM () handleSingleFileProcessingError' state hieYaml file e = do handleSingleFileProcessingError state hieYaml file [renderPackageSetupException file e] mempty -- | Common pattern: Insert file flags, insert file mapping, and remove from pending -handleSingleFileProcessingError :: SessionState -> Maybe FilePath -> FilePath -> [FileDiagnostic] -> [FilePath] -> IO () -handleSingleFileProcessingError state hieYaml file diags extraDepFiles = do +handleSingleFileProcessingError :: SessionState -> Maybe FilePath -> FilePath -> [FileDiagnostic] -> [FilePath] -> SessionM () +handleSingleFileProcessingError state hieYaml file diags extraDepFiles = liftIO $ do dep <- getDependencyInfo $ maybeToList hieYaml <> extraDepFiles let ncfp = toNormalizedFilePath' file let flags = ((diags, Nothing), dep) @@ -584,16 +583,17 @@ getExtraFilesToLoad state cfp = do -- -- If the loading configuration changed, we likely should restart the session -- in its entirety. 
-didSessionLoadingPreferenceConfigChange :: SessionState -> Config -> IO Bool -didSessionLoadingPreferenceConfigChange s clientConfig = do +didSessionLoadingPreferenceConfigChange :: SessionState -> SessionM Bool +didSessionLoadingPreferenceConfigChange s = do + clientConfig <- asks sessionClientConfig let biosSessionLoadingVar = sessionLoadingPreferenceConfig s - mLoadingConfig <- readVar biosSessionLoadingVar + mLoadingConfig <- liftIO $ readVar biosSessionLoadingVar case mLoadingConfig of Nothing -> do - writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) + liftIO $ writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) pure False Just loadingConfig -> do - writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) + liftIO $ writeVar biosSessionLoadingVar (Just (sessionLoading clientConfig)) pure (loadingConfig /= sessionLoading clientConfig) newSessionState :: IO SessionState @@ -661,10 +661,17 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do { restartSession = restartShakeSession extras , invalidateCache = invalidateShakeCache , enqueueActions = shakeEnqueue extras - , lspContext = lspEnv extras + } + sessionEnv = SessionEnv + { sessionLspContext = lspEnv extras + , sessionRootDir = rootDir + , sessionIdeOptions = ideOptions + , sessionClientConfig = clientConfig + , sessionSharedNameCache = ideNc + , sessionLoadingOptions = newSessionLoadingOptions } - writeTQueue que (getOptionsLoop recorder sessionShake sessionState newSessionLoadingOptions ideOptions clientConfig knownTargetsVar rootDir ideNc) + writeTQueue que (runReaderT (getOptionsLoop recorder sessionShake sessionState knownTargetsVar) sessionEnv) -- Each one of deps will be registered as a FileSystemWatcher in the GhcSession action -- so that we can get a workspace/didChangeWatchedFiles notification when a dep changes. @@ -713,45 +720,55 @@ data SessionShake = SessionShake { restartSession :: VFSModified -> String -> [DelayedAction ()] -> IO [Key] -> IO () , invalidateCache :: IO Key , enqueueActions :: DelayedAction () -> IO (IO ()) - , lspContext :: Maybe (LanguageContextEnv Config) } --- The main function which gets options for a file. We only want one of these running +data SessionEnv = SessionEnv + { sessionLspContext :: Maybe (LanguageContextEnv Config) + , sessionRootDir :: FilePath + , sessionIdeOptions :: IdeOptions + , sessionClientConfig :: Config + , sessionSharedNameCache :: NameCache + , sessionLoadingOptions :: SessionLoadingOptions + } + +type SessionM = ReaderT SessionEnv IO + +-- | The main function which gets options for a file. We only want one of these running -- at a time. Therefore the IORef contains the currently running cradle, if we try -- to get some more options then we wait for the currently running action to finish -- before attempting to do so. 
-getOptionsLoop recorder sessionShake sessionState sessionLoadingOptions ideOptions clientConfig knownTargetsVar rootDir ideNc = do +getOptionsLoop :: Recorder (WithPriority Log) -> SessionShake -> SessionState -> TVar (Hashed KnownTargets) -> SessionM () +getOptionsLoop recorder sessionShake sessionState knownTargetsVar = forever $ do + sessionLoadingOptions <- asks sessionLoadingOptions -- Get the next file to load - file <- atomically $ S.readQueue (pendingFiles sessionState) + file <- liftIO $ atomically $ S.readQueue (pendingFiles sessionState) logWith recorder Debug (LogGetOptionsLoop file) let ncfp = toNormalizedFilePath' file - cachedHieYamlLocation <- join <$> atomically (STM.lookup ncfp (filesMap sessionState)) - hieYaml <- findCradle sessionLoadingOptions file + cachedHieYamlLocation <- join <$> liftIO (atomically (STM.lookup ncfp (filesMap sessionState))) + hieYaml <- liftIO $ findCradle sessionLoadingOptions file let hieLoc = cachedHieYamlLocation <|> hieYaml - sessionOpts recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc (hieLoc, file) + sessionOpts recorder sessionShake sessionState knownTargetsVar (hieLoc, file) `Safe.catch` handleSingleFileProcessingError' sessionState hieLoc file - getOptionsLoop recorder sessionShake sessionState sessionLoadingOptions ideOptions clientConfig knownTargetsVar rootDir ideNc -- | This caches the mapping from hie.yaml + Mod.hs -> [String] -- Returns the Ghc session and the cradle dependencies -sessionOpts recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc (hieYaml, file) = do - Extra.whenM (didSessionLoadingPreferenceConfigChange sessionState clientConfig) $ do +sessionOpts :: Recorder (WithPriority Log) -> SessionShake -> SessionState -> TVar (Hashed KnownTargets) -> (Maybe FilePath, FilePath) -> SessionM () +sessionOpts recorder sessionShake sessionState knownTargetsVar (hieYaml, file) = do + Extra.whenM (didSessionLoadingPreferenceConfigChange sessionState) $ do logWith recorder Info LogSessionLoadingChanged - -- If the dependencies are out of date then clear both caches and start - -- again. - atomically $ resetFileMaps sessionState + liftIO $ atomically $ resetFileMaps sessionState -- Don't even keep the name cache, we start from scratch here! - modifyVar_ (hscEnvs sessionState) (const (return Map.empty)) + liftIO $ modifyVar_ (hscEnvs sessionState) (const (return Map.empty)) -- cleanup error loading files and cradle files clearErrorLoadingFiles sessionState clearCradleFiles sessionState - cacheKey <- invalidateCache sessionShake - restartSession sessionShake VFSUnmodified "didSessionLoadingPreferenceConfigChange" [] (return [cacheKey]) + cacheKey <- liftIO $ invalidateCache sessionShake + liftIO $ restartSession sessionShake VFSUnmodified "didSessionLoadingPreferenceConfigChange" [] (return [cacheKey]) - v <- atomically $ STM.lookup hieYaml (fileToFlags sessionState) + v <- liftIO $ atomically $ STM.lookup hieYaml (fileToFlags sessionState) case v >>= HM.lookup (toNormalizedFilePath' file) of Just (_opts, old_di) -> do - deps_ok <- checkDependencyInfo old_di + deps_ok <- liftIO $ checkDependencyInfo old_di if not deps_ok then do -- if deps are old, we can try to load the error files again @@ -759,19 +776,22 @@ sessionOpts recorder sessionShake ideOptions sessionState sessionLoadingOptions removeCradleFile sessionState file -- If the dependencies are out of date then clear both caches and start -- again. 
- atomically $ resetFileMaps sessionState + liftIO $ atomically $ resetFileMaps sessionState -- Keep the same name cache - modifyVar_ (hscEnvs sessionState) (return . Map.adjust (const []) hieYaml) - consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc hieYaml file + liftIO $ modifyVar_ (hscEnvs sessionState) (return . Map.adjust (const []) hieYaml) + consultCradle recorder sessionShake sessionState knownTargetsVar hieYaml file -- if deps are ok, we can just remove the file from pending files - else atomically $ removeFromPending sessionState file - Nothing -> consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc hieYaml file - -consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc hieYaml cfp = do - (cradle, eopts) <- loadCradleWithNotifications recorder (optTesting ideOptions) - (lspContext sessionShake) sessionState (sessionLoading clientConfig) - (loadCradle sessionLoadingOptions) - rootDir hieYaml cfp + else liftIO $ atomically $ removeFromPending sessionState file + Nothing -> + consultCradle recorder sessionShake sessionState knownTargetsVar hieYaml file + +consultCradle :: Recorder (WithPriority Log) -> SessionShake -> SessionState -> TVar (Hashed KnownTargets) -> Maybe FilePath -> FilePath -> SessionM () +consultCradle recorder sessionShake sessionState knownTargetsVar hieYaml cfp = do + loadingOptions <- asks sessionLoadingOptions + (cradle, eopts) <- loadCradleWithNotifications recorder + sessionState + (loadCradle loadingOptions recorder) + hieYaml cfp logWith recorder Debug $ LogSessionLoadingResult eopts let ncfp = toNormalizedFilePath' cfp case eopts of @@ -782,13 +802,13 @@ consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOption case reverse $ readP_to_S parseVersion version of [] -> error $ "GHC version could not be parsed: " <> version ((runTime, _):_) - | compileTime == runTime -> session recorder sessionShake sessionState ideOptions sessionLoadingOptions knownTargetsVar rootDir ideNc (hieYaml, ncfp, opts, libDir) + | compileTime == runTime -> session recorder sessionShake sessionState knownTargetsVar (hieYaml, ncfp, opts, libDir) | otherwise -> handleSingleFileProcessingError' sessionState hieYaml cfp (GhcVersionMismatch{..}) -- Failure case, either a cradle error or the none cradle Left err -> do -- what if the error to load file is one of old_files ? let attemptToLoadFiles = Set.delete cfp $ Set.fromList $ concatMap cradleErrorLoadingFiles err - old_files <- readVar (loadedFiles sessionState) + old_files <- liftIO $ readVar (loadedFiles sessionState) let errorToLoadNewFiles = cfp : Set.toList (attemptToLoadFiles `Set.difference` old_files) if length errorToLoadNewFiles > 1 then do @@ -798,46 +818,66 @@ consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOption -- are changed, and old_files are not valid anymore. -- but they will still be in the old_files, and will not move to failedFiles. -- And make other files failed to load in batch mode. 
- handleBatchLoadFailure sessionState errorToLoadNewFiles + liftIO $ handleBatchLoadFailure sessionState errorToLoadNewFiles -- retry without other files logWith recorder Info $ LogSessionReloadOnError cfp (Set.toList attemptToLoadFiles) - consultCradle recorder sessionShake ideOptions sessionState sessionLoadingOptions clientConfig knownTargetsVar rootDir ideNc hieYaml cfp + consultCradle recorder sessionShake sessionState knownTargetsVar hieYaml cfp else do -- we are only loading this file and it failed let res = map (\err' -> renderCradleError err' cradle ncfp) err handleSingleFileProcessingError sessionState hieYaml cfp res $ concatMap cradleErrorDependencies err -session recorder sessionShake sessionState ideOptions sessionLoadingOptions knownTargetsVar rootDir ideNc (hieYaml, cfp, opts, libDir) = do - let initEmptyHscEnv = emptyHscEnv ideNc libDir - (new_deps, old_deps) <- packageSetup recorder sessionState rootDir (getCacheDirs sessionLoadingOptions) initEmptyHscEnv (hieYaml, cfp, opts) +session :: + Recorder (WithPriority Log) -> + SessionShake -> + SessionState -> + TVar (Hashed KnownTargets) -> + (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) -> + SessionM () +session recorder sessionShake sessionState knownTargetsVar(hieYaml, cfp, opts, libDir) = do + let initEmptyHscEnv = emptyHscEnv libDir + (new_deps, old_deps) <- packageSetup recorder sessionState initEmptyHscEnv (hieYaml, cfp, opts) -- For each component, now make a new HscEnvEq which contains the -- HscEnv for the hie.yaml file but the DynFlags for that component -- For GHC's supporting multi component sessions, we create a shared -- HscEnv but set the active component accordingly hscEnv <- initEmptyHscEnv + ideOptions <- asks sessionIdeOptions let new_cache = newComponentCache recorder (optExtensions ideOptions) cfp hscEnv - all_target_details <- new_cache old_deps new_deps - (all_targets, this_flags_map) <- addErrorTargetIfUnknown all_target_details hieYaml cfp + all_target_details <- liftIO $ new_cache old_deps new_deps + (all_targets, this_flags_map) <- liftIO $ addErrorTargetIfUnknown all_target_details hieYaml cfp - handleBatchLoadSuccess recorder sessionState hieYaml this_flags_map all_targets + liftIO $ handleBatchLoadSuccess recorder sessionState hieYaml this_flags_map all_targets -- Typecheck all files in the project on startup - loadKnownTargets recorder sessionShake (optCheckProject ideOptions) knownTargetsVar new_deps all_targets + liftIO $ loadKnownTargets recorder sessionShake (optCheckProject ideOptions) knownTargetsVar new_deps all_targets -- | Create a new HscEnv from a hieYaml root and a set of options -packageSetup recorder sessionState rootDir getCacheDirs newEmptyHscEnv (hieYaml, cfp, opts) = do +packageSetup :: Recorder (WithPriority Log) -> SessionState -> SessionM HscEnv -> (Maybe FilePath, NormalizedFilePath, ComponentOptions) -> SessionM ([ComponentInfo], [ComponentInfo]) +packageSetup recorder sessionState newEmptyHscEnv (hieYaml, cfp, opts) = do + getCacheDirs <- asks (getCacheDirs . 
sessionLoadingOptions) + rootDir <- asks sessionRootDir -- Parse DynFlags for the newly discovered component hscEnv <- newEmptyHscEnv - newTargetDfs <- evalGhcEnv hscEnv $ setOptions cfp opts (hsc_dflags hscEnv) rootDir + newTargetDfs <- liftIO $ evalGhcEnv hscEnv $ setOptions cfp opts (hsc_dflags hscEnv) rootDir let deps = componentDependencies opts ++ maybeToList hieYaml - dep_info <- getDependencyInfo deps + dep_info <- liftIO $ getDependencyInfo deps -- Now lookup to see whether we are combining with an existing HscEnv -- or making a new one. The lookup returns the HscEnv and a list of -- information about other components loaded into the HscEnv -- (unitId, DynFlag, Targets) - modifyVar (hscEnvs sessionState) $ + liftIO $ modifyVar (hscEnvs sessionState) $ addComponentInfo recorder getCacheDirs dep_info newTargetDfs (hieYaml, cfp, opts) +addComponentInfo :: + MonadUnliftIO m => + Recorder (WithPriority Log) -> + (String -> [String] -> IO CacheDirs) -> + DependencyInfo -> + NonEmpty (DynFlags, [GHC.Target]) -> + (Maybe FilePath, NormalizedFilePath, ComponentOptions) -> + Map.Map (Maybe FilePath) [RawComponentInfo] -> + m (Map.Map (Maybe FilePath) [RawComponentInfo], ([ComponentInfo], [ComponentInfo])) addComponentInfo recorder getCacheDirs dep_info newDynFlags (hieYaml, cfp, opts) m = do -- Just deps if there's already an HscEnv -- Nothing is it's the first time we are making an HscEnv @@ -876,6 +916,7 @@ addComponentInfo recorder getCacheDirs dep_info newDynFlags (hieYaml, cfp, opts) let (new,old) = NE.splitAt (NE.length new_deps) all_deps' pure (Map.insert hieYaml (NE.toList all_deps) m, (new,old)) +addErrorTargetIfUnknown :: Foldable t => t [TargetDetails] -> Maybe FilePath -> NormalizedFilePath -> IO ([TargetDetails], HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo)) addErrorTargetIfUnknown all_target_details hieYaml cfp = do let flags_map' = HM.fromList (concatMap toFlagsMap all_targets') all_targets' = concat all_target_details @@ -898,6 +939,7 @@ addErrorTargetIfUnknown all_target_details hieYaml cfp = do -- | Populate the knownTargetsVar with all the -- files in the project so that `knownFiles` can learn about them and -- we can generate a complete module graph +extendKnownTargets :: Recorder (WithPriority Log) -> TVar (Hashed KnownTargets) -> [TargetDetails] -> IO Key extendKnownTargets recorder knownTargetsVar newTargets = do knownTargets <- concatForM newTargets $ \TargetDetails{..} -> case targetTarget of @@ -931,6 +973,7 @@ extendKnownTargets recorder knownTargetsVar newTargets = do logWith recorder Debug $ LogKnownFilesUpdated (targetMap x) return $ toNoFileKey GetKnownTargets +loadKnownTargets :: Recorder (WithPriority Log) -> SessionShake -> IO Bool -> TVar (Hashed KnownTargets) -> [ComponentInfo] -> [TargetDetails] -> IO () loadKnownTargets recorder sessionShake getCheckProject knownTargetsVar new_deps targets = do checkProject <- getCheckProject @@ -951,12 +994,23 @@ loadKnownTargets recorder sessionShake getCheckProject knownTargetsVar new_deps liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>) return [keys1, keys2] -loadCradleWithNotifications recorder (IdeTesting isTesting) lspEnv sessionState sessionPref loadCradle rootDir hieYaml cfp= do +loadCradleWithNotifications :: + Recorder (WithPriority Log) -> + SessionState -> + (Maybe FilePath -> FilePath -> IO (Cradle Void)) -> + Maybe FilePath -> + FilePath -> + SessionM (Cradle Void, Either [CradleError] (ComponentOptions, FilePath, String)) +loadCradleWithNotifications recorder 
sessionState loadCradle hieYaml cfp = do + IdeTesting isTesting <- asks (optTesting . sessionIdeOptions) + sessionPref <- asks (sessionLoading . sessionClientConfig) + lspEnv <- asks sessionLspContext + rootDir <- asks sessionRootDir let lfpLog = makeRelative rootDir cfp logWith recorder Info $ LogCradlePath lfpLog when (isNothing hieYaml) $ logWith recorder Warning $ LogCradleNotFound lfpLog - cradle <- loadCradle recorder hieYaml rootDir + cradle <- liftIO $ loadCradle hieYaml rootDir when (isTesting) $ mRunLspT lspEnv $ sendNotification (SMethod_CustomMethod (Proxy @"ghcide/cradle/loaded")) (toJSON cfp) @@ -964,11 +1018,11 @@ loadCradleWithNotifications recorder (IdeTesting isTesting) lspEnv sessionState let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) <> " (for " <> T.pack lfpLog <> ")" - extraToLoads <- getExtraFilesToLoad sessionState cfp + extraToLoads <- liftIO $ getExtraFilesToLoad sessionState cfp eopts <- mRunLspTCallback lspEnv (\act -> withIndefiniteProgress progMsg Nothing NotCancellable (const act)) $ withTrace "Load cradle" $ \addTag -> do addTag "file" lfpLog - res <- cradleToOptsAndLibDir recorder sessionPref cradle cfp extraToLoads + res <- liftIO $ cradleToOptsAndLibDir recorder sessionPref cradle cfp extraToLoads addTag "result" (show res) return res pure (cradle, eopts) @@ -1008,11 +1062,12 @@ cradleToOptsAndLibDir recorder loadConfig cradle file old_fps = do PreferSingleComponentLoading -> LoadFile PreferMultiComponentLoading -> LoadWithContext old_fps -emptyHscEnv :: NameCache -> FilePath -> IO HscEnv -emptyHscEnv nc libDir = do +emptyHscEnv :: FilePath -> SessionM HscEnv +emptyHscEnv libDir = do + nc <- asks sessionSharedNameCache -- We call setSessionDynFlags so that the loader is initialised -- We need to do this before we call initUnits. 
- env <- runGhc (Just libDir) $ + env <- liftIO $ runGhc (Just libDir) $ getSessionDynFlags >>= setSessionDynFlags >> getSession pure $ setNameCache nc (hscSetFlags ((hsc_dflags env){useUnicode = True }) env) From 1aa1e2c55b621e8edad45baf4dfc0ebe1124005e Mon Sep 17 00:00:00 2001 From: fendor Date: Fri, 4 Jul 2025 20:10:33 +0200 Subject: [PATCH 61/90] Extract ghc specific functions into separate module --- ghcide/ghcide.cabal | 2 + .../session-loader/Development/IDE/Session.hs | 541 +----------------- .../Development/IDE/Session/Dependency.hs | 35 ++ .../Development/IDE/Session/Ghc.hs | 522 +++++++++++++++++ 4 files changed, 587 insertions(+), 513 deletions(-) create mode 100644 ghcide/session-loader/Development/IDE/Session/Dependency.hs create mode 100644 ghcide/session-loader/Development/IDE/Session/Ghc.hs diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal index 8218883077..dead03f36c 100644 --- a/ghcide/ghcide.cabal +++ b/ghcide/ghcide.cabal @@ -180,7 +180,9 @@ library Development.IDE.Plugin.Test Development.IDE.Plugin.TypeLenses Development.IDE.Session + Development.IDE.Session.Dependency Development.IDE.Session.Diagnostics + Development.IDE.Session.Ghc Development.IDE.Session.Implicit Development.IDE.Spans.AtPoint Development.IDE.Spans.Common diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index abeb9cb5d1..996f757303 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -1,4 +1,3 @@ -{-# LANGUAGE CPP #-} {-# LANGUAGE TypeFamilies #-} {-| @@ -31,40 +30,31 @@ import Data.Aeson hiding (Error, Key) import qualified Data.ByteString.Base16 as B16 import qualified Data.ByteString.Char8 as B import Data.Default -import Data.Either.Extra -import Data.Function import Data.Hashable hiding (hash) import qualified Data.HashMap.Strict as HM import Data.List import Data.List.Extra as L -import Data.List.NonEmpty (NonEmpty (..)) -import qualified Data.List.NonEmpty as NE import qualified Data.Map.Strict as Map import Data.Maybe import Data.Proxy import qualified Data.Text as T -import Data.Time.Clock import Data.Version import Development.IDE.Core.RuleTypes import Development.IDE.Core.Shake hiding (Log, knownTargets, withHieDb) import qualified Development.IDE.GHC.Compat as Compat -import Development.IDE.GHC.Compat.CmdLine import Development.IDE.GHC.Compat.Core hiding (Target, TargetFile, TargetModule, Var, Warning, getOptions) -import qualified Development.IDE.GHC.Compat.Core as GHC import Development.IDE.GHC.Compat.Env hiding (Logger) -import Development.IDE.GHC.Compat.Units (UnitId) import Development.IDE.GHC.Util import Development.IDE.Graph (Action, Key) import qualified Development.IDE.Session.Implicit as GhcIde import Development.IDE.Types.Diagnostics import Development.IDE.Types.Exports -import Development.IDE.Types.HscEnvEq (HscEnvEq, newHscEnvEq) +import Development.IDE.Types.HscEnvEq (HscEnvEq) import Development.IDE.Types.Location import Development.IDE.Types.Options -import GHC.ResponseFile import qualified HIE.Bios as HieBios import HIE.Bios.Environment hiding (getCacheDir) import HIE.Bios.Types hiding (Log) @@ -92,24 +82,20 @@ import Data.Void import Control.Concurrent.STM.Stats (atomically, modifyTVar', readTVar, writeTVar) import Control.Concurrent.STM.TQueue -import Control.DeepSeq -import Control.Exception (evaluate) -import Control.Monad.IO.Unlift (MonadUnliftIO) import Control.Monad.Trans.Cont (ContT (ContT, runContT)) import Data.Foldable (for_) 
import Data.HashMap.Strict (HashMap) import Data.HashSet (HashSet) import qualified Data.HashSet as Set -import qualified Data.Set as OS import Database.SQLite.Simple import Development.IDE.Core.Tracing (withTrace) import Development.IDE.Core.WorkerThread (withWorkerQueue) -import qualified Development.IDE.GHC.Compat.Util as Compat import Development.IDE.Session.Diagnostics (renderCradleError) +import Development.IDE.Session.Dependency +import Development.IDE.Session.Ghc hiding (Log) import Development.IDE.Types.Shake (WithHieDb, WithHieDbShield (..), toNoFileKey) -import GHC.Data.Graph.Directed import HieDb.Create import HieDb.Types import Ide.PluginUtils (toAbsolute) @@ -121,17 +107,9 @@ import Control.Concurrent.STM (STM, TVar) import qualified Control.Monad.STM as STM import qualified Development.IDE.Session.OrderedSet as S import qualified Focus -import GHC.Driver.Env (hsc_all_home_unit_ids) -import GHC.Driver.Errors.Types -import GHC.Types.Error (errMsgDiagnostic, - singleMessage) -import GHC.Unit.State import qualified StmContainers.Map as STM import Control.Monad.Trans.Reader - -#if MIN_VERSION_ghc(9,13,0) -import GHC.Driver.Make (checkHomeUnitsClosed) -#endif +import qualified Development.IDE.Session.Ghc as Ghc data Log = LogSettingInitialDynFlags @@ -141,16 +119,12 @@ data Log | LogHieDbRetriesExhausted !Int !Int !Int !SomeException | LogHieDbWriterThreadSQLiteError !SQLError | LogHieDbWriterThreadException !SomeException - | LogInterfaceFilesCacheDir !FilePath | LogKnownFilesUpdated !(HashMap Target (HashSet NormalizedFilePath)) - | LogMakingNewHscEnv ![UnitId] - | LogDLLLoadError !String | LogCradlePath !FilePath | LogCradleNotFound !FilePath | LogSessionLoadingResult !(Either [CradleError] (ComponentOptions, FilePath, String)) | LogCradle !(Cradle Void) | LogNoneCradleFound FilePath - | LogNewComponentCache !(([FileDiagnostic], Maybe HscEnvEq), DependencyInfo) | LogHieBios HieBios.Log | LogSessionLoadingChanged | LogSessionNewLoadedFiles ![FilePath] @@ -158,6 +132,7 @@ data Log | LogGetOptionsLoop !FilePath | LogLookupSessionCache !FilePath | LogTime !String + | LogSessionGhc Ghc.Log deriving instance Show Log instance Pretty Log where @@ -209,18 +184,12 @@ instance Pretty Log where vcat [ "HieDb writer thread exception:" , pretty (displayException e) ] - LogInterfaceFilesCacheDir path -> - "Interface files cache directory:" <+> pretty path LogKnownFilesUpdated targetToPathsMap -> nest 2 $ vcat [ "Known files updated:" , viaShow $ (HM.map . Set.map) fromNormalizedFilePath targetToPathsMap ] - LogMakingNewHscEnv inPlaceUnitIds -> - "Making new HscEnv. In-place unit ids:" <+> pretty (map show inPlaceUnitIds) - LogDLLLoadError errorString -> - "Error dynamically loading libm.so.6:" <+> pretty errorString LogCradlePath path -> "Cradle path:" <+> pretty path LogCradleNotFound path -> @@ -232,9 +201,8 @@ instance Pretty Log where "Session loading result:" <+> viaShow e LogCradle cradle -> "Cradle:" <+> viaShow cradle - LogNewComponentCache componentCache -> - "New component cache HscEnvEq:" <+> viaShow componentCache LogHieBios msg -> pretty msg + LogSessionGhc msg -> pretty msg LogSessionLoadingChanged -> "Session Loading config changed, reloading the full session." 
@@ -242,9 +210,6 @@ instance Pretty Log where hiedbDataVersion :: String hiedbDataVersion = "1" -data CacheDirs = CacheDirs - { hiCacheDir, hieCacheDir, oCacheDir :: Maybe FilePath} - data SessionLoadingOptions = SessionLoadingOptions { findCradle :: FilePath -> IO (Maybe FilePath) -- | Load the cradle with an optional 'hie.yaml' location. @@ -733,18 +698,22 @@ data SessionEnv = SessionEnv type SessionM = ReaderT SessionEnv IO --- | The main function which gets options for a file. We only want one of these running --- at a time. Therefore the IORef contains the currently running cradle, if we try --- to get some more options then we wait for the currently running action to finish --- before attempting to do so. +-- | The main function which gets options for a file. +-- +-- The general approach is as follows: +-- 1. Find the 'hie.yaml' for the next file target, if there is any. +-- 2. Check in the cache, whether the given 'hie.yaml' was already loaded before +-- 3.1. If it wasn't, initialise a new session and continue with step 4. +-- 3.2. If it is loaded, check whether we need to reload the session, e.g. because the `.cabal` file was modified +-- 3.2.1. If we need to reload, remove the getOptionsLoop :: Recorder (WithPriority Log) -> SessionShake -> SessionState -> TVar (Hashed KnownTargets) -> SessionM () getOptionsLoop recorder sessionShake sessionState knownTargetsVar = forever $ do - sessionLoadingOptions <- asks sessionLoadingOptions -- Get the next file to load file <- liftIO $ atomically $ S.readQueue (pendingFiles sessionState) logWith recorder Debug (LogGetOptionsLoop file) let ncfp = toNormalizedFilePath' file cachedHieYamlLocation <- join <$> liftIO (atomically (STM.lookup ncfp (filesMap sessionState))) + sessionLoadingOptions <- asks sessionLoadingOptions hieYaml <- liftIO $ findCradle sessionLoadingOptions file let hieLoc = cachedHieYamlLocation <|> hieYaml sessionOpts recorder sessionShake sessionState knownTargetsVar (hieLoc, file) @@ -780,8 +749,9 @@ sessionOpts recorder sessionShake sessionState knownTargetsVar (hieYaml, file) = -- Keep the same name cache liftIO $ modifyVar_ (hscEnvs sessionState) (return . 
Map.adjust (const []) hieYaml) consultCradle recorder sessionShake sessionState knownTargetsVar hieYaml file - -- if deps are ok, we can just remove the file from pending files - else liftIO $ atomically $ removeFromPending sessionState file + else do + -- if deps are ok, we can just remove the file from pending files + liftIO $ atomically $ removeFromPending sessionState file Nothing -> consultCradle recorder sessionShake sessionState knownTargetsVar hieYaml file @@ -835,7 +805,7 @@ session :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) -> SessionM () session recorder sessionShake sessionState knownTargetsVar(hieYaml, cfp, opts, libDir) = do - let initEmptyHscEnv = emptyHscEnv libDir + let initEmptyHscEnv = emptyHscEnvM libDir (new_deps, old_deps) <- packageSetup recorder sessionState initEmptyHscEnv (hieYaml, cfp, opts) -- For each component, now make a new HscEnvEq which contains the @@ -844,7 +814,7 @@ session recorder sessionShake sessionState knownTargetsVar(hieYaml, cfp, opts, l -- HscEnv but set the active component accordingly hscEnv <- initEmptyHscEnv ideOptions <- asks sessionIdeOptions - let new_cache = newComponentCache recorder (optExtensions ideOptions) cfp hscEnv + let new_cache = newComponentCache (cmapWithPrio LogSessionGhc recorder) (optExtensions ideOptions) cfp hscEnv all_target_details <- liftIO $ new_cache old_deps new_deps (all_targets, this_flags_map) <- liftIO $ addErrorTargetIfUnknown all_target_details hieYaml cfp @@ -867,54 +837,7 @@ packageSetup recorder sessionState newEmptyHscEnv (hieYaml, cfp, opts) = do -- information about other components loaded into the HscEnv -- (unitId, DynFlag, Targets) liftIO $ modifyVar (hscEnvs sessionState) $ - addComponentInfo recorder getCacheDirs dep_info newTargetDfs (hieYaml, cfp, opts) - -addComponentInfo :: - MonadUnliftIO m => - Recorder (WithPriority Log) -> - (String -> [String] -> IO CacheDirs) -> - DependencyInfo -> - NonEmpty (DynFlags, [GHC.Target]) -> - (Maybe FilePath, NormalizedFilePath, ComponentOptions) -> - Map.Map (Maybe FilePath) [RawComponentInfo] -> - m (Map.Map (Maybe FilePath) [RawComponentInfo], ([ComponentInfo], [ComponentInfo])) -addComponentInfo recorder getCacheDirs dep_info newDynFlags (hieYaml, cfp, opts) m = do - -- Just deps if there's already an HscEnv - -- Nothing is it's the first time we are making an HscEnv - let oldDeps = Map.lookup hieYaml m - let -- Add the raw information about this component to the list - -- We will modify the unitId and DynFlags used for - -- compilation but these are the true source of - -- information. - new_deps = fmap (\(df, targets) -> RawComponentInfo (homeUnitId_ df) df targets cfp opts dep_info) newDynFlags - all_deps = new_deps `NE.appendList` fromMaybe [] oldDeps - -- Get all the unit-ids for things in this component - - all_deps' <- forM all_deps $ \RawComponentInfo{..} -> do - let prefix = show rawComponentUnitId - -- See Note [Avoiding bad interface files] - let cacheDirOpts = componentOptions opts - cacheDirs <- liftIO $ getCacheDirs prefix cacheDirOpts - processed_df <- setCacheDirs recorder cacheDirs rawComponentDynFlags - -- The final component information, mostly the same but the DynFlags don't - -- contain any packages which are also loaded - -- into the same component. 
- pure $ ComponentInfo - { componentUnitId = rawComponentUnitId - , componentDynFlags = processed_df - , componentTargets = rawComponentTargets - , componentFP = rawComponentFP - , componentCOptions = rawComponentCOptions - , componentDependencyInfo = rawComponentDependencyInfo - } - -- Modify the map so the hieYaml now maps to the newly updated - -- ComponentInfos - -- Returns - -- . The information for the new component which caused this cache miss - -- . The modified information (without -inplace flags) for - -- existing packages - let (new,old) = NE.splitAt (NE.length new_deps) all_deps' - pure (Map.insert hieYaml (NE.toList all_deps) m, (new,old)) + addComponentInfo (cmapWithPrio LogSessionGhc recorder) getCacheDirs dep_info newTargetDfs (hieYaml, cfp, opts) addErrorTargetIfUnknown :: Foldable t => t [TargetDetails] -> Maybe FilePath -> NormalizedFilePath -> IO ([TargetDetails], HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo)) addErrorTargetIfUnknown all_target_details hieYaml cfp = do @@ -1062,255 +985,20 @@ cradleToOptsAndLibDir recorder loadConfig cradle file old_fps = do PreferSingleComponentLoading -> LoadFile PreferMultiComponentLoading -> LoadWithContext old_fps -emptyHscEnv :: FilePath -> SessionM HscEnv -emptyHscEnv libDir = do - nc <- asks sessionSharedNameCache - -- We call setSessionDynFlags so that the loader is initialised - -- We need to do this before we call initUnits. - env <- liftIO $ runGhc (Just libDir) $ - getSessionDynFlags >>= setSessionDynFlags >> getSession - pure $ setNameCache nc (hscSetFlags ((hsc_dflags env){useUnicode = True }) env) - -data TargetDetails = TargetDetails - { - targetTarget :: !Target, - targetEnv :: !(IdeResult HscEnvEq), - targetDepends :: !DependencyInfo, - targetLocations :: ![NormalizedFilePath] - } +-- ---------------------------------------------------------------------------- +-- Utilities +-- ---------------------------------------------------------------------------- -fromTargetId :: [FilePath] -- ^ import paths - -> [String] -- ^ extensions to consider - -> TargetId - -> IdeResult HscEnvEq - -> DependencyInfo - -> IO [TargetDetails] --- For a target module we consider all the import paths -fromTargetId is exts (GHC.TargetModule modName) env dep = do - let fps = [i moduleNameSlashes modName -<.> ext <> boot - | ext <- exts - , i <- is - , boot <- ["", "-boot"] - ] - let locs = fmap toNormalizedFilePath' fps - return [TargetDetails (TargetModule modName) env dep locs] --- For a 'TargetFile' we consider all the possible module names -fromTargetId _ _ (GHC.TargetFile f _) env deps = do - let nf = toNormalizedFilePath' f - let other - | "-boot" `isSuffixOf` f = toNormalizedFilePath' (L.dropEnd 5 $ fromNormalizedFilePath nf) - | otherwise = toNormalizedFilePath' (fromNormalizedFilePath nf ++ "-boot") - return [TargetDetails (TargetFile nf) env deps [nf, other]] +emptyHscEnvM :: FilePath -> SessionM HscEnv +emptyHscEnvM libDir = do + nc <- asks sessionSharedNameCache + liftIO $ Ghc.emptyHscEnv nc libDir toFlagsMap :: TargetDetails -> [(NormalizedFilePath, (IdeResult HscEnvEq, DependencyInfo))] toFlagsMap TargetDetails{..} = [ (l, (targetEnv, targetDepends)) | l <- targetLocations] - -setNameCache :: NameCache -> HscEnv -> HscEnv -setNameCache nc hsc = hsc { hsc_NC = nc } - -#if MIN_VERSION_ghc(9,13,0) --- Moved back to implementation in GHC. 
-checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> [DriverMessages] -checkHomeUnitsClosed' ue _ = checkHomeUnitsClosed ue -#else --- This function checks the important property that if both p and q are home units --- then any dependency of p, which transitively depends on q is also a home unit. --- GHC had an implementation of this function, but it was horribly inefficient --- We should move back to the GHC implementation on compilers where --- https://gitlab.haskell.org/ghc/ghc/-/merge_requests/12162 is included -checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> Maybe (Compat.MsgEnvelope DriverMessage) -checkHomeUnitsClosed' ue home_id_set - | OS.null bad_unit_ids = Nothing - | otherwise = Just (GHC.mkPlainErrorMsgEnvelope rootLoc $ DriverHomePackagesNotClosed (OS.toList bad_unit_ids)) - where - bad_unit_ids = upwards_closure OS.\\ home_id_set - rootLoc = mkGeneralSrcSpan (Compat.fsLit "") - - graph :: Graph (Node UnitId UnitId) - graph = graphFromEdgedVerticesUniq graphNodes - - -- downwards closure of graph - downwards_closure - = graphFromEdgedVerticesUniq [ DigraphNode uid uid (OS.toList deps) - | (uid, deps) <- Map.toList (allReachable graph node_key)] - - inverse_closure = transposeG downwards_closure - - upwards_closure = OS.fromList $ map node_key $ reachablesG inverse_closure [DigraphNode uid uid [] | uid <- OS.toList home_id_set] - - all_unit_direct_deps :: UniqMap UnitId (OS.Set UnitId) - all_unit_direct_deps - = unitEnv_foldWithKey go emptyUniqMap $ ue_home_unit_graph ue - where - go rest this this_uis = - plusUniqMap_C OS.union - (addToUniqMap_C OS.union external_depends this (OS.fromList this_deps)) - rest - where - external_depends = mapUniqMap (OS.fromList . unitDepends) -#if !MIN_VERSION_ghc(9,7,0) - $ listToUniqMap $ Map.toList -#endif - - $ unitInfoMap this_units - this_units = homeUnitEnv_units this_uis - this_deps = [ Compat.toUnitId unit | (unit,Just _) <- explicitUnits this_units] - - graphNodes :: [Node UnitId UnitId] - graphNodes = go OS.empty home_id_set - where - go done todo - = case OS.minView todo of - Nothing -> [] - Just (uid, todo') - | OS.member uid done -> go done todo' - | otherwise -> case lookupUniqMap all_unit_direct_deps uid of - Nothing -> pprPanic "uid not found" (Compat.ppr (uid, all_unit_direct_deps)) - Just depends -> - let todo'' = (depends OS.\\ done) `OS.union` todo' - in DigraphNode uid uid (OS.toList depends) : go (OS.insert uid done) todo'' -#endif - --- | Create a mapping from FilePaths to HscEnvEqs --- This combines all the components we know about into --- an appropriate session, which is a multi component --- session on GHC 9.4+ -newComponentCache - :: Recorder (WithPriority Log) - -> [String] -- ^ File extensions to consider - -> NormalizedFilePath -- ^ Path to file that caused the creation of this component - -> HscEnv -- ^ An empty HscEnv - -> [ComponentInfo] -- ^ New components to be loaded - -> [ComponentInfo] -- ^ old, already existing components - -> IO [ [TargetDetails] ] -newComponentCache recorder exts _cfp hsc_env old_cis new_cis = do - let cis = Map.unionWith unionCIs (mkMap new_cis) (mkMap old_cis) - -- When we have multiple components with the same uid, - -- prefer the new one over the old. - -- However, we might have added some targets to the old unit - -- (see special target), so preserve those - unionCIs new_ci old_ci = new_ci { componentTargets = componentTargets new_ci ++ componentTargets old_ci } - mkMap = Map.fromListWith unionCIs . 
map (\ci -> (componentUnitId ci, ci)) - let dfs = map componentDynFlags $ Map.elems cis - uids = Map.keys cis - logWith recorder Info $ LogMakingNewHscEnv uids - hscEnv' <- -- Set up a multi component session with the other units on GHC 9.4 - Compat.initUnits dfs hsc_env - - let closure_errs = maybeToList $ checkHomeUnitsClosed' (hsc_unit_env hscEnv') (hsc_all_home_unit_ids hscEnv') - closure_err_to_multi_err err = - ideErrorWithSource - (Just "cradle") (Just DiagnosticSeverity_Warning) _cfp - (T.pack (Compat.printWithoutUniques (singleMessage err))) - (Just (fmap GhcDriverMessage err)) - multi_errs = map closure_err_to_multi_err closure_errs - bad_units = OS.fromList $ concat $ do - x <- map errMsgDiagnostic closure_errs - DriverHomePackagesNotClosed us <- pure x - pure us - isBad ci = (homeUnitId_ (componentDynFlags ci)) `OS.member` bad_units - -- Whenever we spin up a session on Linux, dynamically load libm.so.6 - -- in. We need this in case the binary is statically linked, in which - -- case the interactive session will fail when trying to load - -- ghc-prim, which happens whenever Template Haskell is being - -- evaluated or haskell-language-server's eval plugin tries to run - -- some code. If the binary is dynamically linked, then this will have - -- no effect. - -- See https://github.com/haskell/haskell-language-server/issues/221 - -- We need to do this after the call to setSessionDynFlags initialises - -- the loader - when (os == "linux") $ do - initObjLinker hscEnv' - res <- loadDLL hscEnv' "libm.so.6" - case res of - Nothing -> pure () - Just err -> logWith recorder Error $ LogDLLLoadError err - - forM (Map.elems cis) $ \ci -> do - let df = componentDynFlags ci - thisEnv <- do - -- In GHC 9.4 we have multi component support, and we have initialised all the units - -- above. - -- We just need to set the current unit here - pure $ hscSetActiveUnitId (homeUnitId_ df) hscEnv' - henv <- newHscEnvEq thisEnv - let targetEnv = (if isBad ci then multi_errs else [], Just henv) - targetDepends = componentDependencyInfo ci - logWith recorder Debug $ LogNewComponentCache (targetEnv, targetDepends) - evaluate $ liftRnf rwhnf $ componentTargets ci - - let mk t = fromTargetId (importPaths df) exts (targetId t) targetEnv targetDepends - ctargets <- concatMapM mk (componentTargets ci) - - return (L.nubOrdOn targetTarget ctargets) - -{- Note [Avoiding bad interface files] -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Originally, we set the cache directory for the various components once -on the first occurrence of the component. -This works fine if these components have no references to each other, -but you have components that depend on each other, the interface files are -updated for each component. -After restarting the session and only opening the component that depended -on the other, suddenly the interface files of this component are stale. -However, from the point of view of `ghcide`, they do not look stale, -thus, not regenerated and the IDE shows weird errors such as: -``` -typecheckIface -Declaration for Rep_ClientRunFlags -Axiom branches Rep_ClientRunFlags: - Failed to load interface for ‘Distribution.Simple.Flag’ - Use -v to see a list of the files searched for. 
-``` -and -``` -expectJust checkFamInstConsistency -CallStack (from HasCallStack): - error, called at compiler\\utils\\Maybes.hs:55:27 in ghc:Maybes - expectJust, called at compiler\\typecheck\\FamInst.hs:461:30 in ghc:FamInst -``` - -To mitigate this, we set the cache directory for each component dependent -on the components of the current `HscEnv`, additionally to the component options -of the respective components. -Assume two components, c1, c2, where c2 depends on c1, and the options of the -respective components are co1, co2. -If we want to load component c2, followed by c1, we set the cache directory for -each component in this way: - - * Load component c2 - * (Cache Directory State) - - name of c2 + co2 - * Load component c1 - * (Cache Directory State) - - name of c2 + name of c1 + co2 - - name of c2 + name of c1 + co1 - -Overall, we created three cache directories. If we opened c1 first, then we -create a fourth cache directory. -This makes sure that interface files are always correctly updated. - -Since this causes a lot of recompilation, we only update the cache-directory, -if the dependencies of a component have really changed. -E.g. when you load two executables, they can not depend on each other. They -should be filtered out, such that we dont have to re-compile everything. --} - --- | Set the cache-directory based on the ComponentOptions and a list of --- internal packages. --- For the exact reason, see Note [Avoiding bad interface files]. -setCacheDirs :: MonadUnliftIO m => Recorder (WithPriority Log) -> CacheDirs -> DynFlags -> m DynFlags -setCacheDirs recorder CacheDirs{..} dflags = do - logWith recorder Info $ LogInterfaceFilesCacheDir (fromMaybe cacheDir hiCacheDir) - pure $ dflags - & maybe id setHiDir hiCacheDir - & maybe id setHieDir hieCacheDir - & maybe id setODir oCacheDir - -- See Note [Multi Cradle Dependency Info] -type DependencyInfo = Map.Map FilePath (Maybe UTCTime) type HieMap = Map.Map (Maybe FilePath) [RawComponentInfo] -- | Maps a "hie.yaml" location to all its Target Filepaths and options. type FlagsMap = STM.Map (Maybe FilePath) (HM.HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo)) @@ -1318,67 +1006,6 @@ type FlagsMap = STM.Map (Maybe FilePath) (HM.HashMap NormalizedFilePath (IdeResu -- It aims to be the reverse of 'FlagsMap'. type FilesMap = STM.Map NormalizedFilePath (Maybe FilePath) - --- This is pristine information about a component -data RawComponentInfo = RawComponentInfo - { rawComponentUnitId :: UnitId - -- | Unprocessed DynFlags. Contains inplace packages such as libraries. - -- We do not want to use them unprocessed. - , rawComponentDynFlags :: DynFlags - -- | All targets of this components. - , rawComponentTargets :: [GHC.Target] - -- | Filepath which caused the creation of this component - , rawComponentFP :: NormalizedFilePath - -- | Component Options used to load the component. - , rawComponentCOptions :: ComponentOptions - -- | Maps cradle dependencies, such as `stack.yaml`, or `.cabal` file - -- to last modification time. See Note [Multi Cradle Dependency Info]. - , rawComponentDependencyInfo :: DependencyInfo - } - --- This is processed information about the component, in particular the dynflags will be modified. -data ComponentInfo = ComponentInfo - { componentUnitId :: UnitId - -- | Processed DynFlags. Does not contain inplace packages such as local - -- libraries. Can be used to actually load this Component. - , componentDynFlags :: DynFlags - -- | All targets of this components. 
- , componentTargets :: [GHC.Target] - -- | Filepath which caused the creation of this component - , componentFP :: NormalizedFilePath - -- | Component Options used to load the component. - , componentCOptions :: ComponentOptions - -- | Maps cradle dependencies, such as `stack.yaml`, or `.cabal` file - -- to last modification time. See Note [Multi Cradle Dependency Info] - , componentDependencyInfo :: DependencyInfo - } - --- | Check if any dependency has been modified lately. -checkDependencyInfo :: DependencyInfo -> IO Bool -checkDependencyInfo old_di = do - di <- getDependencyInfo (Map.keys old_di) - return (di == old_di) - --- Note [Multi Cradle Dependency Info] --- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ --- Why do we implement our own file modification tracking here? --- The primary reason is that the custom caching logic is quite complicated and going into shake --- adds even more complexity and more indirection. I did try for about 5 hours to work out how to --- use shake rules rather than IO but eventually gave up. - --- | Computes a mapping from a filepath to its latest modification date. --- See Note [Multi Cradle Dependency Info] why we do this ourselves instead --- of letting shake take care of it. -getDependencyInfo :: [FilePath] -> IO DependencyInfo -getDependencyInfo fs = Map.fromList <$> mapM do_one fs - - where - safeTryIO :: IO a -> IO (Either IOException a) - safeTryIO = Safe.try - - do_one :: FilePath -> IO (FilePath, Maybe UTCTime) - do_one fp = (fp,) . eitherToMaybe <$> safeTryIO (getModificationTime fp) - -- | Memoize an IO function, with the characteristics: -- -- * If multiple people ask for a result simultaneously, make sure you only compute it once. @@ -1396,118 +1023,6 @@ memoIO op = do return (Map.insert k res mp, res) Just res -> return (mp, res) -unit_flags :: [Flag (CmdLineP [String])] -unit_flags = [defFlag "unit" (SepArg addUnit)] - -addUnit :: String -> EwM (CmdLineP [String]) () -addUnit unit_str = liftEwM $ do - units <- getCmdLineState - putCmdLineState (unit_str : units) - --- | Throws if package flags are unsatisfiable -setOptions :: GhcMonad m - => NormalizedFilePath - -> ComponentOptions - -> DynFlags - -> FilePath -- ^ root dir, see Note [Root Directory] - -> m (NonEmpty (DynFlags, [GHC.Target])) -setOptions cfp (ComponentOptions theOpts compRoot _) dflags rootDir = do - ((theOpts',_errs,_warns),units) <- processCmdLineP unit_flags [] (map noLoc theOpts) - case NE.nonEmpty units of - Just us -> initMulti us - Nothing -> do - (df, targets) <- initOne (map unLoc theOpts') - -- A special target for the file which caused this wonderful - -- component to be created. In case the cradle doesn't list all the targets for - -- the component, in which case things will be horribly broken anyway. - -- - -- When we have a singleComponent that is caused to be loaded due to a - -- file, we assume the file is part of that component. This is useful - -- for bare GHC sessions, such as many of the ones used in the testsuite - -- - -- We don't do this when we have multiple components, because each - -- component better list all targets or there will be anarchy. - -- It is difficult to know which component to add our file to in - -- that case. - -- Multi unit arguments are likely to come from cabal, which - -- does list all targets. 
- -- - -- If we don't end up with a target for the current file in the end, then - -- we will report it as an error for that file - let abs_fp = toAbsolute rootDir (fromNormalizedFilePath cfp) - let special_target = Compat.mkSimpleTarget df abs_fp - pure $ (df, special_target : targets) :| [] - where - initMulti unitArgFiles = - forM unitArgFiles $ \f -> do - args <- liftIO $ expandResponse [f] - initOne args - initOne this_opts = do - (dflags', targets') <- addCmdOpts this_opts dflags - let dflags'' = - case unitIdString (homeUnitId_ dflags') of - -- cabal uses main for the unit id of all executable packages - -- This makes multi-component sessions confused about what - -- options to use for that component. - -- Solution: hash the options and use that as part of the unit id - -- This works because there won't be any dependencies on the - -- executable unit. - "main" -> - let hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack this_opts) - hashed_uid = Compat.toUnitId (Compat.stringToUnit ("main-"++hash)) - in setHomeUnitId_ hashed_uid dflags' - _ -> dflags' - - let targets = makeTargetsAbsolute root targets' - root = case workingDirectory dflags'' of - Nothing -> compRoot - Just wdir -> compRoot wdir - let dflags''' = - setWorkingDirectory root $ - disableWarningsAsErrors $ - -- disabled, generated directly by ghcide instead - flip gopt_unset Opt_WriteInterface $ - -- disabled, generated directly by ghcide instead - -- also, it can confuse the interface stale check - dontWriteHieFiles $ - setIgnoreInterfacePragmas $ - setBytecodeLinkerOptions $ - disableOptimisation $ - Compat.setUpTypedHoles $ - makeDynFlagsAbsolute compRoot -- makeDynFlagsAbsolute already accounts for workingDirectory - dflags'' - return (dflags''', targets) - -setIgnoreInterfacePragmas :: DynFlags -> DynFlags -setIgnoreInterfacePragmas df = - gopt_set (gopt_set df Opt_IgnoreInterfacePragmas) Opt_IgnoreOptimChanges - -disableOptimisation :: DynFlags -> DynFlags -disableOptimisation df = updOptLevel 0 df - -setHiDir :: FilePath -> DynFlags -> DynFlags -setHiDir f d = - -- override user settings to avoid conflicts leading to recompilation - d { hiDir = Just f} - -setODir :: FilePath -> DynFlags -> DynFlags -setODir f d = - -- override user settings to avoid conflicts leading to recompilation - d { objectDir = Just f} - -getCacheDirsDefault :: String -> [String] -> IO CacheDirs -getCacheDirsDefault prefix opts = do - dir <- Just <$> getXdgDirectory XdgCache (cacheDir prefix ++ "-" ++ opts_hash) - return $ CacheDirs dir dir dir - where - -- Create a unique folder per set of different GHC options, assuming that each different set of - -- GHC options will create incompatible interface files. 
- opts_hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack opts) - --- | Sub directory for the cache path -cacheDir :: String -cacheDir = "ghcide" - ---------------------------------------------------------------------------------------------------- data PackageSetupException diff --git a/ghcide/session-loader/Development/IDE/Session/Dependency.hs b/ghcide/session-loader/Development/IDE/Session/Dependency.hs new file mode 100644 index 0000000000..926e0e47b3 --- /dev/null +++ b/ghcide/session-loader/Development/IDE/Session/Dependency.hs @@ -0,0 +1,35 @@ +module Development.IDE.Session.Dependency where + +import Control.Exception.Safe as Safe +import Data.Either.Extra +import qualified Data.Map.Strict as Map +import Data.Time.Clock +import System.Directory + +type DependencyInfo = Map.Map FilePath (Maybe UTCTime) + +-- | Check if any dependency has been modified lately. +checkDependencyInfo :: DependencyInfo -> IO Bool +checkDependencyInfo old_di = do + di <- getDependencyInfo (Map.keys old_di) + return (di == old_di) + +-- Note [Multi Cradle Dependency Info] +-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +-- Why do we implement our own file modification tracking here? +-- The primary reason is that the custom caching logic is quite complicated and going into shake +-- adds even more complexity and more indirection. I did try for about 5 hours to work out how to +-- use shake rules rather than IO but eventually gave up. + +-- | Computes a mapping from a filepath to its latest modification date. +-- See Note [Multi Cradle Dependency Info] why we do this ourselves instead +-- of letting shake take care of it. +getDependencyInfo :: [FilePath] -> IO DependencyInfo +getDependencyInfo fs = Map.fromList <$> mapM do_one fs + + where + safeTryIO :: IO a -> IO (Either IOException a) + safeTryIO = Safe.try + + do_one :: FilePath -> IO (FilePath, Maybe UTCTime) + do_one fp = (fp,) . 
eitherToMaybe <$> safeTryIO (getModificationTime fp) diff --git a/ghcide/session-loader/Development/IDE/Session/Ghc.hs b/ghcide/session-loader/Development/IDE/Session/Ghc.hs new file mode 100644 index 0000000000..25f377200c --- /dev/null +++ b/ghcide/session-loader/Development/IDE/Session/Ghc.hs @@ -0,0 +1,522 @@ +{-# LANGUAGE CPP #-} +module Development.IDE.Session.Ghc where + +import Control.Monad +import Control.Monad.Extra as Extra +import Control.Monad.IO.Class +import qualified Crypto.Hash.SHA1 as H +import qualified Data.ByteString.Base16 as B16 +import qualified Data.ByteString.Char8 as B +import Data.Function +import Data.List +import Data.List.Extra as L +import Data.List.NonEmpty (NonEmpty (..)) +import qualified Data.List.NonEmpty as NE +import qualified Data.Map.Strict as Map +import Data.Maybe +import qualified Data.Text as T +import Development.IDE.Core.Shake hiding (Log, knownTargets, + withHieDb) +import qualified Development.IDE.GHC.Compat as Compat +import Development.IDE.GHC.Compat.CmdLine +import Development.IDE.GHC.Compat.Core hiding (Target, TargetFile, + TargetModule, Var, + Warning, getOptions) +import qualified Development.IDE.GHC.Compat.Core as GHC +import Development.IDE.GHC.Compat.Env hiding (Logger) +import Development.IDE.GHC.Compat.Units (UnitId) +import Development.IDE.GHC.Util +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.HscEnvEq (HscEnvEq, newHscEnvEq) +import Development.IDE.Types.Location +import GHC.ResponseFile +import HIE.Bios.Environment hiding (getCacheDir) +import HIE.Bios.Types hiding (Log) +import Ide.Logger (Pretty (pretty), + Priority (Debug, Error, Info), + Recorder, WithPriority, logWith, viaShow, (<+>)) +import System.Directory +import System.FilePath +import System.Info + + +import Control.DeepSeq +import Control.Exception (evaluate) +import Control.Monad.IO.Unlift (MonadUnliftIO) +import qualified Data.Set as OS +import qualified Development.IDE.GHC.Compat.Util as Compat +import Development.IDE.Session.Dependency +import GHC.Data.Graph.Directed +import Ide.PluginUtils (toAbsolute) + +import GHC.Driver.Env (hsc_all_home_unit_ids) +import GHC.Driver.Errors.Types +import GHC.Types.Error (errMsgDiagnostic, + singleMessage) +import GHC.Unit.State + +data Log + = LogInterfaceFilesCacheDir !FilePath + | LogMakingNewHscEnv ![UnitId] + | LogNewComponentCache !(([FileDiagnostic], Maybe HscEnvEq), DependencyInfo) + | LogDLLLoadError !String +deriving instance Show Log + +instance Pretty Log where + pretty = \case + LogInterfaceFilesCacheDir path -> + "Interface files cache directory:" <+> pretty path + LogMakingNewHscEnv inPlaceUnitIds -> + "Making new HscEnv. In-place unit ids:" <+> pretty (map show inPlaceUnitIds) + LogNewComponentCache componentCache -> + "New component cache HscEnvEq:" <+> viaShow componentCache + LogDLLLoadError errorString -> + "Error dynamically loading libm.so.6:" <+> pretty errorString + +-- This is pristine information about a component +data RawComponentInfo = RawComponentInfo + { rawComponentUnitId :: UnitId + -- | Unprocessed DynFlags. Contains inplace packages such as libraries. + -- We do not want to use them unprocessed. + , rawComponentDynFlags :: DynFlags + -- | All targets of this components. + , rawComponentTargets :: [GHC.Target] + -- | Filepath which caused the creation of this component + , rawComponentFP :: NormalizedFilePath + -- | Component Options used to load the component. 
+ , rawComponentCOptions :: ComponentOptions + -- | Maps cradle dependencies, such as `stack.yaml`, or `.cabal` file + -- to last modification time. See Note [Multi Cradle Dependency Info]. + , rawComponentDependencyInfo :: DependencyInfo + } + +-- This is processed information about the component, in particular the dynflags will be modified. +data ComponentInfo = ComponentInfo + { componentUnitId :: UnitId + -- | Processed DynFlags. Does not contain inplace packages such as local + -- libraries. Can be used to actually load this Component. + , componentDynFlags :: DynFlags + -- | All targets of this components. + , componentTargets :: [GHC.Target] + -- | Filepath which caused the creation of this component + , componentFP :: NormalizedFilePath + -- | Component Options used to load the component. + , componentCOptions :: ComponentOptions + -- | Maps cradle dependencies, such as `stack.yaml`, or `.cabal` file + -- to last modification time. See Note [Multi Cradle Dependency Info] + , componentDependencyInfo :: DependencyInfo + } + + +unit_flags :: [Flag (CmdLineP [String])] +unit_flags = [defFlag "unit" (SepArg addUnit)] + +addUnit :: String -> EwM (CmdLineP [String]) () +addUnit unit_str = liftEwM $ do + units <- getCmdLineState + putCmdLineState (unit_str : units) + + +-- | Create a mapping from FilePaths to HscEnvEqs +-- This combines all the components we know about into +-- an appropriate session, which is a multi component +-- session on GHC 9.4+ +newComponentCache + :: Recorder (WithPriority Log) + -> [String] -- ^ File extensions to consider + -> NormalizedFilePath -- ^ Path to file that caused the creation of this component + -> HscEnv -- ^ An empty HscEnv + -> [ComponentInfo] -- ^ New components to be loaded + -> [ComponentInfo] -- ^ old, already existing components + -> IO [ [TargetDetails] ] +newComponentCache recorder exts _cfp hsc_env old_cis new_cis = do + let cis = Map.unionWith unionCIs (mkMap new_cis) (mkMap old_cis) + -- When we have multiple components with the same uid, + -- prefer the new one over the old. + -- However, we might have added some targets to the old unit + -- (see special target), so preserve those + unionCIs new_ci old_ci = new_ci { componentTargets = componentTargets new_ci ++ componentTargets old_ci } + mkMap = Map.fromListWith unionCIs . map (\ci -> (componentUnitId ci, ci)) + let dfs = map componentDynFlags $ Map.elems cis + uids = Map.keys cis + logWith recorder Info $ LogMakingNewHscEnv uids + hscEnv' <- -- Set up a multi component session with the other units on GHC 9.4 + Compat.initUnits dfs hsc_env + + let closure_errs = maybeToList $ checkHomeUnitsClosed' (hsc_unit_env hscEnv') (hsc_all_home_unit_ids hscEnv') + closure_err_to_multi_err err = + ideErrorWithSource + (Just "cradle") (Just DiagnosticSeverity_Warning) _cfp + (T.pack (Compat.printWithoutUniques (singleMessage err))) + (Just (fmap GhcDriverMessage err)) + multi_errs = map closure_err_to_multi_err closure_errs + bad_units = OS.fromList $ concat $ do + x <- map errMsgDiagnostic closure_errs + DriverHomePackagesNotClosed us <- pure x + pure us + isBad ci = (homeUnitId_ (componentDynFlags ci)) `OS.member` bad_units + -- Whenever we spin up a session on Linux, dynamically load libm.so.6 + -- in. We need this in case the binary is statically linked, in which + -- case the interactive session will fail when trying to load + -- ghc-prim, which happens whenever Template Haskell is being + -- evaluated or haskell-language-server's eval plugin tries to run + -- some code. 
If the binary is dynamically linked, then this will have + -- no effect. + -- See https://github.com/haskell/haskell-language-server/issues/221 + -- We need to do this after the call to setSessionDynFlags initialises + -- the loader + when (os == "linux") $ do + initObjLinker hscEnv' + res <- loadDLL hscEnv' "libm.so.6" + case res of + Nothing -> pure () + Just err -> logWith recorder Error $ LogDLLLoadError err + + forM (Map.elems cis) $ \ci -> do + let df = componentDynFlags ci + thisEnv <- do + -- In GHC 9.4 we have multi component support, and we have initialised all the units + -- above. + -- We just need to set the current unit here + pure $ hscSetActiveUnitId (homeUnitId_ df) hscEnv' + henv <- newHscEnvEq thisEnv + let targetEnv = (if isBad ci then multi_errs else [], Just henv) + targetDepends = componentDependencyInfo ci + logWith recorder Debug $ LogNewComponentCache (targetEnv, targetDepends) + evaluate $ liftRnf rwhnf $ componentTargets ci + + let mk t = fromTargetId (importPaths df) exts (targetId t) targetEnv targetDepends + ctargets <- concatMapM mk (componentTargets ci) + + return (L.nubOrdOn targetTarget ctargets) + +-- | Throws if package flags are unsatisfiable +setOptions :: GhcMonad m + => NormalizedFilePath + -> ComponentOptions + -> DynFlags + -> FilePath -- ^ root dir, see Note [Root Directory] + -> m (NonEmpty (DynFlags, [GHC.Target])) +setOptions cfp (ComponentOptions theOpts compRoot _) dflags rootDir = do + ((theOpts',_errs,_warns),units) <- processCmdLineP unit_flags [] (map noLoc theOpts) + case NE.nonEmpty units of + Just us -> initMulti us + Nothing -> do + (df, targets) <- initOne (map unLoc theOpts') + -- A special target for the file which caused this wonderful + -- component to be created. In case the cradle doesn't list all the targets for + -- the component, in which case things will be horribly broken anyway. + -- + -- When we have a singleComponent that is caused to be loaded due to a + -- file, we assume the file is part of that component. This is useful + -- for bare GHC sessions, such as many of the ones used in the testsuite + -- + -- We don't do this when we have multiple components, because each + -- component better list all targets or there will be anarchy. + -- It is difficult to know which component to add our file to in + -- that case. + -- Multi unit arguments are likely to come from cabal, which + -- does list all targets. + -- + -- If we don't end up with a target for the current file in the end, then + -- we will report it as an error for that file + let abs_fp = toAbsolute rootDir (fromNormalizedFilePath cfp) + let special_target = Compat.mkSimpleTarget df abs_fp + pure $ (df, special_target : targets) :| [] + where + initMulti unitArgFiles = + forM unitArgFiles $ \f -> do + args <- liftIO $ expandResponse [f] + initOne args + initOne this_opts = do + (dflags', targets') <- addCmdOpts this_opts dflags + let dflags'' = + case unitIdString (homeUnitId_ dflags') of + -- cabal uses main for the unit id of all executable packages + -- This makes multi-component sessions confused about what + -- options to use for that component. + -- Solution: hash the options and use that as part of the unit id + -- This works because there won't be any dependencies on the + -- executable unit. 
+ "main" -> + let hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack this_opts) + hashed_uid = Compat.toUnitId (Compat.stringToUnit ("main-"++hash)) + in setHomeUnitId_ hashed_uid dflags' + _ -> dflags' + + let targets = makeTargetsAbsolute root targets' + root = case workingDirectory dflags'' of + Nothing -> compRoot + Just wdir -> compRoot wdir + let dflags''' = + setWorkingDirectory root $ + disableWarningsAsErrors $ + -- disabled, generated directly by ghcide instead + flip gopt_unset Opt_WriteInterface $ + -- disabled, generated directly by ghcide instead + -- also, it can confuse the interface stale check + dontWriteHieFiles $ + setIgnoreInterfacePragmas $ + setBytecodeLinkerOptions $ + disableOptimisation $ + Compat.setUpTypedHoles $ + makeDynFlagsAbsolute compRoot -- makeDynFlagsAbsolute already accounts for workingDirectory + dflags'' + return (dflags''', targets) + +addComponentInfo :: + MonadUnliftIO m => + Recorder (WithPriority Log) -> + (String -> [String] -> IO CacheDirs) -> + DependencyInfo -> + NonEmpty (DynFlags, [GHC.Target]) -> + (Maybe FilePath, NormalizedFilePath, ComponentOptions) -> + Map.Map (Maybe FilePath) [RawComponentInfo] -> + m (Map.Map (Maybe FilePath) [RawComponentInfo], ([ComponentInfo], [ComponentInfo])) +addComponentInfo recorder getCacheDirs dep_info newDynFlags (hieYaml, cfp, opts) m = do + -- Just deps if there's already an HscEnv + -- Nothing is it's the first time we are making an HscEnv + let oldDeps = Map.lookup hieYaml m + let -- Add the raw information about this component to the list + -- We will modify the unitId and DynFlags used for + -- compilation but these are the true source of + -- information. + new_deps = fmap (\(df, targets) -> RawComponentInfo (homeUnitId_ df) df targets cfp opts dep_info) newDynFlags + all_deps = new_deps `NE.appendList` fromMaybe [] oldDeps + -- Get all the unit-ids for things in this component + + all_deps' <- forM all_deps $ \RawComponentInfo{..} -> do + let prefix = show rawComponentUnitId + -- See Note [Avoiding bad interface files] + let cacheDirOpts = componentOptions opts + cacheDirs <- liftIO $ getCacheDirs prefix cacheDirOpts + processed_df <- setCacheDirs recorder cacheDirs rawComponentDynFlags + -- The final component information, mostly the same but the DynFlags don't + -- contain any packages which are also loaded + -- into the same component. + pure $ ComponentInfo + { componentUnitId = rawComponentUnitId + , componentDynFlags = processed_df + , componentTargets = rawComponentTargets + , componentFP = rawComponentFP + , componentCOptions = rawComponentCOptions + , componentDependencyInfo = rawComponentDependencyInfo + } + -- Modify the map so the hieYaml now maps to the newly updated + -- ComponentInfos + -- Returns + -- . The information for the new component which caused this cache miss + -- . 
The modified information (without -inplace flags) for + -- existing packages + let (new,old) = NE.splitAt (NE.length new_deps) all_deps' + pure (Map.insert hieYaml (NE.toList all_deps) m, (new,old)) + +setIgnoreInterfacePragmas :: DynFlags -> DynFlags +setIgnoreInterfacePragmas df = + gopt_set (gopt_set df Opt_IgnoreInterfacePragmas) Opt_IgnoreOptimChanges + +disableOptimisation :: DynFlags -> DynFlags +disableOptimisation df = updOptLevel 0 df + +setHiDir :: FilePath -> DynFlags -> DynFlags +setHiDir f d = + -- override user settings to avoid conflicts leading to recompilation + d { hiDir = Just f} + +setODir :: FilePath -> DynFlags -> DynFlags +setODir f d = + -- override user settings to avoid conflicts leading to recompilation + d { objectDir = Just f} + +data CacheDirs = CacheDirs + { hiCacheDir, hieCacheDir, oCacheDir :: Maybe FilePath} + +{- Note [Avoiding bad interface files] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Originally, we set the cache directory for the various components once +on the first occurrence of the component. +This works fine if these components have no references to each other, +but you have components that depend on each other, the interface files are +updated for each component. +After restarting the session and only opening the component that depended +on the other, suddenly the interface files of this component are stale. +However, from the point of view of `ghcide`, they do not look stale, +thus, not regenerated and the IDE shows weird errors such as: +``` +typecheckIface +Declaration for Rep_ClientRunFlags +Axiom branches Rep_ClientRunFlags: + Failed to load interface for ‘Distribution.Simple.Flag’ + Use -v to see a list of the files searched for. +``` +and +``` +expectJust checkFamInstConsistency +CallStack (from HasCallStack): + error, called at compiler\\utils\\Maybes.hs:55:27 in ghc:Maybes + expectJust, called at compiler\\typecheck\\FamInst.hs:461:30 in ghc:FamInst +``` + +To mitigate this, we set the cache directory for each component dependent +on the components of the current `HscEnv`, additionally to the component options +of the respective components. +Assume two components, c1, c2, where c2 depends on c1, and the options of the +respective components are co1, co2. +If we want to load component c2, followed by c1, we set the cache directory for +each component in this way: + + * Load component c2 + * (Cache Directory State) + - name of c2 + co2 + * Load component c1 + * (Cache Directory State) + - name of c2 + name of c1 + co2 + - name of c2 + name of c1 + co1 + +Overall, we created three cache directories. If we opened c1 first, then we +create a fourth cache directory. +This makes sure that interface files are always correctly updated. + +Since this causes a lot of recompilation, we only update the cache-directory, +if the dependencies of a component have really changed. +E.g. when you load two executables, they can not depend on each other. They +should be filtered out, such that we dont have to re-compile everything. +-} + + +-- | Set the cache-directory based on the ComponentOptions and a list of +-- internal packages. +-- For the exact reason, see Note [Avoiding bad interface files]. 
+setCacheDirs :: MonadUnliftIO m => Recorder (WithPriority Log) -> CacheDirs -> DynFlags -> m DynFlags +setCacheDirs recorder CacheDirs{..} dflags = do + logWith recorder Info $ LogInterfaceFilesCacheDir (fromMaybe cacheDir hiCacheDir) + pure $ dflags + & maybe id setHiDir hiCacheDir + & maybe id setHieDir hieCacheDir + & maybe id setODir oCacheDir + +getCacheDirsDefault :: String -> [String] -> IO CacheDirs +getCacheDirsDefault prefix opts = do + dir <- Just <$> getXdgDirectory XdgCache (cacheDir prefix ++ "-" ++ opts_hash) + return $ CacheDirs dir dir dir + where + -- Create a unique folder per set of different GHC options, assuming that each different set of + -- GHC options will create incompatible interface files. + opts_hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack opts) + +setNameCache :: NameCache -> HscEnv -> HscEnv +setNameCache nc hsc = hsc { hsc_NC = nc } + +-- | Sub directory for the cache path +cacheDir :: String +cacheDir = "ghcide" + +emptyHscEnv :: NameCache -> FilePath -> IO HscEnv +emptyHscEnv nc libDir = do + -- We call setSessionDynFlags so that the loader is initialised + -- We need to do this before we call initUnits. + env <- liftIO $ runGhc (Just libDir) $ + getSessionDynFlags >>= setSessionDynFlags >> getSession + pure $ setNameCache nc (hscSetFlags ((hsc_dflags env){useUnicode = True }) env) + +-- ---------------------------------------------------------------------------- +-- Target Details +-- ---------------------------------------------------------------------------- + +data TargetDetails = TargetDetails + { + targetTarget :: !Target, + targetEnv :: !(IdeResult HscEnvEq), + targetDepends :: !DependencyInfo, + targetLocations :: ![NormalizedFilePath] + } + +fromTargetId :: [FilePath] -- ^ import paths + -> [String] -- ^ extensions to consider + -> TargetId + -> IdeResult HscEnvEq + -> DependencyInfo + -> IO [TargetDetails] +-- For a target module we consider all the import paths +fromTargetId is exts (GHC.TargetModule modName) env dep = do + let fps = [i moduleNameSlashes modName -<.> ext <> boot + | ext <- exts + , i <- is + , boot <- ["", "-boot"] + ] + let locs = fmap toNormalizedFilePath' fps + return [TargetDetails (TargetModule modName) env dep locs] +-- For a 'TargetFile' we consider all the possible module names +fromTargetId _ _ (GHC.TargetFile f _) env deps = do + let nf = toNormalizedFilePath' f + let other + | "-boot" `isSuffixOf` f = toNormalizedFilePath' (L.dropEnd 5 $ fromNormalizedFilePath nf) + | otherwise = toNormalizedFilePath' (fromNormalizedFilePath nf ++ "-boot") + return [TargetDetails (TargetFile nf) env deps [nf, other]] + +-- ---------------------------------------------------------------------------- +-- Backwards compatibility +-- ---------------------------------------------------------------------------- + +#if MIN_VERSION_ghc(9,13,0) +-- Moved back to implementation in GHC. +checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> [DriverMessages] +checkHomeUnitsClosed' ue _ = checkHomeUnitsClosed ue +#else +-- This function checks the important property that if both p and q are home units +-- then any dependency of p, which transitively depends on q is also a home unit. 
+-- GHC had an implementation of this function, but it was horribly inefficient +-- We should move back to the GHC implementation on compilers where +-- https://gitlab.haskell.org/ghc/ghc/-/merge_requests/12162 is included +checkHomeUnitsClosed' :: UnitEnv -> OS.Set UnitId -> Maybe (Compat.MsgEnvelope DriverMessage) +checkHomeUnitsClosed' ue home_id_set + | OS.null bad_unit_ids = Nothing + | otherwise = Just (GHC.mkPlainErrorMsgEnvelope rootLoc $ DriverHomePackagesNotClosed (OS.toList bad_unit_ids)) + where + bad_unit_ids = upwards_closure OS.\\ home_id_set + rootLoc = mkGeneralSrcSpan (Compat.fsLit "") + + graph :: Graph (Node UnitId UnitId) + graph = graphFromEdgedVerticesUniq graphNodes + + -- downwards closure of graph + downwards_closure + = graphFromEdgedVerticesUniq [ DigraphNode uid uid (OS.toList deps) + | (uid, deps) <- Map.toList (allReachable graph node_key)] + + inverse_closure = transposeG downwards_closure + + upwards_closure = OS.fromList $ map node_key $ reachablesG inverse_closure [DigraphNode uid uid [] | uid <- OS.toList home_id_set] + + all_unit_direct_deps :: UniqMap UnitId (OS.Set UnitId) + all_unit_direct_deps + = unitEnv_foldWithKey go emptyUniqMap $ ue_home_unit_graph ue + where + go rest this this_uis = + plusUniqMap_C OS.union + (addToUniqMap_C OS.union external_depends this (OS.fromList this_deps)) + rest + where + external_depends = mapUniqMap (OS.fromList . unitDepends) +#if !MIN_VERSION_ghc(9,7,0) + $ listToUniqMap $ Map.toList +#endif + + $ unitInfoMap this_units + this_units = homeUnitEnv_units this_uis + this_deps = [ Compat.toUnitId unit | (unit,Just _) <- explicitUnits this_units] + + graphNodes :: [Node UnitId UnitId] + graphNodes = go OS.empty home_id_set + where + go done todo + = case OS.minView todo of + Nothing -> [] + Just (uid, todo') + | OS.member uid done -> go done todo' + | otherwise -> case lookupUniqMap all_unit_direct_deps uid of + Nothing -> pprPanic "uid not found" (Compat.ppr (uid, all_unit_direct_deps)) + Just depends -> + let todo'' = (depends OS.\\ done) `OS.union` todo' + in DigraphNode uid uid (OS.toList depends) : go (OS.insert uid done) todo'' +#endif From ed37a9ecbb7d53dd0fb1b278abc3314eb20e79e2 Mon Sep 17 00:00:00 2001 From: Guillaume Bouchard Date: Sun, 6 Jul 2025 12:04:47 +0400 Subject: [PATCH 62/90] fix: create directory to dump debug ast --- .../src/Development/IDE/Plugin/CodeAction/Util.hs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Util.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Util.hs index 40f3c76127..2a7719fdbe 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Util.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/Util.hs @@ -10,6 +10,7 @@ import Development.IDE.GHC.Compat.ExactPrint as GHC import Development.IDE.GHC.Dump (showAstDataHtml) import GHC.Stack import GHC.Utils.Outputable +import System.Directory.Extra (createDirectoryIfMissing) import System.Environment.Blank (getEnvDefault) import System.IO.Unsafe import Text.Printf @@ -37,6 +38,7 @@ traceAst lbl x doTrace = unsafePerformIO $ do u <- U.newUnique let htmlDumpFileName = printf "/tmp/hls/%s-%s-%d.html" (show timestamp) lbl (U.hashUnique u) + createDirectoryIfMissing True "/tmp/hls" writeFile htmlDumpFileName $ renderDump htmlDump return $ unlines [prettyCallStack callStack ++ ":" From dc4e674bc84fedb16e218d4e05e9519be28e0506 Mon Sep 17 00:00:00 2001 From: Guillaume Bouchard Date: Sun, 6 Jul 2025 
12:04:47 +0400
Subject: [PATCH 63/90] feat(test): add a repro for 4648

---
 plugins/hls-refactor-plugin/test/Main.hs | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/plugins/hls-refactor-plugin/test/Main.hs b/plugins/hls-refactor-plugin/test/Main.hs
index da45083a08..70cea60a35 100644
--- a/plugins/hls-refactor-plugin/test/Main.hs
+++ b/plugins/hls-refactor-plugin/test/Main.hs
@@ -3036,6 +3036,21 @@ addFunctionConstraintTests = let
       , "eq (Pair x y) (Pair x' y') = x == x' && y == y'"
       ]
 
+  -- See https://github.com/haskell/haskell-language-server/issues/4648
+  -- When haddock comment appears after the =>, code action was introducing the
+  -- new constraint in the comment
+  incompleteConstraintSourceCodeWithCommentInTypeSignature :: T.Text -> T.Text
+  incompleteConstraintSourceCodeWithCommentInTypeSignature constraint =
+    T.unlines
+
+      [ "module Testing where"
+      , "foo "
+      , "  :: (" <> constraint <> ") =>"
+      , "  -- This is a comment"
+      , "  m ()"
+      , "foo = pure ()"
+      ]
+
   missingMonadConstraint constraint = T.unlines
     [ "module Testing where"
     , "f :: " <> constraint <> "m ()"
@@ -3079,6 +3094,11 @@ addFunctionConstraintTests = let
         "Add `Eq b` to the context of the type signature for `eq`"
         (incompleteConstraintSourceCodeWithNewlinesInTypeSignature "Eq a")
         (incompleteConstraintSourceCodeWithNewlinesInTypeSignature "Eq a, Eq b")
+    , checkCodeAction
+        "preexisting constraint, with haddock comment in type signature"
+        "Add `Applicative m` to the context of the type signature for `foo`"
+        (incompleteConstraintSourceCodeWithCommentInTypeSignature "")
+        (incompleteConstraintSourceCodeWithCommentInTypeSignature "Applicative m")
     , checkCodeAction
         "missing Monad constraint"
         "Add `Monad m` to the context of the type signature for `f`"

From 8cac5fb170508d57b56a2041a1db4ae47202fd69 Mon Sep 17 00:00:00 2001
From: Guillaume Bouchard
Date: Sun, 6 Jul 2025 15:07:00 +0400
Subject: [PATCH 64/90] fix: produce valid code when adding constraint to context

This closes https://github.com/haskell/haskell-language-server/issues/4648.

When adding a constraint to a context which is followed by a comment, such as:

```
foo
  :: (Monad m) =>
  -- | This is a comment
  m ()
```

The comment annotation is anchored to the previous token, which is `=>` in
this context. If we add a new constraint to the context, the newly generated
content goes beyond the anchor and, depending on the GHC version or on
ghc-exactprint (the reason is not fully understood), the comment is printed
BEFORE the new constraint, leading to invalid syntax, such as
`(Monad m -- | This is a comment , Applicative m =>)`.

This commit moves all the comments of the block to the end of the block using
the `followingComments` of `EpAnnComments`.

It seems super ad hoc, but actually, consider the following example:

```haskell
bar ::
  -- BEFORE
  {- yoto -}
  (Monad m {- yiti -}){- yutu -} =>
  {- yete -}
  -- Trailing
  -- After
  m ()
```

Comments `BEFORE` and `yoto` are attached to the previous block. Comment
`yiti` is attached to `Monad m`. The comments `yiti`, `yutu`, `yete`,
`Trailing` and `After` are all attached to this block and will hence be moved
after the block. However, this is not an easy task: all the associated
comments should be moved by the relevant offset. TODO: do that instead.
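For illustration only: the shape of the fix is simply to empty the
prior-comment slot and append its contents to the following-comment slot.
The sketch below is a toy model, not the GHC / ghc-exactprint `EpAnnComments`
API used in the actual change, and every name in it is made up:

```haskell
-- Toy model of an annotation carrying comments printed before and after a
-- block. Only illustrates the idea of the fix: comments that would be printed
-- before the extended block are relocated so they can only appear after it.
data ToyComments = ToyComments
  { toyPrior     :: [String]
  , toyFollowing :: [String]
  } deriving Show

-- Move every "prior" comment to the "following" position, so newly generated
-- tokens can never land inside a comment.
moveToyCommentsToTheEnd :: ToyComments -> ToyComments
moveToyCommentsToTheEnd (ToyComments prior following) =
  ToyComments { toyPrior = [], toyFollowing = prior <> following }

main :: IO ()
main = print (moveToyCommentsToTheEnd (ToyComments ["-- | This is a comment"] []))
```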
---
 .../IDE/Plugin/CodeAction/ExactPrint.hs       | 25 +++++++++++++++++--
 plugins/hls-refactor-plugin/test/Main.hs      |  4 +--
 2 files changed, 25 insertions(+), 4 deletions(-)

diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs
index 0f48a3a649..bffd2a611c 100644
--- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs
+++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction/ExactPrint.hs
@@ -63,6 +63,7 @@ import GHC (addAnns, ann)
 
 #if MIN_VERSION_ghc(9,9,0)
 import GHC (NoAnn (..))
+import GHC (EpAnnComments (..))
 #endif
 
 ------------------------------------------------------------------------------
@@ -170,7 +171,7 @@ appendConstraint constraintT = go . traceAst "appendConstraint"
       constraint <- liftParseAST df constraintT
       constraint <- pure $ setEntryDP constraint (SameLine 1)
 #if MIN_VERSION_ghc(9,9,0)
-      let l'' = fmap (addParensToCtxt close_dp) l'
+      let l'' = moveCommentsToTheEnd $ fmap (addParensToCtxt close_dp) l'
 #else
      let l'' = (fmap.fmap) (addParensToCtxt close_dp) l'
 #endif
@@ -205,6 +206,26 @@ appendConstraint constraintT = go . traceAst "appendConstraint"
         return $ reLocA $ L lTop $ HsQualTy noExtField context ast
 
+#if MIN_VERSION_ghc(9,9,0)
+-- | This moves the comment annotations toward the end of the block.
+-- This is useful when extending a block, so that the comments correctly
+-- appear after it.
+--
+-- See https://github.com/haskell/haskell-language-server/issues/4648 for
+-- discussion.
+--
+-- For example, given the element @(Foo) => -- hello@, when introducing an
+-- additional constraint `Bar`, instead of getting @(Foo, Bar) => -- hello@,
+-- we get @(Foo, -- hello Bar) =>@
+--
+-- It is a bit painful that the pretty printer is not able to realize that it
+-- introduces the token `=>` inside the comment and to do something meaningful
+-- instead, but that's another story.
+moveCommentsToTheEnd :: EpAnn ann -> EpAnn ann
+moveCommentsToTheEnd (EpAnn entry anns (EpaComments priors)) = EpAnn entry anns (EpaCommentsBalanced { priorComments = [], followingComments = priors})
+moveCommentsToTheEnd (EpAnn entry anns (EpaCommentsBalanced priors following)) = EpAnn entry anns (EpaCommentsBalanced { priorComments = [], followingComments = priors <> following})
+#endif
+
 liftParseAST :: forall ast l.
(ASTElement l ast, ExactPrint (LocatedAn l ast)) => DynFlags -> String -> TransformT (Either String) (LocatedAn l ast) @@ -500,7 +521,7 @@ extendHiding symbol (L l idecls) mlies df = do Nothing -> do #if MIN_VERSION_ghc(9,11,0) let ann :: EpAnn (AnnList (EpToken "hiding", [EpToken ","])) - ann = noAnnSrcSpanDP0 + ann = noAnnSrcSpanDP0 #elif MIN_VERSION_ghc(9,9,0) let ann = noAnnSrcSpanDP0 #else diff --git a/plugins/hls-refactor-plugin/test/Main.hs b/plugins/hls-refactor-plugin/test/Main.hs index 70cea60a35..b06b41ccba 100644 --- a/plugins/hls-refactor-plugin/test/Main.hs +++ b/plugins/hls-refactor-plugin/test/Main.hs @@ -3045,7 +3045,7 @@ addFunctionConstraintTests = let [ "module Testing where" , "foo " - , " :: (" <> constraint <> ") =>" + , " :: ("<> constraint <> ") =>" , " -- This is a comment" , " m ()" , "foo = pure ()" @@ -3098,7 +3098,7 @@ addFunctionConstraintTests = let "preexisting constraint, with haddock comment in type signature" "Add `Applicative m` to the context of the type signature for `foo`" (incompleteConstraintSourceCodeWithCommentInTypeSignature "") - (incompleteConstraintSourceCodeWithCommentInTypeSignature "Applicative m") + (incompleteConstraintSourceCodeWithCommentInTypeSignature " Applicative m") , checkCodeAction "missing Monad constraint" "Add `Monad m` to the context of the type signature for `f`" From ec3c09ad9da8f196a064e34a34abf388120f648c Mon Sep 17 00:00:00 2001 From: wz1000 Date: Thu, 10 Jul 2025 19:57:16 +0530 Subject: [PATCH 65/90] hls-cabal-plugin: Fix cabal-add bound (#4642) (#4652) * hls-cabal-plugin: Fix cabal-add bound (#4642) * Update haskell-language-server.cabal --------- Co-authored-by: fendor --- haskell-language-server.cabal | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index ec397952cb..f4066dca94 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -282,7 +282,7 @@ library hls-cabal-plugin , transformers , unordered-containers >=0.2.10.0 , containers - , cabal-add + , cabal-add ^>=0.1 , process , aeson , Cabal From 0a525589b24389b882da6a72264c7b484e20adc8 Mon Sep 17 00:00:00 2001 From: Mango The Fourth <40720523+MangoIV@users.noreply.github.com> Date: Fri, 11 Jul 2025 15:31:53 +0200 Subject: [PATCH 66/90] [fix] evaluate key in lookupKeyValue to avoid reordering with newKey (#4654) --- hls-graph/src/Development/IDE/Graph/Internal/Key.hs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/hls-graph/src/Development/IDE/Graph/Internal/Key.hs b/hls-graph/src/Development/IDE/Graph/Internal/Key.hs index ba303cdb99..85cebeb110 100644 --- a/hls-graph/src/Development/IDE/Graph/Internal/Key.hs +++ b/hls-graph/src/Development/IDE/Graph/Internal/Key.hs @@ -34,6 +34,7 @@ module Development.IDE.Graph.Internal.Key ) where --import Control.Monad.IO.Class () +import Control.Exception (evaluate) import Data.Coerce import Data.Dynamic import qualified Data.HashMap.Strict as Map @@ -85,8 +86,15 @@ newKey k = unsafePerformIO $ do lookupKeyValue :: Key -> KeyValue lookupKeyValue (UnsafeMkKey x) = unsafePerformIO $ do + -- NOTE: + -- The reason for this evaluate is that the x, if not forced yet, is a thunk + -- that forces the atomicModifyIORef' in the creation of the new key. If it + -- isn't forced *before* reading the keyMap, the keyMap will only obtain the new + -- key (x) *after* the IntMap is already copied out of the keyMap reference, + -- i.e. when it is forced for the lookup in the IntMap. 
+ k <- evaluate x GlobalKeyValueMap _ im _ <- readIORef keyMap - pure $! im IM.! x + pure $! im IM.! k {-# NOINLINE lookupKeyValue #-} From c3b61feccbc87857390b9fdb542ce0b3a701d074 Mon Sep 17 00:00:00 2001 From: fendor Date: Mon, 14 Jul 2025 14:37:18 +0200 Subject: [PATCH 67/90] Use hie-bios 0.16 (#4647) * Use hie-bios 0.16 * Strip RTS and verbosity flags after -unit flag parsing * Add RTS flags to test cases to make sure they are stripped out * pre-commit hook --------- Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- cabal.project | 2 +- ghcide-test/data/multi-unit/a-1.0.0-inplace | 3 +++ ghcide-test/data/multi-unit/c-1.0.0-inplace | 2 ++ ghcide-test/exe/CradleTests.hs | 6 +++++- ghcide/ghcide.cabal | 2 +- ghcide/session-loader/Development/IDE/Session.hs | 6 +++++- stack-lts22.yaml | 2 +- stack.yaml | 2 +- 8 files changed, 19 insertions(+), 6 deletions(-) diff --git a/cabal.project b/cabal.project index 92954ec729..17524ede42 100644 --- a/cabal.project +++ b/cabal.project @@ -8,7 +8,7 @@ packages: ./hls-test-utils -index-state: 2025-06-16T09:44:13Z +index-state: 2025-07-09T16:51:20Z tests: True test-show-details: direct diff --git a/ghcide-test/data/multi-unit/a-1.0.0-inplace b/ghcide-test/data/multi-unit/a-1.0.0-inplace index a54ea9bc4b..cab2b716ff 100644 --- a/ghcide-test/data/multi-unit/a-1.0.0-inplace +++ b/ghcide-test/data/multi-unit/a-1.0.0-inplace @@ -16,3 +16,6 @@ base text -XHaskell98 A ++RTS +-A32M +-RTS diff --git a/ghcide-test/data/multi-unit/c-1.0.0-inplace b/ghcide-test/data/multi-unit/c-1.0.0-inplace index 7201a40de4..7421d59279 100644 --- a/ghcide-test/data/multi-unit/c-1.0.0-inplace +++ b/ghcide-test/data/multi-unit/c-1.0.0-inplace @@ -17,3 +17,5 @@ a-1.0.0-inplace base -XHaskell98 C ++RTS +-A32M diff --git a/ghcide-test/exe/CradleTests.hs b/ghcide-test/exe/CradleTests.hs index 046b8bbf2f..d79b90c835 100644 --- a/ghcide-test/exe/CradleTests.hs +++ b/ghcide-test/exe/CradleTests.hs @@ -117,7 +117,11 @@ simpleSubDirectoryTest = multiTests :: FilePath -> [TestTree] multiTests dir = - [simpleMultiTest dir, simpleMultiTest2 dir, simpleMultiTest3 dir, simpleMultiDefTest dir] + [ simpleMultiTest dir + , simpleMultiTest2 dir + , simpleMultiTest3 dir + , simpleMultiDefTest dir + ] multiTestName :: FilePath -> String -> String multiTestName dir name = "simple-" ++ dir ++ "-" ++ name diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal index 416e389f2f..2fcca48d6d 100644 --- a/ghcide/ghcide.cabal +++ b/ghcide/ghcide.cabal @@ -73,7 +73,7 @@ library , Glob , haddock-library >=1.8 && <1.12 , hashable - , hie-bios ^>=0.15.0 + , hie-bios ^>=0.16.0 , hie-compat ^>=0.3.0.0 , hiedb ^>= 0.7.0.0 , hls-graph == 2.11.0.0 diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 1bcec71181..77677ce3a0 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -67,6 +67,7 @@ import Development.IDE.Types.Location import Development.IDE.Types.Options import GHC.ResponseFile import qualified HIE.Bios as HieBios +import qualified HIE.Bios.Cradle.Utils as HieBios import HIE.Bios.Environment hiding (getCacheDir) import HIE.Bios.Types hiding (Log) import qualified HIE.Bios.Types as HieBios @@ -1144,7 +1145,10 @@ setOptions cfp (ComponentOptions theOpts compRoot _) dflags rootDir = do initMulti unitArgFiles = forM unitArgFiles $ \f -> do args <- liftIO $ expandResponse [f] - initOne args + -- The reponse files may contain arguments like "+RTS", + -- 
and hie-bios doesn't expand the response files of @-unit@ arguments. + -- Thus, we need to do the stripping here. + initOne $ HieBios.removeRTS $ HieBios.removeVerbosityOpts args initOne this_opts = do (dflags', targets') <- addCmdOpts this_opts dflags let dflags'' = diff --git a/stack-lts22.yaml b/stack-lts22.yaml index 63efc35f30..16687bbf3e 100644 --- a/stack-lts22.yaml +++ b/stack-lts22.yaml @@ -22,7 +22,7 @@ extra-deps: - Diff-0.5 - floskell-0.11.1 - hiedb-0.7.0.0 - - hie-bios-0.15.0 + - hie-bios-0.16.0 - implicit-hie-0.1.4.0 - lsp-2.7.0.0 - lsp-test-0.17.1.0 diff --git a/stack.yaml b/stack.yaml index f6dd73d66a..145d2cd0b7 100644 --- a/stack.yaml +++ b/stack.yaml @@ -24,7 +24,7 @@ extra-deps: - floskell-0.11.1 - hiedb-0.7.0.0 - implicit-hie-0.1.4.0 - - hie-bios-0.15.0 + - hie-bios-0.16.0 - hw-fingertree-0.1.2.1 - monad-dijkstra-0.1.1.5 - retrie-1.2.3 From 2c200b42586f65bce39b207fcb0898cc087b0242 Mon Sep 17 00:00:00 2001 From: August Johansson <148627186+webdevred@users.noreply.github.com> Date: Wed, 16 Jul 2025 13:14:14 +0200 Subject: [PATCH 68/90] Show LaTeX math expressions in haddockToMarkdown (#4658) * Show LaTeX math expressions in haddockToMarkdown - Replace fallback messages with raw LaTeX math expressions using $...$ and $$...$$. - This lets editors display the original math content even if they don't render LaTeX. - No sanitization is performed, raw LaTeX is output as-is. * Do backticks for inline math and fenced latex blocks for math blocks --- ghcide/src/Development/IDE/Spans/Common.hs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/ghcide/src/Development/IDE/Spans/Common.hs b/ghcide/src/Development/IDE/Spans/Common.hs index f3e86d792d..996e55ef1a 100644 --- a/ghcide/src/Development/IDE/Spans/Common.hs +++ b/ghcide/src/Development/IDE/Spans/Common.hs @@ -190,11 +190,10 @@ haddockToMarkdown (H.DocOrderedList things) = haddockToMarkdown (H.DocDefList things) = '\n' : (unlines $ map (\(term, defn) -> "+ **" ++ haddockToMarkdown term ++ "**: " ++ haddockToMarkdown defn) things) --- we cannot render math by default -haddockToMarkdown (H.DocMathInline _) - = "*cannot render inline math formula*" -haddockToMarkdown (H.DocMathDisplay _) - = "\n\n*cannot render display math formula*\n\n" +haddockToMarkdown (H.DocMathInline s) + = "`" ++ s ++ "`" +haddockToMarkdown (H.DocMathDisplay s) + = "\n```latex\n" ++ s ++ "\n```\n" -- TODO: render tables haddockToMarkdown (H.DocTable _t) From ae5f6a7bd27771438970fb59fe0cc6996dbc0b1a Mon Sep 17 00:00:00 2001 From: Sean Gillespie Date: Fri, 18 Jul 2025 08:32:45 -0400 Subject: [PATCH 69/90] Use structured diagnostics for type wildcard fill suggestions (#4664) * Use structured diagnostics for type wildcard fill suggestions * Fix formatting * Fix compilation error for GHC-9.10+ for hls-refactor-plugin --- .../src/Development/IDE/GHC/Compat/Error.hs | 11 ++ .../IDE/Plugin/Plugins/FillTypeWildcard.hs | 132 +++++++++++------- plugins/hls-refactor-plugin/test/Main.hs | 32 ++++- 3 files changed, 116 insertions(+), 59 deletions(-) diff --git a/ghcide/src/Development/IDE/GHC/Compat/Error.hs b/ghcide/src/Development/IDE/GHC/Compat/Error.hs index e4fb9c26b4..de59afa146 100644 --- a/ghcide/src/Development/IDE/GHC/Compat/Error.hs +++ b/ghcide/src/Development/IDE/GHC/Compat/Error.hs @@ -8,6 +8,7 @@ module Development.IDE.GHC.Compat.Error ( -- * Error messages for the typechecking and renamer phase TcRnMessage (..), TcRnMessageDetailed (..), + Hole(..), stripTcRnMessageContext, -- * Parsing error message PsMessage(..), @@ -23,9 
+24,14 @@ module Development.IDE.GHC.Compat.Error ( _GhcPsMessage, _GhcDsMessage, _GhcDriverMessage, + _ReportHoleError, + _TcRnIllegalWildcardInType, + _TcRnPartialTypeSignatures, _TcRnMissingSignature, _TcRnSolverReport, _TcRnMessageWithInfo, + _TypeHole, + _ConstraintHole, reportContextL, reportContentL, _MismatchMessage, @@ -38,6 +44,7 @@ import Development.IDE.GHC.Compat (Type) import GHC.Driver.Errors.Types import GHC.HsToCore.Errors.Types import GHC.Tc.Errors.Types +import GHC.Tc.Types.Constraint (Hole (..), HoleSort) import GHC.Types.Error -- | Some 'TcRnMessage's are nested in other constructors for additional context. @@ -95,6 +102,10 @@ makeLensesWith (lensRules & lensField .~ mappingNamer (pure . (++ "L"))) ''SolverReportWithCtxt +makePrisms ''TcSolverReportMsg + +makePrisms ''HoleSort + -- | Focus 'MismatchMsg' from 'TcSolverReportMsg'. Currently, 'MismatchMsg' can be -- extracted from 'CannotUnifyVariable' and 'Mismatch' constructors. _MismatchMessage :: Traversal' TcSolverReportMsg MismatchMsg diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillTypeWildcard.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillTypeWildcard.hs index 17db1f0298..0f06fff2f7 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillTypeWildcard.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/Plugins/FillTypeWildcard.hs @@ -2,78 +2,106 @@ module Development.IDE.Plugin.Plugins.FillTypeWildcard ( suggestFillTypeWildcard ) where -import Data.Char -import qualified Data.Text as T -import Language.LSP.Protocol.Types (Diagnostic (..), - TextEdit (TextEdit)) +import Control.Lens +import Data.Maybe (isJust) +import qualified Data.Text as T +import Development.IDE (FileDiagnostic (..), + fdStructuredMessageL, + printOutputable) +import Development.IDE.GHC.Compat hiding (vcat) +import Development.IDE.GHC.Compat.Error +import Development.IDE.Types.Diagnostics (_SomeStructuredMessage) +import GHC.Tc.Errors.Types (ErrInfo (..)) +import Language.LSP.Protocol.Types (Diagnostic (..), + TextEdit (TextEdit)) -suggestFillTypeWildcard :: Diagnostic -> [(T.Text, TextEdit)] -suggestFillTypeWildcard Diagnostic{_range=_range,..} +suggestFillTypeWildcard :: FileDiagnostic -> [(T.Text, TextEdit)] +suggestFillTypeWildcard diag@FileDiagnostic{fdLspDiagnostic = Diagnostic {..}} -- Foo.hs:3:8: error: -- * Found type wildcard `_' standing for `p -> p1 -> p' - | "Found type wildcard" `T.isInfixOf` _message - , " standing for " `T.isInfixOf` _message - , typeSignature <- extractWildCardTypeSignature _message - = [("Use type signature: ‘" <> typeSignature <> "’", TextEdit _range typeSignature)] + | isWildcardDiagnostic diag + , typeSignature <- extractWildCardTypeSignature diag = + [("Use type signature: ‘" <> typeSignature <> "’", TextEdit _range typeSignature)] | otherwise = [] +isWildcardDiagnostic :: FileDiagnostic -> Bool +isWildcardDiagnostic = + maybe False (isJust . (^? _TypeHole) . hole_sort) . diagReportHoleError + +-- | Extract the 'Hole' out of a 'FileDiagnostic' +diagReportHoleError :: FileDiagnostic -> Maybe Hole +diagReportHoleError diag = do + solverReport <- + diag + ^? fdStructuredMessageL + . _SomeStructuredMessage + . msgEnvelopeErrorL + . _TcRnMessage + . _TcRnSolverReport + . _1 + (hole, _) <- solverReport ^? reportContentL . _ReportHoleError + + Just hole + -- | Extract the type and surround it in parentheses except in obviously safe cases. 
-- -- Inferring when parentheses are actually needed around the type signature would -- require understanding both the precedence of the context of the hole and of -- the signature itself. Inserting them (almost) unconditionally is ugly but safe. -extractWildCardTypeSignature :: T.Text -> T.Text -extractWildCardTypeSignature msg - | enclosed || not isApp || isToplevelSig = sig - | otherwise = "(" <> sig <> ")" - where - msgSigPart = snd $ T.breakOnEnd "standing for " msg - (sig, rest) = T.span (/='’') . T.dropWhile (=='‘') . T.dropWhile (/='‘') $ msgSigPart - -- If we're completing something like ‘foo :: _’ parens can be safely omitted. - isToplevelSig = errorMessageRefersToToplevelHole rest - -- Parenthesize type applications, e.g. (Maybe Char). - isApp = T.any isSpace sig - -- Do not add extra parentheses to lists, tuples and already parenthesized types. - enclosed = - case T.uncons sig of +extractWildCardTypeSignature :: FileDiagnostic -> T.Text +extractWildCardTypeSignature diag = + case hole_ty <$> diagReportHoleError diag of + Just ty + | isTopLevel || not (isApp ty) || enclosed ty -> printOutputable ty + | otherwise -> "(" <> printOutputable ty <> ")" Nothing -> error "GHC provided invalid type" - Just (firstChr, _) -> not (T.null sig) && (firstChr, T.last sig) `elem` [('(', ')'), ('[', ']')] + where + isTopLevel :: Bool + isTopLevel = + maybe False errorMessageRefersToToplevelHole (diagErrInfoContext diag) + + isApp :: Type -> Bool + isApp (AppTy _ _) = True + isApp (TyConApp _ (_ : _)) = True + isApp (FunTy{}) = True + isApp _ = False + + enclosed :: Type -> Bool + enclosed (TyConApp con _) + | con == listTyCon || isTupleTyCon con = True + enclosed _ = False + +-- | Extract the 'ErrInfo' context out of a 'FileDiagnostic' and render it to +-- 'Text' +diagErrInfoContext :: FileDiagnostic -> Maybe T.Text +diagErrInfoContext diag = do + (_, detailedMsg) <- + diag + ^? fdStructuredMessageL + . _SomeStructuredMessage + . msgEnvelopeErrorL + . _TcRnMessageWithCtx + . _TcRnMessageWithInfo + let TcRnMessageDetailed err _ = detailedMsg + ErrInfo errInfoCtx _ = err + + Just (printOutputable errInfoCtx) --- | Detect whether user wrote something like @foo :: _@ or @foo :: (_, Int)@. +-- | Detect whether user wrote something like @foo :: _@ or @foo :: Maybe _@. -- The former is considered toplevel case for which the function returns 'True', -- the latter is not toplevel and the returned value is 'False'. 
-- --- When type hole is at toplevel then there’s a line starting with --- "• In the type signature" which ends with " :: _" like in the +-- When type hole is at toplevel then the ErrInfo context starts with +-- "In the type signature" which ends with " :: _" like in the -- following snippet: -- --- source/library/Language/Haskell/Brittany/Internal.hs:131:13: error: --- • Found type wildcard ‘_’ standing for ‘HsDecl GhcPs’ --- To use the inferred type, enable PartialTypeSignatures --- • In the type signature: decl :: _ --- In an equation for ‘splitAnnots’: --- splitAnnots m@HsModule {hsmodAnn, hsmodDecls} --- = undefined --- where --- ann :: SrcSpanAnnA --- decl :: _ --- L ann decl = head hsmodDecls --- • Relevant bindings include --- [REDACTED] +-- Just "In the type signature: decl :: _" -- -- When type hole is not at toplevel there’s a stack of where -- the hole was located ending with "In the type signature": -- --- source/library/Language/Haskell/Brittany/Internal.hs:130:20: error: --- • Found type wildcard ‘_’ standing for ‘GhcPs’ --- To use the inferred type, enable PartialTypeSignatures --- • In the first argument of ‘HsDecl’, namely ‘_’ --- In the type ‘HsDecl _’ --- In the type signature: decl :: HsDecl _ --- • Relevant bindings include --- [REDACTED] +-- Just "In the first argument of ‘HsDecl’\nIn the type signature: decl :: HsDecl _" errorMessageRefersToToplevelHole :: T.Text -> Bool errorMessageRefersToToplevelHole msg = - not (T.null prefix) && " :: _" `T.isSuffixOf` T.takeWhile (/= '\n') rest - where - (prefix, rest) = T.breakOn "• In the type signature:" msg + "In the type signature:" `T.isPrefixOf` msg + && " :: _" `T.isSuffixOf` T.takeWhile (/= '\n') msg diff --git a/plugins/hls-refactor-plugin/test/Main.hs b/plugins/hls-refactor-plugin/test/Main.hs index b06b41ccba..508d480c63 100644 --- a/plugins/hls-refactor-plugin/test/Main.hs +++ b/plugins/hls-refactor-plugin/test/Main.hs @@ -701,6 +701,10 @@ typeWildCardActionTests = testGroup "type wildcard actions" "func::Integer -> Integer -> Integer" , "func x y = x + y" ] + , testNoUseTypeSignature "ignores typed holes" + [ "func :: a -> a" + , "func x = _" + ] , testGroup "add parens if hole is part of bigger type" [ testUseTypeSignature "subtype 1" [ "func :: _ -> Integer -> Integer" @@ -736,19 +740,33 @@ typeWildCardActionTests = testGroup "type wildcard actions" -- | Test session of given name, checking action "Use type signature..." -- on a test file with given content and comparing to expected result. testUseTypeSignature name textIn textOut = testSession name $ do - let fileStart = "module Testing where" + let expectedContentAfterAction = T.unlines $ fileStart : textOut content = T.unlines $ fileStart : textIn - expectedContentAfterAction = T.unlines $ fileStart : textOut doc <- createDoc "Testing.hs" "haskell" content - _ <- waitForDiagnostics - actionsOrCommands <- getAllCodeActions doc - [addSignature] <- pure [action | InR action@CodeAction { _title = actionTitle } <- actionsOrCommands - , "Use type signature" `T.isPrefixOf` actionTitle - ] + + (Just addSignature) <- getUseTypeSigAction doc executeCodeAction addSignature contentAfterAction <- documentContents doc liftIO $ expectedContentAfterAction @=? contentAfterAction + testNoUseTypeSignature name textIn = testSession name $ do + let content = T.unlines $ fileStart : textIn + doc <- createDoc "Testing.hs" "haskell" content + codeAction <- getUseTypeSigAction doc + liftIO $ Nothing @=? 
codeAction + + fileStart = "module Testing where" + + getUseTypeSigAction docIn = do + _ <- waitForDiagnostics + actionsOrCommands <- getAllCodeActions docIn + + let addSignatures = + [ action | InR action@CodeAction { _title = actionTitle } <- actionsOrCommands + , "Use type signature" `T.isPrefixOf` actionTitle + ] + pure $ listToMaybe addSignatures + removeImportTests :: TestTree removeImportTests = testGroup "remove import actions" From 7d106cff15a80b38897710ce0ef04a8ba735169b Mon Sep 17 00:00:00 2001 From: soulomoon Date: Sat, 19 Jul 2025 02:22:49 +0800 Subject: [PATCH 70/90] fix duplication --- .../session-loader/Development/IDE/Session.hs | 133 ++---------------- 1 file changed, 9 insertions(+), 124 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 5e9a710893..ca4cafcd4d 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -91,9 +91,9 @@ import qualified Data.HashSet as Set import Database.SQLite.Simple import Development.IDE.Core.Tracing (withTrace) import Development.IDE.Core.WorkerThread (withWorkerQueue) -import Development.IDE.Session.Diagnostics (renderCradleError) import Development.IDE.Session.Dependency -import Development.IDE.Session.Ghc hiding (Log) +import Development.IDE.Session.Diagnostics (renderCradleError) +import Development.IDE.Session.Ghc hiding (Log) import Development.IDE.Types.Shake (WithHieDb, WithHieDbShield (..), toNoFileKey) @@ -106,11 +106,11 @@ import Text.ParserCombinators.ReadP (readP_to_S) import Control.Concurrent.STM (STM, TVar) import qualified Control.Monad.STM as STM +import Control.Monad.Trans.Reader +import qualified Development.IDE.Session.Ghc as Ghc import qualified Development.IDE.Session.OrderedSet as S import qualified Focus import qualified StmContainers.Map as STM -import Control.Monad.Trans.Reader -import qualified Development.IDE.Session.Ghc as Ghc data Log = LogSettingInitialDynFlags @@ -689,12 +689,12 @@ data SessionShake = SessionShake } data SessionEnv = SessionEnv - { sessionLspContext :: Maybe (LanguageContextEnv Config) - , sessionRootDir :: FilePath - , sessionIdeOptions :: IdeOptions - , sessionClientConfig :: Config + { sessionLspContext :: Maybe (LanguageContextEnv Config) + , sessionRootDir :: FilePath + , sessionIdeOptions :: IdeOptions + , sessionClientConfig :: Config , sessionSharedNameCache :: NameCache - , sessionLoadingOptions :: SessionLoadingOptions + , sessionLoadingOptions :: SessionLoadingOptions } type SessionM = ReaderT SessionEnv IO @@ -1024,121 +1024,6 @@ memoIO op = do return (Map.insert k res mp, res) Just res -> return (mp, res) -unit_flags :: [Flag (CmdLineP [String])] -unit_flags = [defFlag "unit" (SepArg addUnit)] - -addUnit :: String -> EwM (CmdLineP [String]) () -addUnit unit_str = liftEwM $ do - units <- getCmdLineState - putCmdLineState (unit_str : units) - --- | Throws if package flags are unsatisfiable -setOptions :: GhcMonad m - => NormalizedFilePath - -> ComponentOptions - -> DynFlags - -> FilePath -- ^ root dir, see Note [Root Directory] - -> m (NonEmpty (DynFlags, [GHC.Target])) -setOptions cfp (ComponentOptions theOpts compRoot _) dflags rootDir = do - ((theOpts',_errs,_warns),units) <- processCmdLineP unit_flags [] (map noLoc theOpts) - case NE.nonEmpty units of - Just us -> initMulti us - Nothing -> do - (df, targets) <- initOne (map unLoc theOpts') - -- A special target for the file which caused this wonderful - -- component to be created. 
In case the cradle doesn't list all the targets for - -- the component, in which case things will be horribly broken anyway. - -- - -- When we have a singleComponent that is caused to be loaded due to a - -- file, we assume the file is part of that component. This is useful - -- for bare GHC sessions, such as many of the ones used in the testsuite - -- - -- We don't do this when we have multiple components, because each - -- component better list all targets or there will be anarchy. - -- It is difficult to know which component to add our file to in - -- that case. - -- Multi unit arguments are likely to come from cabal, which - -- does list all targets. - -- - -- If we don't end up with a target for the current file in the end, then - -- we will report it as an error for that file - let abs_fp = toAbsolute rootDir (fromNormalizedFilePath cfp) - let special_target = Compat.mkSimpleTarget df abs_fp - pure $ (df, special_target : targets) :| [] - where - initMulti unitArgFiles = - forM unitArgFiles $ \f -> do - args <- liftIO $ expandResponse [f] - -- The reponse files may contain arguments like "+RTS", - -- and hie-bios doesn't expand the response files of @-unit@ arguments. - -- Thus, we need to do the stripping here. - initOne $ HieBios.removeRTS $ HieBios.removeVerbosityOpts args - initOne this_opts = do - (dflags', targets') <- addCmdOpts this_opts dflags - let dflags'' = - case unitIdString (homeUnitId_ dflags') of - -- cabal uses main for the unit id of all executable packages - -- This makes multi-component sessions confused about what - -- options to use for that component. - -- Solution: hash the options and use that as part of the unit id - -- This works because there won't be any dependencies on the - -- executable unit. - "main" -> - let hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack this_opts) - hashed_uid = Compat.toUnitId (Compat.stringToUnit ("main-"++hash)) - in setHomeUnitId_ hashed_uid dflags' - _ -> dflags' - - let targets = makeTargetsAbsolute root targets' - root = case workingDirectory dflags'' of - Nothing -> compRoot - Just wdir -> compRoot wdir - let dflags''' = - setWorkingDirectory root $ - disableWarningsAsErrors $ - -- disabled, generated directly by ghcide instead - flip gopt_unset Opt_WriteInterface $ - -- disabled, generated directly by ghcide instead - -- also, it can confuse the interface stale check - dontWriteHieFiles $ - setIgnoreInterfacePragmas $ - setBytecodeLinkerOptions $ - disableOptimisation $ - Compat.setUpTypedHoles $ - makeDynFlagsAbsolute compRoot -- makeDynFlagsAbsolute already accounts for workingDirectory - dflags'' - return (dflags''', targets) - -setIgnoreInterfacePragmas :: DynFlags -> DynFlags -setIgnoreInterfacePragmas df = - gopt_set (gopt_set df Opt_IgnoreInterfacePragmas) Opt_IgnoreOptimChanges - -disableOptimisation :: DynFlags -> DynFlags -disableOptimisation df = updOptLevel 0 df - -setHiDir :: FilePath -> DynFlags -> DynFlags -setHiDir f d = - -- override user settings to avoid conflicts leading to recompilation - d { hiDir = Just f} - -setODir :: FilePath -> DynFlags -> DynFlags -setODir f d = - -- override user settings to avoid conflicts leading to recompilation - d { objectDir = Just f} - -getCacheDirsDefault :: String -> [String] -> IO CacheDirs -getCacheDirsDefault prefix opts = do - dir <- Just <$> getXdgDirectory XdgCache (cacheDir prefix ++ "-" ++ opts_hash) - return $ CacheDirs dir dir dir - where - -- Create a unique folder per set of different GHC options, assuming that each different set 
of - -- GHC options will create incompatible interface files. - opts_hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack opts) - --- | Sub directory for the cache path -cacheDir :: String -cacheDir = "ghcide" - ---------------------------------------------------------------------------------------------------- data PackageSetupException From b2311ce08df56280c3be907c02d3b97c60e8e0ef Mon Sep 17 00:00:00 2001 From: fendor Date: Tue, 22 Jul 2025 09:04:02 +0200 Subject: [PATCH 71/90] Avoid unnecessary recompilation due to -haddock (#4596) * Avoid unnecessary recompilation due to -haddock Due to unprincipled adding and removing the `-haddock` flag during compilation and recompilation checking, we were performing more work than necessary. We avoid this by compiling everything with `-haddock` by default. This is safe nowadays, we have essentially been doing this for many releases, and know this is fine. For the occasion where we actually want to parse without the `-haddock` flag, we keep explicitly disabling it. We enable `-haddock` flag during session loading, since we already perform a number of DynFlags tweaks. This behaviour is dependent on the `OptHaddockParse` opton, which can, currently, only be modified at compile-time. * Fix windows test failure --------- Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- ghcide-test/exe/FindDefinitionAndHoverTests.hs | 7 ++++--- .../session-loader/Development/IDE/Session.hs | 17 ++++++++++++++--- ghcide/src/Development/IDE/Core/Rules.hs | 14 ++++++-------- ghcide/src/Development/IDE/Types/Options.hs | 10 ++++++---- 4 files changed, 30 insertions(+), 18 deletions(-) diff --git a/ghcide-test/exe/FindDefinitionAndHoverTests.hs b/ghcide-test/exe/FindDefinitionAndHoverTests.hs index 7920ff4949..e4c0958f58 100644 --- a/ghcide-test/exe/FindDefinitionAndHoverTests.hs +++ b/ghcide-test/exe/FindDefinitionAndHoverTests.hs @@ -187,7 +187,8 @@ tests = let holeL65 = Position 65 8 ; hleInfo2 = [ExpectHoverText ["_ :: a -> Maybe a"]] cccL17 = Position 17 16 ; docLink = [ExpectHoverTextRegex "\\*Defined in 'GHC.Types'\\* \\*\\(ghc-prim-[0-9.]+\\)\\*\n\n"] imported = Position 56 13 ; importedSig = getDocUri "Foo.hs" >>= \foo -> return [ExpectHoverText ["foo", "Foo", "Haddock"], mkL foo 5 0 5 3] - reexported = Position 55 14 ; reexportedSig = getDocUri "Bar.hs" >>= \bar -> return [ExpectHoverText ["Bar", "Bar", "Haddock"], if ghcVersion < GHC910 then mkL bar 3 5 3 8 else mkL bar 3 0 3 14] + reexported = Position 55 14 + reexportedSig = getDocUri "Bar.hs" >>= \bar -> return [ExpectHoverText ["Bar", "Bar", "Haddock"], if ghcVersion < GHC910 || not isWindows then mkL bar 3 5 3 8 else mkL bar 3 0 3 14] thLocL57 = Position 59 10 ; thLoc = [ExpectHoverText ["Identity"]] cmtL68 = Position 67 0 ; lackOfdEq = [ExpectHoverExcludeText ["$dEq"]] import310 = Position 3 10; pkgTxt = [ExpectHoverText ["Data.Text\n\ntext-"]] @@ -237,9 +238,9 @@ tests = let , testM yes yes imported importedSig "Imported symbol" , if isWindows then -- Flaky on Windows: https://github.com/haskell/haskell-language-server/issues/2997 - testM no yes reexported reexportedSig "Imported symbol (reexported)" + testM no yes reexported reexportedSig "Imported symbol reexported" else - testM yes yes reexported reexportedSig "Imported symbol (reexported)" + testM yes yes reexported reexportedSig "Imported symbol reexported" , test no yes thLocL57 thLoc "TH Splice Hover" , test yes yes import310 pkgTxt "show package name and its version" ] diff --git 
a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 77677ce3a0..fb777338b3 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -452,6 +452,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do IdeOptions{ optTesting = IdeTesting optTesting , optCheckProject = getCheckProject , optExtensions + , optHaddockParse } <- getIdeOptions -- populate the knownTargetsVar with all the @@ -496,7 +497,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do packageSetup (hieYaml, cfp, opts, libDir) = do -- Parse DynFlags for the newly discovered component hscEnv <- emptyHscEnv ideNc libDir - newTargetDfs <- evalGhcEnv hscEnv $ setOptions cfp opts (hsc_dflags hscEnv) rootDir + newTargetDfs <- evalGhcEnv hscEnv $ setOptions optHaddockParse cfp opts (hsc_dflags hscEnv) rootDir let deps = componentDependencies opts ++ maybeToList hieYaml dep_info <- getDependencyInfo deps -- Now lookup to see whether we are combining with an existing HscEnv @@ -1110,12 +1111,13 @@ addUnit unit_str = liftEwM $ do -- | Throws if package flags are unsatisfiable setOptions :: GhcMonad m - => NormalizedFilePath + => OptHaddockParse + -> NormalizedFilePath -> ComponentOptions -> DynFlags -> FilePath -- ^ root dir, see Note [Root Directory] -> m (NonEmpty (DynFlags, [GHC.Target])) -setOptions cfp (ComponentOptions theOpts compRoot _) dflags rootDir = do +setOptions haddockOpt cfp (ComponentOptions theOpts compRoot _) dflags rootDir = do ((theOpts',_errs,_warns),units) <- processCmdLineP unit_flags [] (map noLoc theOpts) case NE.nonEmpty units of Just us -> initMulti us @@ -1179,6 +1181,7 @@ setOptions cfp (ComponentOptions theOpts compRoot _) dflags rootDir = do dontWriteHieFiles $ setIgnoreInterfacePragmas $ setBytecodeLinkerOptions $ + enableOptHaddock haddockOpt $ disableOptimisation $ Compat.setUpTypedHoles $ makeDynFlagsAbsolute compRoot -- makeDynFlagsAbsolute already accounts for workingDirectory @@ -1192,6 +1195,14 @@ setIgnoreInterfacePragmas df = disableOptimisation :: DynFlags -> DynFlags disableOptimisation df = updOptLevel 0 df +-- | We always compile with '-haddock' unless explicitly disabled. +-- +-- This avoids inconsistencies when doing recompilation checking which was +-- observed in https://github.com/haskell/haskell-language-server/issues/4511 +enableOptHaddock :: OptHaddockParse -> DynFlags -> DynFlags +enableOptHaddock HaddockParse d = gopt_set d Opt_Haddock +enableOptHaddock NoHaddockParse d = d + setHiDir :: FilePath -> DynFlags -> DynFlags setHiDir f d = -- override user settings to avoid conflicts leading to recompilation diff --git a/ghcide/src/Development/IDE/Core/Rules.hs b/ghcide/src/Development/IDE/Core/Rules.hs index f1b11d971b..f76849624d 100644 --- a/ghcide/src/Development/IDE/Core/Rules.hs +++ b/ghcide/src/Development/IDE/Core/Rules.hs @@ -260,12 +260,10 @@ getParsedModuleRule recorder = let ms = ms' { ms_hspp_opts = modify_dflags $ ms_hspp_opts ms' } reset_ms pm = pm { pm_mod_summary = ms' } - -- We still parse with Haddocks whether Opt_Haddock is True or False to collect information - -- but we no longer need to parse with and without Haddocks separately for above GHC90. 
- liftIO $ (fmap.fmap.fmap) reset_ms $ getParsedModuleDefinition hsc opt file (withOptHaddock ms) + liftIO $ (fmap.fmap.fmap) reset_ms $ getParsedModuleDefinition hsc opt file ms -withOptHaddock :: ModSummary -> ModSummary -withOptHaddock = withOption Opt_Haddock +withoutOptHaddock :: ModSummary -> ModSummary +withoutOptHaddock = withoutOption Opt_Haddock withOption :: GeneralFlag -> ModSummary -> ModSummary withOption opt ms = ms{ms_hspp_opts= gopt_set (ms_hspp_opts ms) opt} @@ -284,7 +282,7 @@ getParsedModuleWithCommentsRule recorder = ModSummaryResult{msrModSummary = ms, msrHscEnv = hsc} <- use_ GetModSummary file opt <- getIdeOptions - let ms' = withoutOption Opt_Haddock $ withOption Opt_KeepRawTokenStream ms + let ms' = withoutOptHaddock $ withOption Opt_KeepRawTokenStream ms modify_dflags <- getModifyDynFlags dynFlagsModifyParser let ms'' = ms' { ms_hspp_opts = modify_dflags $ ms_hspp_opts ms' } reset_ms pm = pm { pm_mod_summary = ms' } @@ -972,8 +970,8 @@ regenerateHiFile sess f ms compNeeded = do hsc <- setFileCacheHook (hscEnv sess) opt <- getIdeOptions - -- Embed haddocks in the interface file - (diags, mb_pm) <- liftIO $ getParsedModuleDefinition hsc opt f (withOptHaddock ms) + -- By default, we parse with `-haddock` unless 'OptHaddockParse' is overwritten. + (diags, mb_pm) <- liftIO $ getParsedModuleDefinition hsc opt f ms case mb_pm of Nothing -> return (diags, Nothing) Just pm -> do diff --git a/ghcide/src/Development/IDE/Types/Options.hs b/ghcide/src/Development/IDE/Types/Options.hs index be3ea20932..8d4d91e166 100644 --- a/ghcide/src/Development/IDE/Types/Options.hs +++ b/ghcide/src/Development/IDE/Types/Options.hs @@ -68,10 +68,12 @@ data IdeOptions = IdeOptions , optCheckParents :: IO CheckParents -- ^ When to typecheck reverse dependencies of a file , optHaddockParse :: OptHaddockParse - -- ^ Whether to return result of parsing module with Opt_Haddock. - -- Otherwise, return the result of parsing without Opt_Haddock, so - -- that the parsed module contains the result of Opt_KeepRawTokenStream, - -- which might be necessary for hlint. + -- ^ Whether to parse modules with '-haddock' by default. + -- If 'HaddockParse' is given, we parse local haskell modules with the + -- '-haddock' flag enables. + -- If a plugin requires the parsed sources *without* '-haddock', it needs + -- to use rules that explicitly disable the '-haddock' flag. + -- See call sites of 'withoutOptHaddock' for rules that parse without '-haddock'. 
, optModifyDynFlags :: Config -> DynFlagsModifications -- ^ Will be called right after setting up a new cradle, -- allowing to customize the Ghc options used From b8c9b8466afe5521ce5ae3b2c7195cafe8dda371 Mon Sep 17 00:00:00 2001 From: George Thomas Date: Tue, 22 Jul 2025 13:44:12 +0100 Subject: [PATCH 72/90] Use plain comments instead of annotations for HLint ignores (#4669) --- .../hls-hlint-plugin/src/Ide/Plugin/Hlint.hs | 17 +++-------------- plugins/hls-hlint-plugin/test/Main.hs | 17 ++++++----------- .../test/testdata/IgnoreHintAction.expected.hs | 3 +++ .../test/testdata/IgnoreHintAction.hs | 2 ++ .../testdata/UnrecognizedPragmasOff.expected.hs | 4 ---- .../test/testdata/UnrecognizedPragmasOff.hs | 3 --- .../testdata/UnrecognizedPragmasOn.expected.hs | 5 ----- .../test/testdata/UnrecognizedPragmasOn.hs | 3 --- 8 files changed, 14 insertions(+), 40 deletions(-) create mode 100644 plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.expected.hs create mode 100644 plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.hs delete mode 100644 plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOff.expected.hs delete mode 100644 plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOff.hs delete mode 100644 plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOn.expected.hs delete mode 100644 plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOn.hs diff --git a/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs b/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs index 5a72455eb5..210e9f3910 100644 --- a/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs +++ b/plugins/hls-hlint-plugin/src/Ide/Plugin/Hlint.hs @@ -64,11 +64,9 @@ import System.Environment (setEnv, #endif import Development.IDE.GHC.Compat (DynFlags, - WarningFlag (Opt_WarnUnrecognisedPragmas), extensionFlags, ms_hspp_opts, - topDir, - wopt) + topDir) import qualified Development.IDE.GHC.Compat.Util as EnumSet #if MIN_GHC_API_VERSION(9,4,0) @@ -466,19 +464,10 @@ mkSuppressHintTextEdits dynFlags fileContents hint = NextPragmaInfo{ nextPragmaLine, lineSplitTextEdits } = getNextPragmaInfo dynFlags (Just fileContents) nextPragmaLinePosition = Position (fromIntegral nextPragmaLine) 0 nextPragmaRange = Range nextPragmaLinePosition nextPragmaLinePosition - wnoUnrecognisedPragmasText = - if wopt Opt_WarnUnrecognisedPragmas dynFlags - then Just "{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-}\n" - else Nothing - hlintIgnoreText = Just ("{-# HLINT ignore \"" <> hint <> "\" #-}\n") - -- we combine the texts into a single text because lsp-test currently - -- applies text edits backwards and I want the options pragma to - -- appear above the hlint pragma in the tests - combinedText = mconcat $ catMaybes [wnoUnrecognisedPragmasText, hlintIgnoreText] - combinedTextEdit = LSP.TextEdit nextPragmaRange combinedText + textEdit = LSP.TextEdit nextPragmaRange $ "{- HLINT ignore \"" <> hint <> "\" -}\n" lineSplitTextEditList = maybe [] (\LineSplitTextEdits{..} -> [lineSplitInsertTextEdit, lineSplitDeleteTextEdit]) lineSplitTextEdits in - combinedTextEdit : lineSplitTextEditList + textEdit : lineSplitTextEditList -- --------------------------------------------------------------------- ignoreHint :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> VersionedTextDocumentIdentifier -> HintTitle -> IO (Either PluginError WorkspaceEdit) diff --git a/plugins/hls-hlint-plugin/test/Main.hs b/plugins/hls-hlint-plugin/test/Main.hs index 4eea2a803a..360a9c0c01 100644 --- a/plugins/hls-hlint-plugin/test/Main.hs +++ 
b/plugins/hls-hlint-plugin/test/Main.hs @@ -48,9 +48,9 @@ resolveTests :: TestTree resolveTests = knownBrokenForGhcVersions [GHC910] "apply-refact doesn't work on 9.10" $ testGroup "hlint resolve tests" [ ignoreHintGoldenResolveTest - "Resolve version of: Ignore hint in this module inserts -Wno-unrecognised-pragmas and hlint ignore pragma if warn unrecognized pragmas is off" - "UnrecognizedPragmasOff" - (Point 3 8) + "Resolve version of: Ignore hint in this module inserts hlint ignore pragma" + "IgnoreHintAction" + (Point 2 8) "Eta reduce" , applyHintGoldenResolveTest "Resolve version of: [#2612] Apply hint works when operator fixities go right-to-left" @@ -64,14 +64,9 @@ ignoreHintTests :: TestTree ignoreHintTests = testGroup "hlint ignore hint tests" [ ignoreHintGoldenTest - "Ignore hint in this module inserts -Wno-unrecognised-pragmas and hlint ignore pragma if warn unrecognized pragmas is off" - "UnrecognizedPragmasOff" - (Point 3 8) - "Eta reduce" - , ignoreHintGoldenTest - "Ignore hint in this module inserts only hlint ignore pragma if warn unrecognized pragmas is on" - "UnrecognizedPragmasOn" - (Point 3 9) + "Ignore hint in this module inserts hlint ignore pragma" + "IgnoreHintAction" + (Point 2 8) "Eta reduce" ] diff --git a/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.expected.hs b/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.expected.hs new file mode 100644 index 0000000000..b3ae28995e --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.expected.hs @@ -0,0 +1,3 @@ +{- HLINT ignore "Eta reduce" -} +module IgnoreHintAction where +foo x = id x diff --git a/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.hs b/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.hs new file mode 100644 index 0000000000..7fb147a40f --- /dev/null +++ b/plugins/hls-hlint-plugin/test/testdata/IgnoreHintAction.hs @@ -0,0 +1,2 @@ +module IgnoreHintAction where +foo x = id x diff --git a/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOff.expected.hs b/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOff.expected.hs deleted file mode 100644 index 31d9aed946..0000000000 --- a/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOff.expected.hs +++ /dev/null @@ -1,4 +0,0 @@ -{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} -{-# HLINT ignore "Eta reduce" #-} -module UnrecognizedPragmasOff where -foo x = id x diff --git a/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOff.hs b/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOff.hs deleted file mode 100644 index 2611c9a7f7..0000000000 --- a/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOff.hs +++ /dev/null @@ -1,3 +0,0 @@ -{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} -module UnrecognizedPragmasOff where -foo x = id x diff --git a/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOn.expected.hs b/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOn.expected.hs deleted file mode 100644 index 564503ca40..0000000000 --- a/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOn.expected.hs +++ /dev/null @@ -1,5 +0,0 @@ -{-# OPTIONS_GHC -Wunrecognised-pragmas #-} -{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-} -{-# HLINT ignore "Eta reduce" #-} -module UnrecognizedPragmasOn where -foo x = id x diff --git a/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOn.hs b/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOn.hs deleted file mode 100644 index bac66497ba..0000000000 --- 
a/plugins/hls-hlint-plugin/test/testdata/UnrecognizedPragmasOn.hs +++ /dev/null @@ -1,3 +0,0 @@ -{-# OPTIONS_GHC -Wunrecognised-pragmas #-} -module UnrecognizedPragmasOn where -foo x = id x From 748603e1cf4d85b3aa31bff4d91edd4b8b3fa66b Mon Sep 17 00:00:00 2001 From: Andreas Abel Date: Thu, 31 Jul 2025 13:25:06 +0200 Subject: [PATCH 73/90] Fix build with GHC 9.10.3-rc1 Re: - https://gitlab.haskell.org/ghc/ghc/-/issues/22581 - https://gitlab.haskell.org/ghc/ghc/-/issues/26250 Closes #4678 --- .../src/Ide/Plugin/SemanticTokens/SemanticConfig.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs index e9e8034ce3..be793cfe7a 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/SemanticConfig.hs @@ -16,8 +16,8 @@ import Development.IDE (Action, usePropertyAction) import GHC.TypeLits (KnownSymbol) import Ide.Plugin.Properties (KeyNameProxy, NotElem, Properties, - PropertyKey (type PropertyKey), - PropertyType (type TEnum), + PropertyKey (PropertyKey), + PropertyType (TEnum), defineEnumProperty, emptyProperties) import Ide.Plugin.SemanticTokens.Types From 59b733f0f77ab49e84e2a579650d620284940e41 Mon Sep 17 00:00:00 2001 From: Dominik Schrempf Date: Sat, 2 Aug 2025 13:41:34 +0200 Subject: [PATCH 74/90] Remove `hie-compat` (#4613) While removing references to GHC 9.4, I realized that some parts of HLS are referring to even older versions of GHC. For example, `hie-compat` is a compatibility library backporting support of Haskell IDE Engine (HIE) features to older versions of GHC. Since GHC 9.2, `hie-compat` only re-exported definitions already present in the `ghc` library, and so, is essentially obsolete. FYI: We still have `hie-compat` in the dependency graph, because some libraries (e.g., `hiedb`) are using it. 
--- CODEOWNERS | 1 - RELEASING.md | 1 - cabal.project | 1 - docs/contributing/contributing.md | 1 - ghcide/ghcide.cabal | 1 - ghcide/src/Development/IDE/Core/Actions.hs | 1 + ghcide/src/Development/IDE/Core/Compile.hs | 2 + ghcide/src/Development/IDE/Core/RuleTypes.hs | 3 + ghcide/src/Development/IDE/Core/Rules.hs | 8 +- ghcide/src/Development/IDE/GHC/Compat.hs | 16 +- ghcide/src/Development/IDE/GHC/CoreFile.hs | 1 - ghcide/src/Development/IDE/GHC/Orphans.hs | 7 +- .../IDE/Plugin/Completions/Logic.hs | 3 + ghcide/src/Development/IDE/Spans/AtPoint.hs | 19 +- .../Development/IDE/Spans/Documentation.hs | 1 + .../Development/IDE/Spans/LocalBindings.hs | 11 +- haskell-language-server.cabal | 8 +- hie-compat/CHANGELOG.md | 5 - hie-compat/LICENSE | 201 -- hie-compat/README.md | 24 - hie-compat/hie-compat.cabal | 39 - hie-compat/src-ghc92/Compat/HieAst.hs | 2132 ----------------- hie-compat/src-reexport-ghc9/Compat/HieBin.hs | 8 - .../src-reexport-ghc9/Compat/HieDebug.hs | 10 - .../src-reexport-ghc9/Compat/HieTypes.hs | 3 - .../src-reexport-ghc9/Compat/HieUtils.hs | 3 - .../src-reexport-ghc92/Compat/HieAst.hs | 3 - .../src-reexport-ghc92/Compat/HieBin.hs | 8 - .../src-reexport-ghc92/Compat/HieDebug.hs | 10 - .../src-reexport-ghc92/Compat/HieTypes.hs | 3 - .../src-reexport-ghc92/Compat/HieUtils.hs | 3 - hie-compat/src-reexport/Compat/HieDebug.hs | 3 - hie-compat/src-reexport/Compat/HieTypes.hs | 3 - hie-compat/src-reexport/Compat/HieUtils.hs | 3 - .../src/Development/IDE/Test/Diagnostic.hs | 2 +- .../src/Ide/Plugin/CallHierarchy/Internal.hs | 6 + .../src/Ide/Plugin/Class/CodeAction.hs | 3 + .../src/Ide/Plugin/CodeRange/ASTPreProcess.hs | 6 +- .../src/Ide/Plugin/CodeRange/Rules.hs | 7 +- .../src/Ide/Plugin/ExplicitFields.hs | 4 +- .../src/Ide/Plugin/QualifyImportedNames.hs | 10 +- .../src/Development/IDE/Plugin/CodeAction.hs | 2 + .../src/Ide/Plugin/Rename.hs | 6 +- .../src/Ide/Plugin/SemanticTokens/Internal.hs | 3 + .../src/Ide/Plugin/SemanticTokens/Mappings.hs | 4 + .../src/Ide/Plugin/SemanticTokens/Query.hs | 3 + .../src/Ide/Plugin/SemanticTokens/Tokenize.hs | 4 + .../src/Ide/Plugin/SemanticTokens/Types.hs | 6 +- .../src/Ide/Plugin/SemanticTokens/Utils.hs | 5 + .../hls-stan-plugin/src/Ide/Plugin/Stan.hs | 2 +- stack-lts22.yaml | 3 +- stack.yaml | 3 +- 52 files changed, 111 insertions(+), 2514 deletions(-) delete mode 100644 hie-compat/CHANGELOG.md delete mode 100644 hie-compat/LICENSE delete mode 100644 hie-compat/README.md delete mode 100644 hie-compat/hie-compat.cabal delete mode 100644 hie-compat/src-ghc92/Compat/HieAst.hs delete mode 100644 hie-compat/src-reexport-ghc9/Compat/HieBin.hs delete mode 100644 hie-compat/src-reexport-ghc9/Compat/HieDebug.hs delete mode 100644 hie-compat/src-reexport-ghc9/Compat/HieTypes.hs delete mode 100644 hie-compat/src-reexport-ghc9/Compat/HieUtils.hs delete mode 100644 hie-compat/src-reexport-ghc92/Compat/HieAst.hs delete mode 100644 hie-compat/src-reexport-ghc92/Compat/HieBin.hs delete mode 100644 hie-compat/src-reexport-ghc92/Compat/HieDebug.hs delete mode 100644 hie-compat/src-reexport-ghc92/Compat/HieTypes.hs delete mode 100644 hie-compat/src-reexport-ghc92/Compat/HieUtils.hs delete mode 100644 hie-compat/src-reexport/Compat/HieDebug.hs delete mode 100644 hie-compat/src-reexport/Compat/HieTypes.hs delete mode 100644 hie-compat/src-reexport/Compat/HieUtils.hs diff --git a/CODEOWNERS b/CODEOWNERS index 7d66f7805e..820661ceeb 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -4,7 +4,6 @@ /hls-graph @wz1000 /hls-plugin-api @michaelpj @fendor /hls-test-utils @fendor 
-/hie-compat @wz1000 # HLS main /src @fendor diff --git a/RELEASING.md b/RELEASING.md index a48b32cb93..74da125d86 100644 --- a/RELEASING.md +++ b/RELEASING.md @@ -9,7 +9,6 @@ - [ ] bump package versions in all `*.cabal` files (same version as hls) - HLS uses lockstep versioning. The core packages and all plugins use the same version number, and only support exactly this version. - Exceptions: - - `hie-compat` requires no automatic version bump. - `shake-bench` is an internal testing tool, not exposed to the outside world. Thus, no version bump required for releases. - For updating cabal files, the following script can be used: - ```sh diff --git a/cabal.project b/cabal.project index 17524ede42..fed144eb90 100644 --- a/cabal.project +++ b/cabal.project @@ -1,6 +1,5 @@ packages: ./ - ./hie-compat ./shake-bench ./hls-graph ./ghcide diff --git a/docs/contributing/contributing.md b/docs/contributing/contributing.md index 134a03b89c..08ad21f12e 100644 --- a/docs/contributing/contributing.md +++ b/docs/contributing/contributing.md @@ -197,7 +197,6 @@ pre-commit install #### Why are some components excluded from automatic formatting? - `test/testdata` and `test/data` are excluded because we want to test formatting plugins. -- `hie-compat` is excluded because we want to keep its code as close to GHC as possible. ## Plugin tutorial diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal index 2fcca48d6d..6c2faa59a2 100644 --- a/ghcide/ghcide.cabal +++ b/ghcide/ghcide.cabal @@ -74,7 +74,6 @@ library , haddock-library >=1.8 && <1.12 , hashable , hie-bios ^>=0.16.0 - , hie-compat ^>=0.3.0.0 , hiedb ^>= 0.7.0.0 , hls-graph == 2.11.0.0 , hls-plugin-api == 2.11.0.0 diff --git a/ghcide/src/Development/IDE/Core/Actions.hs b/ghcide/src/Development/IDE/Core/Actions.hs index 0d55a73120..61614cb0ca 100644 --- a/ghcide/src/Development/IDE/Core/Actions.hs +++ b/ghcide/src/Development/IDE/Core/Actions.hs @@ -28,6 +28,7 @@ import Development.IDE.Graph import qualified Development.IDE.Spans.AtPoint as AtPoint import Development.IDE.Types.HscEnvEq (hscEnv) import Development.IDE.Types.Location +import GHC.Iface.Ext.Types (Identifier) import qualified HieDb import Language.LSP.Protocol.Types (DocumentHighlight (..), SymbolInformation (..), diff --git a/ghcide/src/Development/IDE/Core/Compile.hs b/ghcide/src/Development/IDE/Core/Compile.hs index 552409fbba..48439e2ff3 100644 --- a/ghcide/src/Development/IDE/Core/Compile.hs +++ b/ghcide/src/Development/IDE/Core/Compile.hs @@ -109,6 +109,7 @@ import qualified Data.Set as Set import qualified GHC as G import GHC.Core.Lint.Interactive import GHC.Driver.Config.CoreToStg.Prep +import GHC.Iface.Ext.Types (HieASTs) import qualified GHC.Runtime.Loader as Loader import GHC.Tc.Gen.Splice import GHC.Types.Error @@ -145,6 +146,7 @@ import Development.IDE.GHC.Compat hiding import qualified Data.List.NonEmpty as NE import Data.Time (getCurrentTime) import GHC.Driver.Env (hsc_all_home_unit_ids) +import GHC.Iface.Ext.Types (NameEntityInfo) #endif #if MIN_VERSION_ghc(9,12,0) diff --git a/ghcide/src/Development/IDE/Core/RuleTypes.hs b/ghcide/src/Development/IDE/Core/RuleTypes.hs index 43b80be119..63122d4025 100644 --- a/ghcide/src/Development/IDE/Core/RuleTypes.hs +++ b/ghcide/src/Development/IDE/Core/RuleTypes.hs @@ -34,6 +34,9 @@ import Development.IDE.Import.DependencyInformation import Development.IDE.Types.HscEnvEq (HscEnvEq) import Development.IDE.Types.KnownTargets import GHC.Generics (Generic) +import GHC.Iface.Ext.Types (HieASTs, + TypeIndex) +import GHC.Iface.Ext.Utils (RefMap) 
import Data.ByteString (ByteString) import Data.Text.Utf16.Rope.Mixed (Rope) diff --git a/ghcide/src/Development/IDE/Core/Rules.hs b/ghcide/src/Development/IDE/Core/Rules.hs index f76849624d..071ecafc41 100644 --- a/ghcide/src/Development/IDE/Core/Rules.hs +++ b/ghcide/src/Development/IDE/Core/Rules.hs @@ -138,6 +138,8 @@ import Development.IDE.Types.HscEnvEq import Development.IDE.Types.Location import Development.IDE.Types.Options import qualified Development.IDE.Types.Shake as Shake +import GHC.Iface.Ext.Types (HieASTs (..)) +import GHC.Iface.Ext.Utils (generateReferencesMap) import qualified GHC.LanguageExtensions as LangExt import HIE.Bios.Ghc.Gap (hostIsDynamic) import qualified HieDb @@ -510,7 +512,7 @@ persistentHieFileRule recorder = addPersistentRule GetHieAst $ \file -> runMaybe (currentSource, ver) <- liftIO $ case M.lookup (filePathToUri' file) vfsData of Nothing -> (,Nothing) . T.decodeUtf8 <$> BS.readFile (fromNormalizedFilePath file) Just vf -> pure (virtualFileText vf, Just $ virtualFileVersion vf) - let refmap = Compat.generateReferencesMap . Compat.getAsts . Compat.hie_asts $ res + let refmap = generateReferencesMap . getAsts . Compat.hie_asts $ res del = deltaFromDiff (T.decodeUtf8 $ Compat.hie_hs_src res) currentSource pure (HAR (Compat.hie_module res) (Compat.hie_asts res) refmap mempty (HieFromDisk res),del,ver) @@ -538,8 +540,8 @@ getHieAstRuleDefinition f hsc tmr = do liftIO $ writeAndIndexHieFile hsc se modSummary f exports asts source _ -> pure [] - let refmap = Compat.generateReferencesMap . Compat.getAsts <$> masts - typemap = AtPoint.computeTypeReferences . Compat.getAsts <$> masts + let refmap = generateReferencesMap . getAsts <$> masts + typemap = AtPoint.computeTypeReferences . getAsts <$> masts pure (diags <> diagsWrite, HAR (ms_mod $ tmrModSummary tmr) <$> masts <*> refmap <*> typemap <*> pure HieFresh) getImportMapRule :: Recorder (WithPriority Log) -> Rules () diff --git a/ghcide/src/Development/IDE/GHC/Compat.hs b/ghcide/src/Development/IDE/GHC/Compat.hs index ddf01c61c5..befd22c8de 100644 --- a/ghcide/src/Development/IDE/GHC/Compat.hs +++ b/ghcide/src/Development/IDE/GHC/Compat.hs @@ -45,8 +45,6 @@ module Development.IDE.GHC.Compat( readHieFile, setHieDir, dontWriteHieFiles, - module Compat.HieTypes, - module Compat.HieUtils, -- * Compat modules module Development.IDE.GHC.Compat.Core, module Development.IDE.GHC.Compat.Env, @@ -112,14 +110,8 @@ module Development.IDE.GHC.Compat( #if MIN_VERSION_ghc(9,7,0) tcInitTidyEnv, #endif - ) where -import Compat.HieAst (enrichHie) -import Compat.HieBin -import Compat.HieTypes hiding - (nodeAnnotations) -import qualified Compat.HieTypes as GHC (nodeAnnotations) -import Compat.HieUtils + ) where import Control.Applicative ((<|>)) import qualified Data.ByteString as BS import Data.Coerce (coerce) @@ -146,12 +138,18 @@ import GHC.Core.Tidy (tidyExpr) import GHC.CoreToStg.Prep (corePrepPgm) import qualified GHC.CoreToStg.Prep as GHC import GHC.Driver.Hooks (hscCompileCoreExprHook) +import GHC.Iface.Ext.Types hiding + (nodeAnnotations) +import qualified GHC.Iface.Ext.Types as GHC (nodeAnnotations) +import GHC.Iface.Ext.Utils import GHC.ByteCode.Asm (bcoFreeNames) import GHC.Core import GHC.Data.FastString import GHC.Data.StringBuffer import GHC.Driver.Session hiding (ExposePackage) +import GHC.Iface.Ext.Ast (enrichHie) +import GHC.Iface.Ext.Binary import GHC.Iface.Make (mkIfaceExports) import GHC.SysTools.Tasks (runPp, runUnlit) import GHC.Types.Annotations (AnnTarget (ModuleTarget), diff --git 
a/ghcide/src/Development/IDE/GHC/CoreFile.hs b/ghcide/src/Development/IDE/GHC/CoreFile.hs index 9977ad573b..99b7328770 100644 --- a/ghcide/src/Development/IDE/GHC/CoreFile.hs +++ b/ghcide/src/Development/IDE/GHC/CoreFile.hs @@ -18,7 +18,6 @@ import Data.Foldable import Data.IORef import Data.List (isPrefixOf) import Data.Maybe -import qualified Data.Text as T import Development.IDE.GHC.Compat import qualified Development.IDE.GHC.Compat.Util as Util import GHC.Core diff --git a/ghcide/src/Development/IDE/GHC/Orphans.hs b/ghcide/src/Development/IDE/GHC/Orphans.hs index 543c6f4387..068ca6a78a 100644 --- a/ghcide/src/Development/IDE/GHC/Orphans.hs +++ b/ghcide/src/Development/IDE/GHC/Orphans.hs @@ -7,9 +7,7 @@ -- | Orphan instances for GHC. -- Note that the 'NFData' instances may not be law abiding. module Development.IDE.GHC.Orphans() where -import Development.IDE.GHC.Compat hiding - (DuplicateRecordFields, - FieldSelectors) +import Development.IDE.GHC.Compat import Development.IDE.GHC.Util import Control.DeepSeq @@ -24,9 +22,8 @@ import GHC.ByteCode.Types import GHC.Data.Bag import GHC.Data.FastString import qualified GHC.Data.StringBuffer as SB +import GHC.Iface.Ext.Types import GHC.Parser.Annotation -import GHC.Types.FieldLabel (DuplicateRecordFields (DuplicateRecordFields, NoDuplicateRecordFields), - FieldSelectors (FieldSelectors, NoFieldSelectors)) import GHC.Types.PkgQual import GHC.Types.SrcLoc diff --git a/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs b/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs index a00705ba39..0a5cecaca8 100644 --- a/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs +++ b/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs @@ -49,6 +49,9 @@ import Development.IDE.Plugin.Completions.Types import Development.IDE.Spans.LocalBindings import Development.IDE.Types.Exports import Development.IDE.Types.Options +import GHC.Iface.Ext.Types (HieAST, + NodeInfo (..)) +import GHC.Iface.Ext.Utils (nodeInfo) import Ide.PluginUtils (mkLspCommand) import Ide.Types (CommandId (..), IdePlugins (..), diff --git a/ghcide/src/Development/IDE/Spans/AtPoint.hs b/ghcide/src/Development/IDE/Spans/AtPoint.hs index 16b4f65b11..50df0f5ba5 100644 --- a/ghcide/src/Development/IDE/Spans/AtPoint.hs +++ b/ghcide/src/Development/IDE/Spans/AtPoint.hs @@ -67,6 +67,23 @@ import Data.Tree import qualified Data.Tree as T import Data.Version (showVersion) import Development.IDE.Types.Shake (WithHieDb) +import GHC.Iface.Ext.Types (EvVarSource (..), + HieAST (..), + HieASTs (..), + HieArgs (..), + HieType (..), Identifier, + IdentifierDetails (..), + NodeInfo (..), Scope, + Span) +import GHC.Iface.Ext.Utils (EvidenceInfo (..), + RefMap, getEvidenceTree, + getScopeFromContext, + hieTypeToIface, + isEvidenceContext, + isEvidenceUse, + isOccurrence, nodeInfo, + recoverFullType, + selectSmallestContaining) import HieDb hiding (pointCommand, withHieDb) import System.Directory (doesFileExist) @@ -488,7 +505,7 @@ instanceLocationsAtPoint instanceLocationsAtPoint withHieDb lookupModule _ideOptions pos (HAR _ ast _rm _ _) = let ns = concat $ pointCommand ast pos (M.keys . getNodeIds) evTrees = mapMaybe (eitherToMaybe >=> getEvidenceTree _rm) ns - evNs = concatMap (map (evidenceVar) . T.flatten) evTrees + evNs = concatMap (map evidenceVar . T.flatten) evTrees in fmap (nubOrd . 
concat) $ mapMaybeM (nameToLocation withHieDb lookupModule) evNs diff --git a/ghcide/src/Development/IDE/Spans/Documentation.hs b/ghcide/src/Development/IDE/Spans/Documentation.hs index 85f2ef1037..dcf7778de3 100644 --- a/ghcide/src/Development/IDE/Spans/Documentation.hs +++ b/ghcide/src/Development/IDE/Spans/Documentation.hs @@ -28,6 +28,7 @@ import Development.IDE.GHC.Compat.Util import Development.IDE.GHC.Error import Development.IDE.GHC.Util (printOutputable) import Development.IDE.Spans.Common +import GHC.Iface.Ext.Utils (RefMap) import Language.LSP.Protocol.Types (filePathToUri, getUri) import Prelude hiding (mod) import System.Directory diff --git a/ghcide/src/Development/IDE/Spans/LocalBindings.hs b/ghcide/src/Development/IDE/Spans/LocalBindings.hs index 8ca811eaa0..8806ed8ab3 100644 --- a/ghcide/src/Development/IDE/Spans/LocalBindings.hs +++ b/ghcide/src/Development/IDE/Spans/LocalBindings.hs @@ -17,15 +17,16 @@ import qualified Data.IntervalMap.FingerTree as IM import qualified Data.List as L import qualified Data.Map as M import qualified Data.Set as S +import GHC.Iface.Ext.Types (IdentifierDetails (..), + Scope (..)) +import GHC.Iface.Ext.Utils (RefMap, getBindSiteFromContext, + getScopeFromContext) + import Development.IDE.GHC.Compat (Name, NameEnv, RealSrcSpan, - RefMap, Scope (..), Type, - getBindSiteFromContext, - getScopeFromContext, identInfo, - identType, isSystemName, + Type, isSystemName, nonDetNameEnvElts, realSrcSpanEnd, realSrcSpanStart, unitNameEnv) - import Development.IDE.GHC.Error import Development.IDE.Types.Location diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index f4066dca94..ab57fa79ea 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -406,6 +406,7 @@ library hls-call-hierarchy-plugin , aeson , containers , extra + , ghc , ghcide == 2.11.0.0 , hiedb ^>= 0.7.0.0 , hls-plugin-api == 2.11.0.0 @@ -592,10 +593,10 @@ library hls-rename-plugin hs-source-dirs: plugins/hls-rename-plugin/src build-depends: , containers + , ghc , ghcide == 2.11.0.0 , hashable , hiedb ^>= 0.7.0.0 - , hie-compat , hls-plugin-api == 2.11.0.0 , haskell-language-server:hls-refactor-plugin , lens @@ -800,7 +801,6 @@ library hls-stan-plugin build-depends: , deepseq , hashable - , hie-compat , hls-plugin-api , ghcide , lsp-types @@ -1066,6 +1066,7 @@ library hls-qualify-imported-names-plugin hs-source-dirs: plugins/hls-qualify-imported-names-plugin/src build-depends: , containers + , ghc , ghcide == 2.11.0.0 , hls-plugin-api == 2.11.0.0 , lens @@ -1119,6 +1120,7 @@ library hls-code-range-plugin , containers , deepseq , extra + , ghc , ghcide == 2.11.0.0 , hashable , hls-plugin-api == 2.11.0.0 @@ -1322,6 +1324,7 @@ library hls-explicit-record-fields-plugin buildable: False exposed-modules: Ide.Plugin.ExplicitFields build-depends: + , ghc , ghcide == 2.11.0.0 , hls-plugin-api == 2.11.0.0 , lsp @@ -1730,6 +1733,7 @@ library hls-semantic-tokens-plugin , extra , text-rope , mtl >= 2.2 + , ghc , ghcide == 2.11.0.0 , hls-plugin-api == 2.11.0.0 , lens diff --git a/hie-compat/CHANGELOG.md b/hie-compat/CHANGELOG.md deleted file mode 100644 index 82d590f7ab..0000000000 --- a/hie-compat/CHANGELOG.md +++ /dev/null @@ -1,5 +0,0 @@ -# Revision history for hie-compat - -## 0.1.0.0 -- 2020-10-19 - -* Initial Release diff --git a/hie-compat/LICENSE b/hie-compat/LICENSE deleted file mode 100644 index 8775cb7967..0000000000 --- a/hie-compat/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - 
http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2019 Zubin Duggal - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/hie-compat/README.md b/hie-compat/README.md deleted file mode 100644 index 7ac08b305a..0000000000 --- a/hie-compat/README.md +++ /dev/null @@ -1,24 +0,0 @@ -# hie-compat - -Mainly a backport of [HIE -Files](https://gitlab.haskell.org/ghc/ghc/-/wikis/hie-files) for ghc 8.8, along -with a few other backports of fixes useful for `ghcide` - -Also includes backport of record-dot-syntax support to 9.2.x - -Fully compatible with `.hie` files natively produced by versions of GHC that support -them. - -**THIS DOES NOT LET YOU READ HIE FILES WITH MISMATCHED VERSIONS OF GHC** - -Backports included: - -https://gitlab.haskell.org/ghc/ghc/-/merge_requests/8589 - -https://gitlab.haskell.org/ghc/ghc/-/merge_requests/4037 - -https://gitlab.haskell.org/ghc/ghc/-/merge_requests/4068 - -https://gitlab.haskell.org/ghc/ghc/-/merge_requests/3199 - -https://gitlab.haskell.org/ghc/ghc/-/merge_requests/2578 diff --git a/hie-compat/hie-compat.cabal b/hie-compat/hie-compat.cabal deleted file mode 100644 index 2b361df887..0000000000 --- a/hie-compat/hie-compat.cabal +++ /dev/null @@ -1,39 +0,0 @@ -cabal-version: 1.22 -name: hie-compat -version: 0.3.1.2 -synopsis: HIE files for GHC 8.8 and other HIE file backports -license: Apache-2.0 -description: - Backports for HIE files to GHC 8.8, along with a few other backports - of HIE file related fixes for ghcide. - - THIS DOES NOT LET YOU READ HIE FILES WITH MISMATCHED VERSIONS OF GHC -license-file: LICENSE -author: Zubin Duggal -maintainer: zubin.duggal@gmail.com -build-type: Simple -extra-source-files: CHANGELOG.md README.md -category: Development -homepage: https://github.com/haskell/haskell-language-server/tree/master/hie-compat#readme -bug-reports: https://github.com/haskell/haskell-language-server/issues - -source-repository head - type: git - location: https://github.com/haskell/haskell-language-server.git - -library - default-language: GHC2021 - build-depends: - base < 4.22, array, bytestring, containers, directory, filepath, transformers - build-depends: ghc >= 8.10, ghc-boot - ghc-options: -Wall -Wno-name-shadowing - - exposed-modules: - Compat.HieAst - Compat.HieBin - Compat.HieTypes - Compat.HieDebug - Compat.HieUtils - - if (impl(ghc >= 9.4)) - hs-source-dirs: src-reexport-ghc92 diff --git a/hie-compat/src-ghc92/Compat/HieAst.hs b/hie-compat/src-ghc92/Compat/HieAst.hs deleted file mode 100644 index 3445ff6213..0000000000 --- a/hie-compat/src-ghc92/Compat/HieAst.hs +++ /dev/null @@ -1,2132 +0,0 @@ -{-# LANGUAGE AllowAmbiguousTypes #-} -{-# LANGUAGE CPP #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE GADTs #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE UndecidableInstances #-} -{-# LANGUAGE UndecidableSuperClasses #-} -{- HLINT ignore -} -{-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-} - -{- -Forked from GHC v9.2.3 to include record-dot-syntax type information in .hie files. 
- -Changes are marked with "CHANGED:" - -Main functions for .hie file generation --} - --- CHANGED: removed this include and updated the module declaration --- #include "HsVersions.h" --- --- module GHC.Iface.Ext.Ast ( mkHieFile, mkHieFileWithSource, getCompressedAsts, enrichHie) where - -module Compat.HieAst ( enrichHie ) where - -import GHC.Utils.Outputable(ppr) - -import GHC.Prelude - -import GHC.Types.Avail ( Avails ) -import GHC.Data.Bag ( Bag, bagToList ) -import GHC.Types.Basic -import GHC.Data.BooleanFormula -import GHC.Core.Class ( className, classSCSelIds ) -import GHC.Core.Utils ( exprType ) -import GHC.Core.ConLike ( conLikeName, ConLike(RealDataCon) ) -import GHC.Core.TyCon ( TyCon, tyConClass_maybe ) -import GHC.Core.FVs -import GHC.Core.DataCon ( dataConNonlinearType ) -import GHC.Types.FieldLabel -import GHC.Hs -import GHC.Driver.Env -import GHC.Utils.Monad ( concatMapM, liftIO ) -import GHC.Types.Id ( isDataConId_maybe ) -import GHC.Types.Name ( Name, nameSrcSpan, nameUnique ) -import GHC.Types.Name.Env ( NameEnv, emptyNameEnv, extendNameEnv, lookupNameEnv ) -import GHC.Types.SrcLoc -import GHC.Tc.Utils.Zonk ( hsLitType, hsPatType ) -import GHC.Core.Type ( mkVisFunTys, Type ) -import GHC.Core.Predicate -import GHC.Core.InstEnv -import GHC.Builtin.Types ( mkListTy, mkSumTy ) -import GHC.Tc.Types -import GHC.Tc.Types.Evidence -import GHC.Types.Var ( Id, Var, EvId, varName, varType, varUnique ) -import GHC.Types.Var.Env -import GHC.Builtin.Uniques -import GHC.Iface.Make ( mkIfaceExports ) -import GHC.Utils.Panic -import GHC.Utils.Misc -import GHC.Data.Maybe -import GHC.Data.FastString - -import GHC.Iface.Ext.Types -import GHC.Iface.Ext.Utils - -import GHC.Unit.Module ( ModuleName, ml_hs_file ) -import GHC.Unit.Module.ModSummary - -import qualified Data.Array as A -import qualified Data.ByteString as BS -import qualified Data.Map as M -import qualified Data.Set as S -import Data.Data ( Data ) -import Data.Void ( Void, absurd ) -import Control.Monad ( forM_ ) -import Control.Monad.Trans.State.Strict -import Control.Monad.Trans.Reader -import Control.Monad.Trans.Class ( lift ) -import GHC.HsToCore.Types -import GHC.HsToCore.Expr -import GHC.HsToCore.Monad - -{- Note [Updating HieAst for changes in the GHC AST] - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -When updating the code in this file for changes in the GHC AST, you -need to pay attention to the following things: - -1) Symbols (Names/Vars/Modules) in the following categories: - - a) Symbols that appear in the source file that directly correspond to - something the user typed - b) Symbols that don't appear in the source, but should be in some sense - "visible" to a user, particularly via IDE tooling or the like. This - includes things like the names introduced by RecordWildcards (We record - all the names introduced by a (..) in HIE files), and will include implicit - parameters and evidence variables after one of my pending MRs lands. - -2) Subtrees that may contain such symbols, or correspond to a SrcSpan in - the file. This includes all `Located` things - -For 1), you need to call `toHie` for one of the following instances - -instance ToHie (Context (Located Name)) where ... -instance ToHie (Context (Located Var)) where ... -instance ToHie (IEContext (Located ModuleName)) where ... 
- -`Context` is a data type that looks like: - -data Context a = C ContextInfo a -- Used for names and bindings - -`ContextInfo` is defined in `GHC.Iface.Ext.Types`, and looks like - -data ContextInfo - = Use -- ^ regular variable - | MatchBind - | IEThing IEType -- ^ import/export - | TyDecl - -- | Value binding - | ValBind - BindType -- ^ whether or not the binding is in an instance - Scope -- ^ scope over which the value is bound - (Maybe Span) -- ^ span of entire binding - ... - -It is used to annotate symbols in the .hie files with some extra information on -the context in which they occur and should be fairly self explanatory. You need -to select one that looks appropriate for the symbol usage. In very rare cases, -you might need to extend this sum type if none of the cases seem appropriate. - -So, given a `Located Name` that is just being "used", and not defined at a -particular location, you would do the following: - - toHie $ C Use located_name - -If you select one that corresponds to a binding site, you will need to -provide a `Scope` and a `Span` for your binding. Both of these are basically -`SrcSpans`. - -The `SrcSpan` in the `Scope` is supposed to span over the part of the source -where the symbol can be legally allowed to occur. For more details on how to -calculate this, see Note [Capturing Scopes and other non local information] -in GHC.Iface.Ext.Ast. - -The binding `Span` is supposed to be the span of the entire binding for -the name. - -For a function definition `foo`: - -foo x = x + y - where y = x^2 - -The binding `Span` is the span of the entire function definition from `foo x` -to `x^2`. For a class definition, this is the span of the entire class, and -so on. If this isn't well defined for your bit of syntax (like a variable -bound by a lambda), then you can just supply a `Nothing` - -There is a test that checks that all symbols in the resulting HIE file -occur inside their stated `Scope`. This can be turned on by passing the --fvalidate-ide-info flag to ghc along with -fwrite-ide-info to generate the -.hie file. - -You may also want to provide a test in testsuite/test/hiefile that includes -a file containing your new construction, and tests that the calculated scope -is valid (by using -fvalidate-ide-info) - -For subtrees in the AST that may contain symbols, the procedure is fairly -straightforward. If you are extending the GHC AST, you will need to provide a -`ToHie` instance for any new types you may have introduced in the AST. - -Here is an extract from the `ToHie` instance for (LHsExpr (GhcPass p)): - - toHie e@(L mspan oexpr) = concatM $ getTypeNode e : case oexpr of - HsVar _ (L _ var) -> - [ toHie $ C Use (L mspan var) - -- Patch up var location since typechecker removes it - ] - HsConLikeOut _ con -> - [ toHie $ C Use $ L mspan $ conLikeName con - ] - ... - HsApp _ a b -> - [ toHie a - , toHie b - ] - -If your subtree is `Located` or has a `SrcSpan` available, the output list -should contain a HieAst `Node` corresponding to the subtree. You can use -either `makeNode` or `getTypeNode` for this purpose, depending on whether it -makes sense to assign a `Type` to the subtree. After this, you just need -to concatenate the result of calling `toHie` on all subexpressions and -appropriately annotated symbols contained in the subtree. - -The code above from the ToHie instance of `LhsExpr (GhcPass p)` is supposed -to work for both the renamed and typechecked source. 
`getTypeNode` is from -the `HasType` class defined in this file, and it has different instances -for `GhcTc` and `GhcRn` that allow it to access the type of the expression -when given a typechecked AST: - -class Data a => HasType a where - getTypeNode :: a -> HieM [HieAST Type] -instance HasType (LHsExpr GhcTc) where - getTypeNode e@(L spn e') = ... -- Actually get the type for this expression -instance HasType (LHsExpr GhcRn) where - getTypeNode (L spn e) = makeNode e spn -- Fallback to a regular `makeNode` without recording the type - -If your subtree doesn't have a span available, you can omit the `makeNode` -call and just recurse directly in to the subexpressions. - --} - --- These synonyms match those defined in compiler/GHC.hs -type RenamedSource = ( HsGroup GhcRn, [LImportDecl GhcRn] - , Maybe [(LIE GhcRn, Avails)] - , Maybe LHsDocString ) -type TypecheckedSource = LHsBinds GhcTc - - -{- Note [Name Remapping] - ~~~~~~~~~~~~~~~~~~~~~ -The Typechecker introduces new names for mono names in AbsBinds. -We don't care about the distinction between mono and poly bindings, -so we replace all occurrences of the mono name with the poly name. --} -type VarMap a = DVarEnv (Var,a) -data HieState = HieState - { name_remapping :: NameEnv Id - , unlocated_ev_binds :: VarMap (S.Set ContextInfo) - -- These contain evidence bindings that we don't have a location for - -- These are placed at the top level Node in the HieAST after everything - -- else has been generated - -- This includes things like top level evidence bindings. - } - -addUnlocatedEvBind :: Var -> ContextInfo -> HieM () -addUnlocatedEvBind var ci = do - let go (a,b) (_,c) = (a,S.union b c) - lift $ modify' $ \s -> - s { unlocated_ev_binds = - extendDVarEnv_C go (unlocated_ev_binds s) - var (var,S.singleton ci) - } - -getUnlocatedEvBinds :: FastString -> HieM (NodeIdentifiers Type,[HieAST Type]) -getUnlocatedEvBinds file = do - binds <- lift $ gets unlocated_ev_binds - org <- ask - let elts = dVarEnvElts binds - - mkNodeInfo (n,ci) = (Right (varName n), IdentifierDetails (Just $ varType n) ci) - - go e@(v,_) (xs,ys) = case nameSrcSpan $ varName v of - RealSrcSpan spn _ - | srcSpanFile spn == file -> - let node = Node (mkSourcedNodeInfo org ni) spn [] - ni = NodeInfo mempty [] $ M.fromList [mkNodeInfo e] - in (xs,node:ys) - _ -> (mkNodeInfo e : xs,ys) - - (nis,asts) = foldr go ([],[]) elts - - pure $ (M.fromList nis, asts) - -initState :: HieState -initState = HieState emptyNameEnv emptyDVarEnv - -class ModifyState a where -- See Note [Name Remapping] - addSubstitution :: a -> a -> HieState -> HieState - -instance ModifyState Name where - addSubstitution _ _ hs = hs - -instance ModifyState Id where - addSubstitution mono poly hs = - hs{name_remapping = extendNameEnv (name_remapping hs) (varName mono) poly} - -modifyState :: ModifyState (IdP p) => [ABExport p] -> HieState -> HieState -modifyState = foldr go id - where - go ABE{abe_poly=poly,abe_mono=mono} f - = addSubstitution mono poly . f - go _ f = f - -type HieM = ReaderT NodeOrigin (StateT HieState DsM) - --- | Construct an 'HieFile' from the outputs of the typechecker. -mkHieFile :: ModSummary - -> TcGblEnv - -> RenamedSource -> Hsc HieFile -mkHieFile ms ts rs = do - let src_file = expectJust "mkHieFile" (ml_hs_file $ ms_location ms) - src <- liftIO $ BS.readFile src_file - mkHieFileWithSource src_file src ms ts rs - --- | Construct an 'HieFile' from the outputs of the typechecker but don't --- read the source file again from disk. 
-mkHieFileWithSource :: FilePath - -> BS.ByteString - -> ModSummary - -> TcGblEnv - -> RenamedSource -> Hsc HieFile -mkHieFileWithSource src_file src ms ts rs = do - let tc_binds = tcg_binds ts - top_ev_binds = tcg_ev_binds ts - insts = tcg_insts ts - tcs = tcg_tcs ts - hsc_env <- Hsc $ \e w -> return (e, w) - (_msgs, res) <- liftIO $ initDs hsc_env ts $ getCompressedAsts tc_binds rs top_ev_binds insts tcs - let (asts',arr) = expectJust "mkHieFileWithSource" res - return $ HieFile - { hie_hs_file = src_file - , hie_module = ms_mod ms - , hie_types = arr - , hie_asts = asts' - -- mkIfaceExports sorts the AvailInfos for stability - , hie_exports = mkIfaceExports (tcg_exports ts) - , hie_hs_src = src - } - -getCompressedAsts :: TypecheckedSource -> RenamedSource -> Bag EvBind -> [ClsInst] -> [TyCon] - -> DsM (HieASTs TypeIndex, A.Array TypeIndex HieTypeFlat) -getCompressedAsts ts rs top_ev_binds insts tcs = do - asts <- enrichHie ts rs top_ev_binds insts tcs - return $ compressTypes asts - -enrichHie :: TypecheckedSource -> RenamedSource -> Bag EvBind -> [ClsInst] -> [TyCon] - -> DsM (HieASTs Type) -enrichHie ts (hsGrp, imports, exports, _) ev_bs insts tcs = - flip evalStateT initState $ flip runReaderT SourceInfo $ do - tasts <- toHie $ fmap (BC RegularBind ModuleScope) ts - rasts <- processGrp hsGrp - imps <- toHie $ filter (not . ideclImplicit . unLoc) imports - exps <- toHie $ fmap (map $ IEC Export . fst) exports - -- Add Instance bindings - forM_ insts $ \i -> - addUnlocatedEvBind (is_dfun i) (EvidenceVarBind (EvInstBind False (is_cls_nm i)) ModuleScope Nothing) - -- Add class parent bindings - forM_ tcs $ \tc -> - case tyConClass_maybe tc of - Nothing -> pure () - Just c -> forM_ (classSCSelIds c) $ \v -> - addUnlocatedEvBind v (EvidenceVarBind (EvInstBind True (className c)) ModuleScope Nothing) - let spanFile file children = case children of - [] -> realSrcLocSpan (mkRealSrcLoc file 1 1) - _ -> mkRealSrcSpan (realSrcSpanStart $ nodeSpan $ head children) - (realSrcSpanEnd $ nodeSpan $ last children) - - flat_asts = concat - [ tasts - , rasts - , imps - , exps - ] - - modulify (HiePath file) xs' = do - - top_ev_asts :: [HieAST Type] <- do - let - l :: SrcSpanAnnA - l = noAnnSrcSpan (RealSrcSpan (realSrcLocSpan $ mkRealSrcLoc file 1 1) Nothing) - toHie $ EvBindContext ModuleScope Nothing - $ L l (EvBinds ev_bs) - - (uloc_evs,more_ev_asts) <- getUnlocatedEvBinds file - - let xs = mergeSortAsts $ xs' ++ top_ev_asts ++ more_ev_asts - span = spanFile file xs - - moduleInfo = SourcedNodeInfo - $ M.singleton SourceInfo - $ (simpleNodeInfo "Module" "Module") - {nodeIdentifiers = uloc_evs} - - moduleNode = Node moduleInfo span [] - - case mergeSortAsts $ moduleNode : xs of - [x] -> return x - xs -> panicDoc "enrichHie: mergeSortAsts retur:ed more than one result" (ppr $ map nodeSpan xs) - - asts' <- sequence - $ M.mapWithKey modulify - $ M.fromListWith (++) - $ map (\x -> (HiePath (srcSpanFile (nodeSpan x)),[x])) flat_asts - - let asts = HieASTs $ resolveTyVarScopes asts' - return asts - where - processGrp grp = concatM - [ toHie $ fmap (RS ModuleScope ) hs_valds grp - , toHie $ hs_splcds grp - , toHie $ hs_tyclds grp - , toHie $ hs_derivds grp - , toHie $ hs_fixds grp - , toHie $ hs_defds grp - , toHie $ hs_fords grp - , toHie $ hs_warnds grp - , toHie $ hs_annds grp - , toHie $ hs_ruleds grp - ] - -getRealSpanA :: SrcSpanAnn' ann -> Maybe Span -getRealSpanA la = getRealSpan (locA la) - -getRealSpan :: SrcSpan -> Maybe Span -getRealSpan (RealSrcSpan sp _) = Just sp -getRealSpan _ = Nothing - 
-grhss_span :: (Anno (GRHS (GhcPass p) (LocatedA (body (GhcPass p)))) ~ SrcSpan - , Data (HsLocalBinds (GhcPass p))) - => GRHSs (GhcPass p) (LocatedA (body (GhcPass p))) -> SrcSpan -grhss_span (GRHSs _ xs bs) = foldl' combineSrcSpans (spanHsLocaLBinds bs) (map getLoc xs) - -bindingsOnly :: [Context Name] -> HieM [HieAST a] -bindingsOnly [] = pure [] -bindingsOnly (C c n : xs) = do - org <- ask - rest <- bindingsOnly xs - pure $ case nameSrcSpan n of - RealSrcSpan span _ -> Node (mkSourcedNodeInfo org nodeinfo) span [] : rest - where nodeinfo = NodeInfo S.empty [] (M.singleton (Right n) info) - info = mempty{identInfo = S.singleton c} - _ -> rest - -concatM :: Monad m => [m [a]] -> m [a] -concatM xs = concat <$> sequence xs - -{- Note [Capturing Scopes and other non local information] - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -toHie is a local transformation, but scopes of bindings cannot be known locally, -hence we have to push the relevant info down into the binding nodes. -We use the following types (*Context and *Scoped) to wrap things and -carry the required info -(Maybe Span) always carries the span of the entire binding, including rhs --} -data Context a = C ContextInfo a -- Used for names and bindings - -data RContext a = RC RecFieldContext a -data RFContext a = RFC RecFieldContext (Maybe Span) a --- ^ context for record fields - -data IEContext a = IEC IEType a --- ^ context for imports/exports - -data BindContext a = BC BindType Scope a --- ^ context for imports/exports - -data PatSynFieldContext a = PSC (Maybe Span) a --- ^ context for pattern synonym fields. - -data SigContext a = SC SigInfo a --- ^ context for type signatures - -data SigInfo = SI SigType (Maybe Span) - -data SigType = BindSig | ClassSig | InstSig - -data EvBindContext a = EvBindContext Scope (Maybe Span) a - -data RScoped a = RS Scope a --- ^ Scope spans over everything to the right of a, (mostly) not --- including a itself --- (Includes a in a few special cases like recursive do bindings) or --- let/where bindings - --- | Pattern scope -data PScoped a = PS (Maybe Span) - Scope -- ^ use site of the pattern - Scope -- ^ pattern to the right of a, not including a - a - deriving (Data) -- Pattern Scope - -{- Note [TyVar Scopes] - ~~~~~~~~~~~~~~~~~~~ -Due to -XScopedTypeVariables, type variables can be in scope quite far from -their original binding. 
We resolve the scope of these type variables -in a separate pass --} -data TScoped a = TS TyVarScope a -- TyVarScope - -data TVScoped a = TVS TyVarScope Scope a -- TyVarScope --- ^ First scope remains constant --- Second scope is used to build up the scope of a tyvar over --- things to its right, ala RScoped - --- | Each element scopes over the elements to the right -listScopes :: Scope -> [LocatedA a] -> [RScoped (LocatedA a)] -listScopes _ [] = [] -listScopes rhsScope [pat] = [RS rhsScope pat] -listScopes rhsScope (pat : pats) = RS sc pat : pats' - where - pats'@((RS scope p):_) = listScopes rhsScope pats - sc = combineScopes scope $ mkScope $ getLocA p - --- | 'listScopes' specialised to 'PScoped' things -patScopes - :: Maybe Span - -> Scope - -> Scope - -> [LPat (GhcPass p)] - -> [PScoped (LPat (GhcPass p))] -patScopes rsp useScope patScope xs = - map (\(RS sc a) -> PS rsp useScope sc a) $ - listScopes patScope xs - --- | 'listScopes' specialised to 'HsPatSigType' -tScopes - :: Scope - -> Scope - -> [HsPatSigType (GhcPass a)] - -> [TScoped (HsPatSigType (GhcPass a))] -tScopes scope rhsScope xs = - map (\(RS sc a) -> TS (ResolvedScopes [scope, sc]) (unLoc a)) $ - listScopes rhsScope (map (\hsps -> L (getLoc $ hsps_body hsps) hsps) xs) - -- We make the HsPatSigType into a Located one by using the location of the underlying LHsType. - -- We then strip off the redundant location information afterward, and take the union of the given scope and those to the right when forming the TS. - --- | 'listScopes' specialised to 'TVScoped' things -tvScopes - :: TyVarScope - -> Scope - -> [LHsTyVarBndr flag (GhcPass a)] - -> [TVScoped (LHsTyVarBndr flag (GhcPass a))] -tvScopes tvScope rhsScope xs = - map (\(RS sc a)-> TVS tvScope sc a) $ listScopes rhsScope xs - -{- Note [Scoping Rules for SigPat] - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Explicitly quantified variables in pattern type signatures are not -brought into scope in the rhs, but implicitly quantified variables -are (HsWC and HsIB). -This is unlike other signatures, where explicitly quantified variables -are brought into the RHS Scope -For example -foo :: forall a. ...; -foo = ... -- a is in scope here - -bar (x :: forall a. a -> a) = ... -- a is not in scope here --- ^ a is in scope here (pattern body) - -bax (x :: a) = ... 
-- a is in scope here - -This case in handled in the instance for HsPatSigType --} - -class HasLoc a where - -- ^ conveniently calculate locations for things without locations attached - loc :: a -> SrcSpan - -instance HasLoc thing => HasLoc (PScoped thing) where - loc (PS _ _ _ a) = loc a - -instance HasLoc (Located a) where - loc (L l _) = l - -instance HasLoc (LocatedA a) where - loc (L la _) = locA la - -instance HasLoc (LocatedN a) where - loc (L la _) = locA la - -instance HasLoc a => HasLoc [a] where - loc [] = noSrcSpan - loc xs = foldl1' combineSrcSpans $ map loc xs - -instance (HasLoc a, HiePass p) => HasLoc (FamEqn (GhcPass p) a) where - loc (FamEqn _ a outer_bndrs b _ c) = case outer_bndrs of - HsOuterImplicit{} -> - foldl1' combineSrcSpans [loc a, loc b, loc c] - HsOuterExplicit{hso_bndrs = tvs} -> - foldl1' combineSrcSpans [loc a, loc tvs, loc b, loc c] - -instance (HasLoc tm, HasLoc ty) => HasLoc (HsArg tm ty) where - loc (HsValArg tm) = loc tm - loc (HsTypeArg _ ty) = loc ty - loc (HsArgPar sp) = sp - -instance HasLoc (HsDataDefn GhcRn) where - loc def@(HsDataDefn{}) = loc $ dd_cons def - -- Only used for data family instances, so we only need rhs - -- Most probably the rest will be unhelpful anyway - --- | The main worker class --- See Note [Updating HieAst for changes in the GHC AST] for more information --- on how to add/modify instances for this. -class ToHie a where - toHie :: a -> HieM [HieAST Type] - --- | Used to collect type info -class HasType a where - getTypeNode :: a -> HieM [HieAST Type] - -instance ToHie Void where - toHie v = absurd v - -instance (ToHie a) => ToHie [a] where - toHie = concatMapM toHie - -instance (ToHie a) => ToHie (Bag a) where - toHie = toHie . bagToList - -instance (ToHie a) => ToHie (Maybe a) where - toHie = maybe (pure []) toHie - -instance ToHie (IEContext (LocatedA ModuleName)) where - toHie (IEC c (L (SrcSpanAnn _ (RealSrcSpan span _)) mname)) = do - org <- ask - pure $ [Node (mkSourcedNodeInfo org $ NodeInfo S.empty [] idents) span []] - where details = mempty{identInfo = S.singleton (IEThing c)} - idents = M.singleton (Left mname) details - toHie _ = pure [] - -instance ToHie (Context (Located a)) => ToHie (Context (LocatedN a)) where - toHie (C c (L l a)) = toHie (C c (L (locA l) a)) - -instance ToHie (Context (Located a)) => ToHie (Context (LocatedA a)) where - toHie (C c (L l a)) = toHie (C c (L (locA l) a)) - -instance ToHie (Context (Located Var)) where - toHie c = case c of - C context (L (RealSrcSpan span _) name') - | varUnique name' == mkBuiltinUnique 1 -> pure [] - -- `mkOneRecordSelector` makes a field var using this unique, which we ignore - | otherwise -> do - m <- lift $ gets name_remapping - org <- ask - let name = case lookupNameEnv m (varName name') of - Just var -> var - Nothing-> name' - ty = case isDataConId_maybe name' of - Nothing -> varType name' - Just dc -> dataConNonlinearType dc - pure - [Node - (mkSourcedNodeInfo org $ NodeInfo S.empty [] $ - M.singleton (Right $ varName name) - (IdentifierDetails (Just ty) - (S.singleton context))) - span - []] - C (EvidenceVarBind i _ sp) (L _ name) -> do - addUnlocatedEvBind name (EvidenceVarBind i ModuleScope sp) - pure [] - _ -> pure [] - -instance ToHie (Context (Located Name)) where - toHie c = case c of - C context (L (RealSrcSpan span _) name') - | nameUnique name' == mkBuiltinUnique 1 -> pure [] - -- `mkOneRecordSelector` makes a field var using this unique, which we ignore - | otherwise -> do - m <- lift $ gets name_remapping - org <- ask - let name = case 
lookupNameEnv m name' of - Just var -> varName var - Nothing -> name' - pure - [Node - (mkSourcedNodeInfo org $ NodeInfo S.empty [] $ - M.singleton (Right name) - (IdentifierDetails Nothing - (S.singleton context))) - span - []] - _ -> pure [] - -evVarsOfTermList :: EvTerm -> [EvId] -evVarsOfTermList (EvExpr e) = exprSomeFreeVarsList isEvVar e -evVarsOfTermList (EvTypeable _ ev) = - case ev of - EvTypeableTyCon _ e -> concatMap evVarsOfTermList e - EvTypeableTyApp e1 e2 -> concatMap evVarsOfTermList [e1,e2] - EvTypeableTrFun e1 e2 e3 -> concatMap evVarsOfTermList [e1,e2,e3] - EvTypeableTyLit e -> evVarsOfTermList e -evVarsOfTermList (EvFun{}) = [] - -instance ToHie (EvBindContext (LocatedA TcEvBinds)) where - toHie (EvBindContext sc sp (L span (EvBinds bs))) - = concatMapM go $ bagToList bs - where - go evbind = do - let evDeps = evVarsOfTermList $ eb_rhs evbind - depNames = EvBindDeps $ map varName evDeps - concatM $ - [ toHie (C (EvidenceVarBind (EvLetBind depNames) (combineScopes sc (mkScopeA span)) sp) - (L span $ eb_lhs evbind)) - , toHie $ map (C EvidenceVarUse . L span) $ evDeps - ] - toHie _ = pure [] - -instance ToHie (LocatedA HsWrapper) where - toHie (L osp wrap) - = case wrap of - (WpLet bs) -> toHie $ EvBindContext (mkScopeA osp) (getRealSpanA osp) (L osp bs) - (WpCompose a b) -> concatM $ - [toHie (L osp a), toHie (L osp b)] - (WpFun a b _ _) -> concatM $ - [toHie (L osp a), toHie (L osp b)] - (WpEvLam a) -> - toHie $ C (EvidenceVarBind EvWrapperBind (mkScopeA osp) (getRealSpanA osp)) - $ L osp a - (WpEvApp a) -> - concatMapM (toHie . C EvidenceVarUse . L osp) $ evVarsOfTermList a - _ -> pure [] - -instance HiePass p => HasType (LocatedA (HsBind (GhcPass p))) where - getTypeNode (L spn bind) = - case hiePass @p of - HieRn -> makeNode bind (locA spn) - HieTc -> case bind of - FunBind{fun_id = name} -> makeTypeNode bind (locA spn) (varType $ unLoc name) - _ -> makeNode bind (locA spn) - -instance HiePass p => HasType (LocatedA (Pat (GhcPass p))) where - getTypeNode (L spn pat) = - case hiePass @p of - HieRn -> makeNodeA pat spn - HieTc -> makeTypeNodeA pat spn (hsPatType pat) - --- | This instance tries to construct 'HieAST' nodes which include the type of --- the expression. It is not yet possible to do this efficiently for all --- expression forms, so we skip filling in the type for those inputs. --- --- 'HsApp', for example, doesn't have any type information available directly on --- the node. Our next recourse would be to desugar it into a 'CoreExpr' then --- query the type of that. Yet both the desugaring call and the type query both --- involve recursive calls to the function and argument! This is particularly --- problematic when you realize that the HIE traversal will eventually visit --- those nodes too and ask for their types again. --- --- Since the above is quite costly, we just skip cases where computing the --- expression's type is going to be expensive. 
--- --- See #16233 -instance HiePass p => HasType (LocatedA (HsExpr (GhcPass p))) where - getTypeNode e@(L spn e') = - case hiePass @p of - HieRn -> makeNodeA e' spn - HieTc -> - -- Some expression forms have their type immediately available - let tyOpt = case e' of - HsUnboundVar (HER _ ty _) _ -> Just ty - HsLit _ l -> Just (hsLitType l) - HsOverLit _ o -> Just (overLitType o) - - HsConLikeOut _ (RealDataCon con) -> Just (dataConNonlinearType con) - - HsLam _ (MG { mg_ext = groupTy }) -> Just (matchGroupType groupTy) - HsLamCase _ (MG { mg_ext = groupTy }) -> Just (matchGroupType groupTy) - HsCase _ _ (MG { mg_ext = groupTy }) -> Just (mg_res_ty groupTy) - - ExplicitList ty _ -> Just (mkListTy ty) - ExplicitSum ty _ _ _ -> Just (mkSumTy ty) - HsDo ty _ _ -> Just ty - HsMultiIf ty _ -> Just ty - - _ -> Nothing - - in - case tyOpt of - Just t -> makeTypeNodeA e' spn t - Nothing - | skipDesugaring e' -> fallback - | otherwise -> do - (e, no_errs) <- lift $ lift $ discardWarningsDs $ askNoErrsDs $ dsLExpr e - if no_errs - then makeTypeNodeA e' spn . exprType $ e - else fallback - where - fallback = makeNodeA e' spn - - matchGroupType :: MatchGroupTc -> Type - matchGroupType (MatchGroupTc args res) = mkVisFunTys args res - - -- | Skip desugaring of these expressions for performance reasons. - -- - -- See impact on Haddock output (esp. missing type annotations or links) - -- before marking more things here as 'False'. See impact on Haddock - -- performance before marking more things as 'True'. - skipDesugaring :: HsExpr GhcTc -> Bool - skipDesugaring e = case e of - HsVar{} -> False - HsConLikeOut{} -> False - HsRecFld{} -> False - HsOverLabel{} -> False - HsIPVar{} -> False - XExpr (WrapExpr {}) -> False - -- CHANGED: the line below makes record-dot-syntax types work - XExpr (ExpansionExpr {}) -> False - _ -> True - -data HiePassEv p where - HieRn :: HiePassEv 'Renamed - HieTc :: HiePassEv 'Typechecked - -class ( IsPass p - , HiePass (NoGhcTcPass p) - , ModifyState (IdGhcP p) - , Data (GRHS (GhcPass p) (LocatedA (HsExpr (GhcPass p)))) - , Data (Match (GhcPass p) (LocatedA (HsExpr (GhcPass p)))) - , Data (Match (GhcPass p) (LocatedA (HsCmd (GhcPass p)))) - , Data (Stmt (GhcPass p) (LocatedA (HsExpr (GhcPass p)))) - , Data (Stmt (GhcPass p) (LocatedA (HsCmd (GhcPass p)))) - , Data (HsExpr (GhcPass p)) - , Data (HsCmd (GhcPass p)) - , Data (AmbiguousFieldOcc (GhcPass p)) - , Data (HsCmdTop (GhcPass p)) - , Data (GRHS (GhcPass p) (LocatedA (HsCmd (GhcPass p)))) - , Data (HsSplice (GhcPass p)) - , Data (HsLocalBinds (GhcPass p)) - , Data (FieldOcc (GhcPass p)) - , Data (HsTupArg (GhcPass p)) - , Data (IPBind (GhcPass p)) - , ToHie (Context (Located (IdGhcP p))) - , ToHie (RFContext (Located (AmbiguousFieldOcc (GhcPass p)))) - , ToHie (RFContext (Located (FieldOcc (GhcPass p)))) - , ToHie (TScoped (LHsWcType (GhcPass (NoGhcTcPass p)))) - , ToHie (TScoped (LHsSigWcType (GhcPass (NoGhcTcPass p)))) - , Anno (IdGhcP p) ~ SrcSpanAnnN - ) - => HiePass p where - hiePass :: HiePassEv p - -instance HiePass 'Renamed where - hiePass = HieRn -instance HiePass 'Typechecked where - hiePass = HieTc - -instance ToHie (Context (Located NoExtField)) where - toHie _ = pure [] - -type AnnoBody p body - = ( Anno (Match (GhcPass p) (LocatedA (body (GhcPass p)))) - ~ SrcSpanAnnA - , Anno [LocatedA (Match (GhcPass p) (LocatedA (body (GhcPass p))))] - ~ SrcSpanAnnL - , Anno (GRHS (GhcPass p) (LocatedA (body (GhcPass p)))) - ~ SrcSpan - , Anno (StmtLR (GhcPass p) (GhcPass p) (LocatedA (body (GhcPass p)))) ~ SrcSpanAnnA - - 
, Data (body (GhcPass p)) - , Data (Match (GhcPass p) (LocatedA (body (GhcPass p)))) - , Data (GRHS (GhcPass p) (LocatedA (body (GhcPass p)))) - , Data (Stmt (GhcPass p) (LocatedA (body (GhcPass p)))) - - , IsPass p - ) - -instance HiePass p => ToHie (BindContext (LocatedA (HsBind (GhcPass p)))) where - toHie (BC context scope b@(L span bind)) = - concatM $ getTypeNode b : case bind of - FunBind{fun_id = name, fun_matches = matches, fun_ext = wrap} -> - [ toHie $ C (ValBind context scope $ getRealSpanA span) name - , toHie matches - , case hiePass @p of - HieTc -> toHie $ L span wrap - _ -> pure [] - ] - PatBind{pat_lhs = lhs, pat_rhs = rhs} -> - [ toHie $ PS (getRealSpan (locA span)) scope NoScope lhs - , toHie rhs - ] - VarBind{var_rhs = expr} -> - [ toHie expr - ] - AbsBinds{ abs_exports = xs, abs_binds = binds - , abs_ev_binds = ev_binds - , abs_ev_vars = ev_vars } -> - [ lift (modify (modifyState xs)) >> -- Note [Name Remapping] - (toHie $ fmap (BC context scope) binds) - , toHie $ map (L span . abe_wrap) xs - , toHie $ - map (EvBindContext (mkScopeA span) (getRealSpanA span) - . L span) ev_binds - , toHie $ - map (C (EvidenceVarBind EvSigBind - (mkScopeA span) - (getRealSpanA span)) - . L span) ev_vars - ] - PatSynBind _ psb -> - [ toHie $ L (locA span) psb -- PatSynBinds only occur at the top level - ] - -instance ( HiePass p - , AnnoBody p body - , ToHie (LocatedA (body (GhcPass p))) - ) => ToHie (MatchGroup (GhcPass p) (LocatedA (body (GhcPass p)))) where - toHie mg = case mg of - MG{ mg_alts = (L span alts) , mg_origin = origin} -> - local (setOrigin origin) $ concatM - [ locOnly (locA span) - , toHie alts - ] - -setOrigin :: Origin -> NodeOrigin -> NodeOrigin -setOrigin FromSource _ = SourceInfo -setOrigin Generated _ = GeneratedInfo - -instance HiePass p => ToHie (Located (PatSynBind (GhcPass p) (GhcPass p))) where - toHie (L sp psb) = concatM $ case psb of - PSB{psb_id=var, psb_args=dets, psb_def=pat, psb_dir=dir} -> - [ toHie $ C (Decl PatSynDec $ getRealSpan sp) var - , toHie $ toBind dets - , toHie $ PS Nothing lhsScope patScope pat - , toHie dir - ] - where - lhsScope = combineScopes varScope detScope - varScope = mkLScopeN var - patScope = mkScopeA $ getLoc pat - detScope = case dets of - (PrefixCon _ args) -> foldr combineScopes NoScope $ map mkLScopeN args - (InfixCon a b) -> combineScopes (mkLScopeN a) (mkLScopeN b) - (RecCon r) -> foldr go NoScope r - go (RecordPatSynField a b) c = combineScopes c - $ combineScopes (mkLScopeN (rdrNameFieldOcc a)) (mkLScopeN b) - detSpan = case detScope of - LocalScope a -> Just a - _ -> Nothing - -- CHANGED: removed ASSERT - -- toBind (PrefixCon ts args) = ASSERT(null ts) PrefixCon ts $ map (C Use) args - toBind (PrefixCon ts args) = PrefixCon ts $ map (C Use) args - toBind (InfixCon a b) = InfixCon (C Use a) (C Use b) - toBind (RecCon r) = RecCon $ map (PSC detSpan) r - -instance HiePass p => ToHie (HsPatSynDir (GhcPass p)) where - toHie dir = case dir of - ExplicitBidirectional mg -> toHie mg - _ -> pure [] - -instance ( HiePass p - , Data (body (GhcPass p)) - , AnnoBody p body - , ToHie (LocatedA (body (GhcPass p))) - ) => ToHie (LocatedA (Match (GhcPass p) (LocatedA (body (GhcPass p))))) where - toHie (L span m ) = concatM $ node : case m of - Match{m_ctxt=mctx, m_pats = pats, m_grhss = grhss } -> - [ toHie mctx - , let rhsScope = mkScope $ grhss_span grhss - in toHie $ patScopes Nothing rhsScope NoScope pats - , toHie grhss - ] - where - node = case hiePass @p of - HieTc -> makeNodeA m span - HieRn -> makeNodeA m span - -instance 
HiePass p => ToHie (HsMatchContext (GhcPass p)) where - toHie (FunRhs{mc_fun=name}) = toHie $ C MatchBind name - toHie (StmtCtxt a) = toHie a - toHie _ = pure [] - -instance HiePass p => ToHie (HsStmtContext (GhcPass p)) where - toHie (PatGuard a) = toHie a - toHie (ParStmtCtxt a) = toHie a - toHie (TransStmtCtxt a) = toHie a - toHie _ = pure [] - -instance HiePass p => ToHie (PScoped (LocatedA (Pat (GhcPass p)))) where - toHie (PS rsp scope pscope lpat@(L ospan opat)) = - concatM $ getTypeNode lpat : case opat of - WildPat _ -> - [] - VarPat _ lname -> - [ toHie $ C (PatternBind scope pscope rsp) lname - ] - LazyPat _ p -> - [ toHie $ PS rsp scope pscope p - ] - AsPat _ lname pat -> - [ toHie $ C (PatternBind scope - (combineScopes (mkLScopeA pat) pscope) - rsp) - lname - , toHie $ PS rsp scope pscope pat - ] - ParPat _ pat -> - [ toHie $ PS rsp scope pscope pat - ] - BangPat _ pat -> - [ toHie $ PS rsp scope pscope pat - ] - ListPat _ pats -> - [ toHie $ patScopes rsp scope pscope pats - ] - TuplePat _ pats _ -> - [ toHie $ patScopes rsp scope pscope pats - ] - SumPat _ pat _ _ -> - [ toHie $ PS rsp scope pscope pat - ] - ConPat {pat_con = con, pat_args = dets, pat_con_ext = ext} -> - case hiePass @p of - HieTc -> - [ toHie $ C Use $ fmap conLikeName con - , toHie $ contextify dets - , let ev_binds = cpt_binds ext - ev_vars = cpt_dicts ext - wrap = cpt_wrap ext - evscope = mkScopeA ospan `combineScopes` scope `combineScopes` pscope - in concatM [ toHie $ EvBindContext scope rsp $ L ospan ev_binds - , toHie $ L ospan wrap - , toHie $ map (C (EvidenceVarBind EvPatternBind evscope rsp) - . L ospan) ev_vars - ] - ] - HieRn -> - [ toHie $ C Use con - , toHie $ contextify dets - ] - ViewPat _ expr pat -> - [ toHie expr - , toHie $ PS rsp scope pscope pat - ] - SplicePat _ sp -> - [ toHie $ L ospan sp - ] - LitPat _ _ -> - [] - NPat _ _ _ _ -> - [] - NPlusKPat _ n _ _ _ _ -> - [ toHie $ C (PatternBind scope pscope rsp) n - ] - SigPat _ pat sig -> - [ toHie $ PS rsp scope pscope pat - , case hiePass @p of - HieTc -> - let cscope = mkLScopeA pat in - toHie $ TS (ResolvedScopes [cscope, scope, pscope]) - sig - HieRn -> pure [] - ] - XPat e -> - case hiePass @p of - HieTc -> - let CoPat wrap pat _ = e - in [ toHie $ L ospan wrap - , toHie $ PS rsp scope pscope $ (L ospan pat) - ] - where - contextify :: a ~ LPat (GhcPass p) => HsConDetails (HsPatSigType (NoGhcTc (GhcPass p))) a (HsRecFields (GhcPass p) a) - -> HsConDetails (TScoped (HsPatSigType (NoGhcTc (GhcPass p)))) (PScoped a) (RContext (HsRecFields (GhcPass p) (PScoped a))) - contextify (PrefixCon tyargs args) = PrefixCon (tScopes scope argscope tyargs) (patScopes rsp scope pscope args) - where argscope = foldr combineScopes NoScope $ map mkLScopeA args - contextify (InfixCon a b) = InfixCon a' b' - where [a', b'] = patScopes rsp scope pscope [a,b] - contextify (RecCon r) = RecCon $ RC RecFieldMatch $ contextify_rec r - contextify_rec (HsRecFields fds a) = HsRecFields (map go scoped_fds) a - where - go :: RScoped (LocatedA (HsRecField' id a1)) - -> LocatedA (HsRecField' id (PScoped a1)) -- AZ - go (RS fscope (L spn (HsRecField x lbl pat pun))) = - L spn $ HsRecField x lbl (PS rsp scope fscope pat) pun - scoped_fds = listScopes pscope fds - -instance ToHie (TScoped (HsPatSigType GhcRn)) where - toHie (TS sc (HsPS (HsPSRn wcs tvs) body@(L span _))) = concatM $ - [ bindingsOnly $ map (C $ TyVarBind (mkScopeA span) sc) (wcs++tvs) - , toHie body - ] - -- See Note [Scoping Rules for SigPat] - -instance ( ToHie (LocatedA (body (GhcPass p))) - , 
HiePass p - , AnnoBody p body - ) => ToHie (GRHSs (GhcPass p) (LocatedA (body (GhcPass p)))) where - toHie grhs = concatM $ case grhs of - GRHSs _ grhss binds -> - [ toHie grhss - , toHie $ RS (mkScope $ grhss_span grhs) binds - ] - -instance ( ToHie (LocatedA (body (GhcPass p))) - , HiePass p - , AnnoBody p body - ) => ToHie (Located (GRHS (GhcPass p) (LocatedA (body (GhcPass p))))) where - toHie (L span g) = concatM $ node : case g of - GRHS _ guards body -> - [ toHie $ listScopes (mkLScopeA body) guards - , toHie body - ] - where - node = case hiePass @p of - HieRn -> makeNode g span - HieTc -> makeNode g span - -instance HiePass p => ToHie (LocatedA (HsExpr (GhcPass p))) where - toHie e@(L mspan oexpr) = concatM $ getTypeNode e : case oexpr of - HsVar _ (L _ var) -> - [ toHie $ C Use (L mspan var) - -- Patch up var location since typechecker removes it - ] - HsUnboundVar _ _ -> [] -- there is an unbound name here, but that causes trouble - HsConLikeOut _ con -> - [ toHie $ C Use $ L mspan $ conLikeName con - ] - HsRecFld _ fld -> - [ toHie $ RFC RecFieldOcc Nothing (L (locA mspan) fld) - ] - HsOverLabel {} -> [] - HsIPVar _ _ -> [] - HsOverLit _ _ -> [] - HsLit _ _ -> [] - HsLam _ mg -> - [ toHie mg - ] - HsLamCase _ mg -> - [ toHie mg - ] - HsApp _ a b -> - [ toHie a - , toHie b - ] - HsAppType _ expr sig -> - [ toHie expr - , toHie $ TS (ResolvedScopes []) sig - ] - OpApp _ a b c -> - [ toHie a - , toHie b - , toHie c - ] - NegApp _ a _ -> - [ toHie a - ] - HsPar _ a -> - [ toHie a - ] - SectionL _ a b -> - [ toHie a - , toHie b - ] - SectionR _ a b -> - [ toHie a - , toHie b - ] - ExplicitTuple _ args _ -> - [ toHie args - ] - ExplicitSum _ _ _ expr -> - [ toHie expr - ] - HsCase _ expr matches -> - [ toHie expr - , toHie matches - ] - HsIf _ a b c -> - [ toHie a - , toHie b - , toHie c - ] - HsMultiIf _ grhss -> - [ toHie grhss - ] - HsLet _ binds expr -> - [ toHie $ RS (mkLScopeA expr) binds - , toHie expr - ] - HsDo _ _ (L ispan stmts) -> - [ locOnly (locA ispan) - , toHie $ listScopes NoScope stmts - ] - ExplicitList _ exprs -> - [ toHie exprs - ] - RecordCon { rcon_con = con, rcon_flds = binds} -> - [ toHie $ C Use $ con_name - , toHie $ RC RecFieldAssign $ binds - ] - where - con_name :: LocatedN Name - con_name = case hiePass @p of -- Like ConPat - HieRn -> con - HieTc -> fmap conLikeName con - RecordUpd {rupd_expr = expr, rupd_flds = Left upds}-> - [ toHie expr - , toHie $ map (RC RecFieldAssign) upds - ] - RecordUpd {rupd_expr = expr, rupd_flds = Right _}-> - [ toHie expr - ] - ExprWithTySig _ expr sig -> - [ toHie expr - , toHie $ TS (ResolvedScopes [mkLScopeA expr]) sig - ] - ArithSeq _ _ info -> - [ toHie info - ] - HsPragE _ _ expr -> - [ toHie expr - ] - HsProc _ pat cmdtop -> - [ toHie $ PS Nothing (mkLScope cmdtop) NoScope pat - , toHie cmdtop - ] - HsStatic _ expr -> - [ toHie expr - ] - HsTick _ _ expr -> - [ toHie expr - ] - HsBinTick _ _ _ expr -> - [ toHie expr - ] - HsBracket _ b -> - [ toHie b - ] - HsRnBracketOut _ b p -> - [ toHie b - , toHie p - ] - HsTcBracketOut _ _wrap b p -> - [ toHie b - , toHie p - ] - HsSpliceE _ x -> - [ toHie $ L mspan x - ] - HsGetField {} -> [] - HsProjection {} -> [] - XExpr x - | GhcTc <- ghcPass @p - , WrapExpr (HsWrap w a) <- x - -> [ toHie $ L mspan a - , toHie (L mspan w) - ] - | GhcTc <- ghcPass @p - , ExpansionExpr (HsExpanded _ b) <- x - -> [ toHie (L mspan b) - ] - | otherwise -> [] - --- NOTE: no longer have the location -instance HiePass p => ToHie (HsTupArg (GhcPass p)) where - toHie arg = concatM $ case arg of - 
Present _ expr -> - [ toHie expr - ] - Missing _ -> [] - -instance ( ToHie (LocatedA (body (GhcPass p))) - , AnnoBody p body - , HiePass p - ) => ToHie (RScoped (LocatedA (Stmt (GhcPass p) (LocatedA (body (GhcPass p)))))) where - toHie (RS scope (L span stmt)) = concatM $ node : case stmt of - LastStmt _ body _ _ -> - [ toHie body - ] - BindStmt _ pat body -> - [ toHie $ PS (getRealSpan $ getLocA body) scope NoScope pat - , toHie body - ] - ApplicativeStmt _ stmts _ -> - [ concatMapM (toHie . RS scope . snd) stmts - ] - BodyStmt _ body _ _ -> - [ toHie body - ] - LetStmt _ binds -> - [ toHie $ RS scope binds - ] - ParStmt _ parstmts _ _ -> - [ concatMapM (\(ParStmtBlock _ stmts _ _) -> - toHie $ listScopes NoScope stmts) - parstmts - ] - TransStmt {trS_stmts = stmts, trS_using = using, trS_by = by} -> - [ toHie $ listScopes scope stmts - , toHie using - , toHie by - ] - RecStmt {recS_stmts = L _ stmts} -> - [ toHie $ map (RS $ combineScopes scope (mkScope (locA span))) stmts - ] - where - node = case hiePass @p of - HieTc -> makeNodeA stmt span - HieRn -> makeNodeA stmt span - -instance HiePass p => ToHie (RScoped (HsLocalBinds (GhcPass p))) where - toHie (RS scope binds) = concatM $ makeNode binds (spanHsLocaLBinds binds) : case binds of - EmptyLocalBinds _ -> [] - HsIPBinds _ ipbinds -> case ipbinds of - IPBinds evbinds xs -> let sc = combineScopes scope $ scopeHsLocaLBinds binds - sp :: SrcSpanAnnA - sp = noAnnSrcSpan $ spanHsLocaLBinds binds in - [ - case hiePass @p of - HieTc -> toHie $ EvBindContext sc (getRealSpan $ locA sp) $ L sp evbinds - HieRn -> pure [] - , toHie $ map (RS sc) xs - ] - HsValBinds _ valBinds -> - [ - toHie $ RS (combineScopes scope (scopeHsLocaLBinds binds)) - valBinds - ] - - -scopeHsLocaLBinds :: HsLocalBinds (GhcPass p) -> Scope -scopeHsLocaLBinds (HsValBinds _ (ValBinds _ bs sigs)) - = foldr combineScopes NoScope (bsScope ++ sigsScope) - where - bsScope :: [Scope] - bsScope = map (mkScopeA . getLoc) $ bagToList bs - sigsScope :: [Scope] - sigsScope = map (mkScope . getLocA) sigs -scopeHsLocaLBinds (HsValBinds _ (XValBindsLR (NValBinds bs sigs))) - = foldr combineScopes NoScope (bsScope ++ sigsScope) - where - bsScope :: [Scope] - bsScope = map (mkScopeA . getLoc) $ concatMap (bagToList . snd) bs - sigsScope :: [Scope] - sigsScope = map (mkScope . getLocA) sigs - -scopeHsLocaLBinds (HsIPBinds _ (IPBinds _ bs)) - = foldr combineScopes NoScope (map (mkScopeA . getLoc) bs) -scopeHsLocaLBinds (EmptyLocalBinds _) = NoScope - - -instance HiePass p => ToHie (RScoped (LocatedA (IPBind (GhcPass p)))) where - toHie (RS scope (L sp bind)) = concatM $ makeNodeA bind sp : case bind of - IPBind _ (Left _) expr -> [toHie expr] - IPBind _ (Right v) expr -> - [ toHie $ C (EvidenceVarBind EvImplicitBind scope (getRealSpanA sp)) - $ L sp v - , toHie expr - ] - -instance HiePass p => ToHie (RScoped (HsValBindsLR (GhcPass p) (GhcPass p))) where - toHie (RS sc v) = concatM $ case v of - ValBinds _ binds sigs -> - [ toHie $ fmap (BC RegularBind sc) binds - , toHie $ fmap (SC (SI BindSig Nothing)) sigs - ] - XValBindsLR x -> [ toHie $ RS sc x ] - -instance HiePass p => ToHie (RScoped (NHsValBindsLR (GhcPass p))) where - toHie (RS sc (NValBinds binds sigs)) = concatM $ - [ toHie (concatMap (map (BC RegularBind sc) . bagToList . 
snd) binds) - , toHie $ fmap (SC (SI BindSig Nothing)) sigs - ] - -instance ( ToHie arg , HasLoc arg , Data arg - , HiePass p ) => ToHie (RContext (HsRecFields (GhcPass p) arg)) where - toHie (RC c (HsRecFields fields _)) = toHie $ map (RC c) fields - -instance ( ToHie (RFContext (Located label)) - , ToHie arg, HasLoc arg, Data arg - , Data label - ) => ToHie (RContext (LocatedA (HsRecField' label arg))) where - toHie (RC c (L span recfld)) = concatM $ makeNode recfld (locA span) : case recfld of - HsRecField _ label expr _ -> - [ toHie $ RFC c (getRealSpan $ loc expr) label - , toHie expr - ] - -instance ToHie (RFContext (Located (FieldOcc GhcRn))) where - toHie (RFC c rhs (L nspan f)) = concatM $ case f of - FieldOcc name _ -> - [ toHie $ C (RecField c rhs) (L nspan name) - ] - -instance ToHie (RFContext (Located (FieldOcc GhcTc))) where - toHie (RFC c rhs (L nspan f)) = concatM $ case f of - FieldOcc var _ -> - [ toHie $ C (RecField c rhs) (L nspan var) - ] - -instance ToHie (RFContext (Located (AmbiguousFieldOcc GhcRn))) where - toHie (RFC c rhs (L nspan afo)) = concatM $ case afo of - Unambiguous name _ -> - [ toHie $ C (RecField c rhs) $ L nspan name - ] - Ambiguous _name _ -> - [ ] - -instance ToHie (RFContext (Located (AmbiguousFieldOcc GhcTc))) where - toHie (RFC c rhs (L nspan afo)) = concatM $ case afo of - Unambiguous var _ -> - [ toHie $ C (RecField c rhs) (L nspan var) - ] - Ambiguous var _ -> - [ toHie $ C (RecField c rhs) (L nspan var) - ] - -instance HiePass p => ToHie (RScoped (ApplicativeArg (GhcPass p))) where - toHie (RS sc (ApplicativeArgOne _ pat expr _)) = concatM - [ toHie $ PS Nothing sc NoScope pat - , toHie expr - ] - toHie (RS sc (ApplicativeArgMany _ stmts _ pat _)) = concatM - [ toHie $ listScopes NoScope stmts - , toHie $ PS Nothing sc NoScope pat - ] - -instance (ToHie tyarg, ToHie arg, ToHie rec) => ToHie (HsConDetails tyarg arg rec) where - toHie (PrefixCon tyargs args) = concatM [ toHie tyargs, toHie args ] - toHie (RecCon rec) = toHie rec - toHie (InfixCon a b) = concatM [ toHie a, toHie b] - -instance ToHie (HsConDeclGADTDetails GhcRn) where - toHie (PrefixConGADT args) = toHie args - toHie (RecConGADT rec) = toHie rec - -instance HiePass p => ToHie (Located (HsCmdTop (GhcPass p))) where - toHie (L span top) = concatM $ makeNode top span : case top of - HsCmdTop _ cmd -> - [ toHie cmd - ] - -instance HiePass p => ToHie (LocatedA (HsCmd (GhcPass p))) where - toHie (L span cmd) = concatM $ makeNodeA cmd span : case cmd of - HsCmdArrApp _ a b _ _ -> - [ toHie a - , toHie b - ] - HsCmdArrForm _ a _ _ cmdtops -> - [ toHie a - , toHie cmdtops - ] - HsCmdApp _ a b -> - [ toHie a - , toHie b - ] - HsCmdLam _ mg -> - [ toHie mg - ] - HsCmdPar _ a -> - [ toHie a - ] - HsCmdCase _ expr alts -> - [ toHie expr - , toHie alts - ] - HsCmdLamCase _ alts -> - [ toHie alts - ] - HsCmdIf _ _ a b c -> - [ toHie a - , toHie b - , toHie c - ] - HsCmdLet _ binds cmd' -> - [ toHie $ RS (mkLScopeA cmd') binds - , toHie cmd' - ] - HsCmdDo _ (L ispan stmts) -> - [ locOnly (locA ispan) - , toHie $ listScopes NoScope stmts - ] - XCmd _ -> [] - -instance ToHie (TyClGroup GhcRn) where - toHie TyClGroup{ group_tyclds = classes - , group_roles = roles - , group_kisigs = sigs - , group_instds = instances } = - concatM - [ toHie classes - , toHie sigs - , toHie roles - , toHie instances - ] - -instance ToHie (LocatedA (TyClDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - FamDecl {tcdFam = fdecl} -> - [ toHie ((L span fdecl) :: LFamilyDecl GhcRn) 
- ] - SynDecl {tcdLName = name, tcdTyVars = vars, tcdRhs = typ} -> - [ toHie $ C (Decl SynDec $ getRealSpanA span) name - , toHie $ TS (ResolvedScopes [mkScope $ getLocA typ]) vars - , toHie typ - ] - DataDecl {tcdLName = name, tcdTyVars = vars, tcdDataDefn = defn} -> - [ toHie $ C (Decl DataDec $ getRealSpanA span) name - , toHie $ TS (ResolvedScopes [quant_scope, rhs_scope]) vars - , toHie defn - ] - where - quant_scope = mkLScopeA $ fromMaybe (noLocA []) $ dd_ctxt defn - rhs_scope = sig_sc `combineScopes` con_sc `combineScopes` deriv_sc - sig_sc = maybe NoScope mkLScopeA $ dd_kindSig defn - con_sc = foldr combineScopes NoScope $ map mkLScopeA $ dd_cons defn - deriv_sc = foldr combineScopes NoScope $ map mkLScope $ dd_derivs defn - ClassDecl { tcdCtxt = context - , tcdLName = name - , tcdTyVars = vars - , tcdFDs = deps - , tcdSigs = sigs - , tcdMeths = meths - , tcdATs = typs - , tcdATDefs = deftyps - } -> - [ toHie $ C (Decl ClassDec $ getRealSpanA span) name - , toHie context - , toHie $ TS (ResolvedScopes [context_scope, rhs_scope]) vars - , toHie deps - , toHie $ map (SC $ SI ClassSig $ getRealSpanA span) sigs - , toHie $ fmap (BC InstanceBind ModuleScope) meths - , toHie typs - , concatMapM (locOnly . getLocA) deftyps - , toHie deftyps - ] - where - context_scope = mkLScopeA $ fromMaybe (noLocA []) context - rhs_scope = foldl1' combineScopes $ map mkScope - [ loc deps, loc sigs, loc (bagToList meths), loc typs, loc deftyps] - -instance ToHie (LocatedA (FamilyDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - FamilyDecl _ info _ name vars _ sig inj -> - [ toHie $ C (Decl FamDec $ getRealSpanA span) name - , toHie $ TS (ResolvedScopes [rhsSpan]) vars - , toHie info - , toHie $ RS injSpan sig - , toHie inj - ] - where - rhsSpan = sigSpan `combineScopes` injSpan - sigSpan = mkScope $ getLoc sig - injSpan = maybe NoScope (mkScope . getLoc) inj - -instance ToHie (FamilyInfo GhcRn) where - toHie (ClosedTypeFamily (Just eqns)) = concatM $ - [ concatMapM (locOnly . 
getLocA) eqns - , toHie $ map go eqns - ] - where - go (L l ib) = TS (ResolvedScopes [mkScopeA l]) ib - toHie _ = pure [] - -instance ToHie (RScoped (Located (FamilyResultSig GhcRn))) where - toHie (RS sc (L span sig)) = concatM $ makeNode sig span : case sig of - NoSig _ -> - [] - KindSig _ k -> - [ toHie k - ] - TyVarSig _ bndr -> - [ toHie $ TVS (ResolvedScopes [sc]) NoScope bndr - ] - -instance ToHie (LocatedA (FunDep GhcRn)) where - toHie (L span fd@(FunDep _ lhs rhs)) = concatM $ - [ makeNode fd (locA span) - , toHie $ map (C Use) lhs - , toHie $ map (C Use) rhs - ] - - -instance ToHie (TScoped (FamEqn GhcRn (HsDataDefn GhcRn))) where - toHie (TS _ f) = toHie f - -instance ToHie (TScoped (FamEqn GhcRn (LocatedA (HsType GhcRn)))) where - toHie (TS _ f) = toHie f - -instance (ToHie rhs, HasLoc rhs) - => ToHie (FamEqn GhcRn rhs) where - toHie fe@(FamEqn _ var outer_bndrs pats _ rhs) = concatM $ - [ toHie $ C (Decl InstDec $ getRealSpan $ loc fe) var - , toHie $ TVS (ResolvedScopes []) scope outer_bndrs - , toHie pats - , toHie rhs - ] - where scope = combineScopes patsScope rhsScope - patsScope = mkScope (loc pats) - rhsScope = mkScope (loc rhs) - -instance ToHie (Located (InjectivityAnn GhcRn)) where - toHie (L span ann) = concatM $ makeNode ann span : case ann of - InjectivityAnn _ lhs rhs -> - [ toHie $ C Use lhs - , toHie $ map (C Use) rhs - ] - -instance ToHie (HsDataDefn GhcRn) where - toHie (HsDataDefn _ _ ctx _ mkind cons derivs) = concatM - [ toHie ctx - , toHie mkind - , toHie cons - , toHie derivs - ] - -instance ToHie (Located [Located (HsDerivingClause GhcRn)]) where - toHie (L span clauses) = concatM - [ locOnly span - , toHie clauses - ] - -instance ToHie (Located (HsDerivingClause GhcRn)) where - toHie (L span cl) = concatM $ makeNode cl span : case cl of - HsDerivingClause _ strat dct -> - [ toHie strat - , toHie dct - ] - -instance ToHie (LocatedC (DerivClauseTys GhcRn)) where - toHie (L span dct) = concatM $ makeNodeA dct span : case dct of - DctSingle _ ty -> [ toHie $ TS (ResolvedScopes []) ty ] - DctMulti _ tys -> [ toHie $ map (TS (ResolvedScopes [])) tys ] - -instance ToHie (Located (DerivStrategy GhcRn)) where - toHie (L span strat) = concatM $ makeNode strat span : case strat of - StockStrategy _ -> [] - AnyclassStrategy _ -> [] - NewtypeStrategy _ -> [] - ViaStrategy s -> [ toHie (TS (ResolvedScopes []) s) ] - -instance ToHie (LocatedP OverlapMode) where - toHie (L span _) = locOnly (locA span) - -instance ToHie a => ToHie (HsScaled GhcRn a) where - toHie (HsScaled w t) = concatM [toHie (arrowToHsType w), toHie t] - -instance ToHie (LocatedA (ConDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNode decl (locA span) : case decl of - ConDeclGADT { con_names = names, con_bndrs = L outer_bndrs_loc outer_bndrs - , con_mb_cxt = ctx, con_g_args = args, con_res_ty = typ } -> - [ toHie $ map (C (Decl ConDec $ getRealSpanA span)) names - , case outer_bndrs of - HsOuterImplicit{hso_ximplicit = imp_vars} -> - bindingsOnly $ map (C $ TyVarBind (mkScopeA outer_bndrs_loc) resScope) - imp_vars - HsOuterExplicit{hso_bndrs = exp_bndrs} -> - toHie $ tvScopes resScope NoScope exp_bndrs - , toHie ctx - , toHie args - , toHie typ - ] - where - rhsScope = combineScopes argsScope tyScope - ctxScope = maybe NoScope mkLScopeA ctx - argsScope = case args of - PrefixConGADT xs -> scaled_args_scope xs - RecConGADT x -> mkLScopeA x - tyScope = mkLScopeA typ - resScope = ResolvedScopes [ctxScope, rhsScope] - ConDeclH98 { con_name = name, con_ex_tvs = qvars - , con_mb_cxt = ctx, 
con_args = dets } -> - [ toHie $ C (Decl ConDec $ getRealSpan (locA span)) name - , toHie $ tvScopes (ResolvedScopes []) rhsScope qvars - , toHie ctx - , toHie dets - ] - where - rhsScope = combineScopes ctxScope argsScope - ctxScope = maybe NoScope mkLScopeA ctx - argsScope = case dets of - PrefixCon _ xs -> scaled_args_scope xs - InfixCon a b -> scaled_args_scope [a, b] - RecCon x -> mkLScopeA x - where scaled_args_scope :: [HsScaled GhcRn (LHsType GhcRn)] -> Scope - scaled_args_scope = foldr combineScopes NoScope . map (mkLScopeA . hsScaledThing) - -instance ToHie (LocatedL [LocatedA (ConDeclField GhcRn)]) where - toHie (L span decls) = concatM $ - [ locOnly (locA span) - , toHie decls - ] - -instance ToHie (TScoped (HsWildCardBndrs GhcRn (LocatedA (HsSigType GhcRn)))) where - toHie (TS sc (HsWC names a)) = concatM $ - [ bindingsOnly $ map (C $ TyVarBind (mkScope span) sc) names - , toHie $ TS sc a - ] - where span = loc a - -instance ToHie (TScoped (HsWildCardBndrs GhcRn (LocatedA (HsType GhcRn)))) where - toHie (TS sc (HsWC names a)) = concatM $ - [ bindingsOnly $ map (C $ TyVarBind (mkScope span) sc) names - , toHie a - ] - where span = loc a - -instance ToHie (LocatedA (StandaloneKindSig GhcRn)) where - toHie (L sp sig) = concatM [makeNodeA sig sp, toHie sig] - -instance ToHie (StandaloneKindSig GhcRn) where - toHie sig = concatM $ case sig of - StandaloneKindSig _ name typ -> - [ toHie $ C TyDecl name - , toHie $ TS (ResolvedScopes []) typ - ] - -instance HiePass p => ToHie (SigContext (LocatedA (Sig (GhcPass p)))) where - toHie (SC (SI styp msp) (L sp sig)) = - case hiePass @p of - HieTc -> pure [] - HieRn -> concatM $ makeNodeA sig sp : case sig of - TypeSig _ names typ -> - [ toHie $ map (C TyDecl) names - , toHie $ TS (UnresolvedScope (map unLoc names) Nothing) typ - ] - PatSynSig _ names typ -> - [ toHie $ map (C TyDecl) names - , toHie $ TS (UnresolvedScope (map unLoc names) Nothing) typ - ] - ClassOpSig _ _ names typ -> - [ case styp of - ClassSig -> toHie $ map (C $ ClassTyDecl $ getRealSpanA sp) names - _ -> toHie $ map (C $ TyDecl) names - , toHie $ TS (UnresolvedScope (map unLoc names) msp) typ - ] - IdSig _ _ -> [] - FixSig _ fsig -> - [ toHie $ L sp fsig - ] - InlineSig _ name _ -> - [ toHie $ (C Use) name - ] - SpecSig _ name typs _ -> - [ toHie $ (C Use) name - , toHie $ map (TS (ResolvedScopes [])) typs - ] - SpecInstSig _ _ typ -> - [ toHie $ TS (ResolvedScopes []) typ - ] - MinimalSig _ _ form -> - [ toHie form - ] - SCCFunSig _ _ name mtxt -> - [ toHie $ (C Use) name - , maybe (pure []) (locOnly . 
getLoc) mtxt - ] - CompleteMatchSig _ _ (L ispan names) typ -> - [ locOnly ispan - , toHie $ map (C Use) names - , toHie $ fmap (C Use) typ - ] - -instance ToHie (TScoped (LocatedA (HsSigType GhcRn))) where - toHie (TS tsc (L span t@HsSig{sig_bndrs=bndrs,sig_body=body})) = concatM $ makeNodeA t span : - [ toHie (TVS tsc (mkScopeA span) bndrs) - , toHie body - ] - --- Check this -instance Data flag => ToHie (TVScoped (HsOuterTyVarBndrs flag GhcRn)) where - toHie (TVS tsc sc bndrs) = case bndrs of - HsOuterImplicit xs -> bindingsOnly $ map (C $ TyVarBind sc tsc) xs - HsOuterExplicit _ xs -> toHie $ tvScopes tsc sc xs - -instance ToHie (LocatedA (HsType GhcRn)) where - toHie (L span t) = concatM $ makeNode t (locA span) : case t of - HsForAllTy _ tele body -> - let scope = mkScope $ getLocA body in - [ case tele of - HsForAllVis { hsf_vis_bndrs = bndrs } -> - toHie $ tvScopes (ResolvedScopes []) scope bndrs - HsForAllInvis { hsf_invis_bndrs = bndrs } -> - toHie $ tvScopes (ResolvedScopes []) scope bndrs - , toHie body - ] - HsQualTy _ ctx body -> - [ toHie ctx - , toHie body - ] - HsTyVar _ _ var -> - [ toHie $ C Use var - ] - HsAppTy _ a b -> - [ toHie a - , toHie b - ] - HsAppKindTy _ ty ki -> - [ toHie ty - , toHie ki - ] - HsFunTy _ w a b -> - [ toHie (arrowToHsType w) - , toHie a - , toHie b - ] - HsListTy _ a -> - [ toHie a - ] - HsTupleTy _ _ tys -> - [ toHie tys - ] - HsSumTy _ tys -> - [ toHie tys - ] - HsOpTy _ a op b -> - [ toHie a - , toHie $ C Use op - , toHie b - ] - HsParTy _ a -> - [ toHie a - ] - HsIParamTy _ ip ty -> - [ toHie ip - , toHie ty - ] - HsKindSig _ a b -> - [ toHie a - , toHie b - ] - HsSpliceTy _ a -> - [ toHie $ L span a - ] - HsDocTy _ a _ -> - [ toHie a - ] - HsBangTy _ _ ty -> - [ toHie ty - ] - HsRecTy _ fields -> - [ toHie fields - ] - HsExplicitListTy _ _ tys -> - [ toHie tys - ] - HsExplicitTupleTy _ tys -> - [ toHie tys - ] - HsTyLit _ _ -> [] - HsWildCardTy _ -> [] - HsStarTy _ _ -> [] - XHsType _ -> [] - -instance (ToHie tm, ToHie ty) => ToHie (HsArg tm ty) where - toHie (HsValArg tm) = toHie tm - toHie (HsTypeArg _ ty) = toHie ty - toHie (HsArgPar sp) = locOnly sp - -instance Data flag => ToHie (TVScoped (LocatedA (HsTyVarBndr flag GhcRn))) where - toHie (TVS tsc sc (L span bndr)) = concatM $ makeNodeA bndr span : case bndr of - UserTyVar _ _ var -> - [ toHie $ C (TyVarBind sc tsc) var - ] - KindedTyVar _ _ var kind -> - [ toHie $ C (TyVarBind sc tsc) var - , toHie kind - ] - -instance ToHie (TScoped (LHsQTyVars GhcRn)) where - toHie (TS sc (HsQTvs implicits vars)) = concatM $ - [ bindingsOnly bindings - , toHie $ tvScopes sc NoScope vars - ] - where - varLoc = loc vars - bindings = map (C $ TyVarBind (mkScope varLoc) sc) implicits - -instance ToHie (LocatedC [LocatedA (HsType GhcRn)]) where - toHie (L span tys) = concatM $ - [ locOnly (locA span) - , toHie tys - ] - -instance ToHie (LocatedA (ConDeclField GhcRn)) where - toHie (L span field) = concatM $ makeNode field (locA span) : case field of - ConDeclField _ fields typ _ -> - [ toHie $ map (RFC RecFieldDecl (getRealSpan $ loc typ)) fields - , toHie typ - ] - -instance ToHie (LHsExpr a) => ToHie (ArithSeqInfo a) where - toHie (From expr) = toHie expr - toHie (FromThen a b) = concatM $ - [ toHie a - , toHie b - ] - toHie (FromTo a b) = concatM $ - [ toHie a - , toHie b - ] - toHie (FromThenTo a b c) = concatM $ - [ toHie a - , toHie b - , toHie c - ] - -instance ToHie (LocatedA (SpliceDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - SpliceDecl _ splice _ -> 
- [ toHie splice - ] - -instance ToHie (HsBracket a) where - toHie _ = pure [] - -instance ToHie PendingRnSplice where - toHie _ = pure [] - -instance ToHie PendingTcSplice where - toHie _ = pure [] - -instance ToHie (LBooleanFormula (LocatedN Name)) where - toHie (L span form) = concatM $ makeNode form (locA span) : case form of - Var a -> - [ toHie $ C Use a - ] - And forms -> - [ toHie forms - ] - Or forms -> - [ toHie forms - ] - Parens f -> - [ toHie f - ] - -instance ToHie (Located HsIPName) where - toHie (L span e) = makeNode e span - -instance HiePass p => ToHie (LocatedA (HsSplice (GhcPass p))) where - toHie (L span sp) = concatM $ makeNodeA sp span : case sp of - HsTypedSplice _ _ _ expr -> - [ toHie expr - ] - HsUntypedSplice _ _ _ expr -> - [ toHie expr - ] - HsQuasiQuote _ _ _ ispan _ -> - [ locOnly ispan - ] - HsSpliced _ _ _ -> - [] - XSplice x -> case ghcPass @p of - GhcTc -> case x of - HsSplicedT _ -> [] - -instance ToHie (LocatedA (RoleAnnotDecl GhcRn)) where - toHie (L span annot) = concatM $ makeNodeA annot span : case annot of - RoleAnnotDecl _ var roles -> - [ toHie $ C Use var - , concatMapM (locOnly . getLoc) roles - ] - -instance ToHie (LocatedA (InstDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - ClsInstD _ d -> - [ toHie $ L span d - ] - DataFamInstD _ d -> - [ toHie $ L span d - ] - TyFamInstD _ d -> - [ toHie $ L span d - ] - -instance ToHie (LocatedA (ClsInstDecl GhcRn)) where - toHie (L span decl) = concatM - [ toHie $ TS (ResolvedScopes [mkScopeA span]) $ cid_poly_ty decl - , toHie $ fmap (BC InstanceBind ModuleScope) $ cid_binds decl - , toHie $ map (SC $ SI InstSig $ getRealSpanA span) $ cid_sigs decl - , concatMapM (locOnly . getLocA) $ cid_tyfam_insts decl - , toHie $ cid_tyfam_insts decl - , concatMapM (locOnly . 
getLocA) $ cid_datafam_insts decl - , toHie $ cid_datafam_insts decl - , toHie $ cid_overlap_mode decl - ] - -instance ToHie (LocatedA (DataFamInstDecl GhcRn)) where - toHie (L sp (DataFamInstDecl d)) = toHie $ TS (ResolvedScopes [mkScopeA sp]) d - -instance ToHie (LocatedA (TyFamInstDecl GhcRn)) where - toHie (L sp (TyFamInstDecl _ d)) = toHie $ TS (ResolvedScopes [mkScopeA sp]) d - -instance HiePass p => ToHie (Context (FieldOcc (GhcPass p))) where - toHie (C c (FieldOcc n (L l _))) = case hiePass @p of - HieTc -> toHie (C c (L l n)) - HieRn -> toHie (C c (L l n)) - -instance HiePass p => ToHie (PatSynFieldContext (RecordPatSynField (GhcPass p))) where - toHie (PSC sp (RecordPatSynField a b)) = concatM $ - [ toHie $ C (RecField RecFieldDecl sp) a - , toHie $ C Use b - ] - -instance ToHie (LocatedA (DerivDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - DerivDecl _ typ strat overlap -> - [ toHie $ TS (ResolvedScopes []) typ - , toHie strat - , toHie overlap - ] - -instance ToHie (LocatedA (FixitySig GhcRn)) where - toHie (L span sig) = concatM $ makeNodeA sig span : case sig of - FixitySig _ vars _ -> - [ toHie $ map (C Use) vars - ] - -instance ToHie (LocatedA (DefaultDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - DefaultDecl _ typs -> - [ toHie typs - ] - -instance ToHie (LocatedA (ForeignDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - ForeignImport {fd_name = name, fd_sig_ty = sig, fd_fi = fi} -> - [ toHie $ C (ValBind RegularBind ModuleScope $ getRealSpanA span) name - , toHie $ TS (ResolvedScopes []) sig - , toHie fi - ] - ForeignExport {fd_name = name, fd_sig_ty = sig, fd_fe = fe} -> - [ toHie $ C Use name - , toHie $ TS (ResolvedScopes []) sig - , toHie fe - ] - -instance ToHie ForeignImport where - toHie (CImport (L a _) (L b _) _ _ (L c _)) = concatM $ - [ locOnly a - , locOnly b - , locOnly c - ] - -instance ToHie ForeignExport where - toHie (CExport (L a _) (L b _)) = concatM $ - [ locOnly a - , locOnly b - ] - -instance ToHie (LocatedA (WarnDecls GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - Warnings _ _ warnings -> - [ toHie warnings - ] - -instance ToHie (LocatedA (WarnDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNode decl (locA span) : case decl of - Warning _ vars _ -> - [ toHie $ map (C Use) vars - ] - -instance ToHie (LocatedA (AnnDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - HsAnnotation _ _ prov expr -> - [ toHie prov - , toHie expr - ] - -instance ToHie (AnnProvenance GhcRn) where - toHie (ValueAnnProvenance a) = toHie $ C Use a - toHie (TypeAnnProvenance a) = toHie $ C Use a - toHie ModuleAnnProvenance = pure [] - -instance ToHie (LocatedA (RuleDecls GhcRn)) where - toHie (L span decl) = concatM $ makeNodeA decl span : case decl of - HsRules _ _ rules -> - [ toHie rules - ] - -instance ToHie (LocatedA (RuleDecl GhcRn)) where - toHie (L span r@(HsRule _ rname _ tybndrs bndrs exprA exprB)) = concatM - [ makeNodeA r span - , locOnly $ getLoc rname - , toHie $ fmap (tvScopes (ResolvedScopes []) scope) tybndrs - , toHie $ map (RS $ mkScope (locA span)) bndrs - , toHie exprA - , toHie exprB - ] - where scope = bndrs_sc `combineScopes` exprA_sc `combineScopes` exprB_sc - bndrs_sc = maybe NoScope mkLScope (listToMaybe bndrs) - exprA_sc = mkLScopeA exprA - exprB_sc = mkLScopeA exprB - -instance ToHie (RScoped (Located (RuleBndr GhcRn))) where - toHie (RS sc (L 
span bndr)) = concatM $ makeNode bndr span : case bndr of - RuleBndr _ var -> - [ toHie $ C (ValBind RegularBind sc Nothing) var - ] - RuleBndrSig _ var typ -> - [ toHie $ C (ValBind RegularBind sc Nothing) var - , toHie $ TS (ResolvedScopes [sc]) typ - ] - -instance ToHie (LocatedA (ImportDecl GhcRn)) where - toHie (L span decl) = concatM $ makeNode decl (locA span) : case decl of - ImportDecl { ideclName = name, ideclAs = as, ideclHiding = hidden } -> - [ toHie $ IEC Import name - , toHie $ fmap (IEC ImportAs) as - , maybe (pure []) goIE hidden - ] - where - goIE (hiding, (L sp liens)) = concatM $ - [ locOnly (locA sp) - , toHie $ map (IEC c) liens - ] - where - c = if hiding then ImportHiding else Import - -instance ToHie (IEContext (LocatedA (IE GhcRn))) where - toHie (IEC c (L span ie)) = concatM $ makeNode ie (locA span) : case ie of - IEVar _ n -> - [ toHie $ IEC c n - ] - IEThingAbs _ n -> - [ toHie $ IEC c n - ] - IEThingAll _ n -> - [ toHie $ IEC c n - ] - IEThingWith flds n _ ns -> - [ toHie $ IEC c n - , toHie $ map (IEC c) ns - , toHie $ map (IEC c) flds - ] - IEModuleContents _ n -> - [ toHie $ IEC c n - ] - IEGroup _ _ _ -> [] - IEDoc _ _ -> [] - IEDocNamed _ _ -> [] - -instance ToHie (IEContext (LIEWrappedName Name)) where - toHie (IEC c (L span iewn)) = concatM $ makeNodeA iewn span : case iewn of - IEName n -> - [ toHie $ C (IEThing c) n - ] - IEPattern _ p -> - [ toHie $ C (IEThing c) p - ] - IEType _ n -> - [ toHie $ C (IEThing c) n - ] - -instance ToHie (IEContext (Located FieldLabel)) where - toHie (IEC c (L span lbl)) = concatM - [ makeNode lbl span - , toHie $ C (IEThing c) $ L span (flSelector lbl) - ] diff --git a/hie-compat/src-reexport-ghc9/Compat/HieBin.hs b/hie-compat/src-reexport-ghc9/Compat/HieBin.hs deleted file mode 100644 index 254e1db6d3..0000000000 --- a/hie-compat/src-reexport-ghc9/Compat/HieBin.hs +++ /dev/null @@ -1,8 +0,0 @@ -{- -Binary serialization for .hie files. 
--} - -module Compat.HieBin ( module GHC.Iface.Ext.Binary) -where - -import GHC.Iface.Ext.Binary diff --git a/hie-compat/src-reexport-ghc9/Compat/HieDebug.hs b/hie-compat/src-reexport-ghc9/Compat/HieDebug.hs deleted file mode 100644 index 872da67c2b..0000000000 --- a/hie-compat/src-reexport-ghc9/Compat/HieDebug.hs +++ /dev/null @@ -1,10 +0,0 @@ -module Compat.HieDebug - ( module GHC.Iface.Ext.Debug - , ppHie ) where -import GHC.Iface.Ext.Debug - -import GHC.Iface.Ext.Types (HieAST) -import GHC.Utils.Outputable (Outputable(ppr), SDoc) - -ppHie :: Outputable a => HieAST a -> SDoc -ppHie = ppr diff --git a/hie-compat/src-reexport-ghc9/Compat/HieTypes.hs b/hie-compat/src-reexport-ghc9/Compat/HieTypes.hs deleted file mode 100644 index 36bb86abeb..0000000000 --- a/hie-compat/src-reexport-ghc9/Compat/HieTypes.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Compat.HieTypes - ( module GHC.Iface.Ext.Types ) where -import GHC.Iface.Ext.Types diff --git a/hie-compat/src-reexport-ghc9/Compat/HieUtils.hs b/hie-compat/src-reexport-ghc9/Compat/HieUtils.hs deleted file mode 100644 index 204a312039..0000000000 --- a/hie-compat/src-reexport-ghc9/Compat/HieUtils.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Compat.HieUtils - ( module GHC.Iface.Ext.Utils ) where -import GHC.Iface.Ext.Utils diff --git a/hie-compat/src-reexport-ghc92/Compat/HieAst.hs b/hie-compat/src-reexport-ghc92/Compat/HieAst.hs deleted file mode 100644 index 240dc4da49..0000000000 --- a/hie-compat/src-reexport-ghc92/Compat/HieAst.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Compat.HieAst - ( module GHC.Iface.Ext.Ast ) where -import GHC.Iface.Ext.Ast diff --git a/hie-compat/src-reexport-ghc92/Compat/HieBin.hs b/hie-compat/src-reexport-ghc92/Compat/HieBin.hs deleted file mode 100644 index 254e1db6d3..0000000000 --- a/hie-compat/src-reexport-ghc92/Compat/HieBin.hs +++ /dev/null @@ -1,8 +0,0 @@ -{- -Binary serialization for .hie files. 
--} - -module Compat.HieBin ( module GHC.Iface.Ext.Binary) -where - -import GHC.Iface.Ext.Binary diff --git a/hie-compat/src-reexport-ghc92/Compat/HieDebug.hs b/hie-compat/src-reexport-ghc92/Compat/HieDebug.hs deleted file mode 100644 index 872da67c2b..0000000000 --- a/hie-compat/src-reexport-ghc92/Compat/HieDebug.hs +++ /dev/null @@ -1,10 +0,0 @@ -module Compat.HieDebug - ( module GHC.Iface.Ext.Debug - , ppHie ) where -import GHC.Iface.Ext.Debug - -import GHC.Iface.Ext.Types (HieAST) -import GHC.Utils.Outputable (Outputable(ppr), SDoc) - -ppHie :: Outputable a => HieAST a -> SDoc -ppHie = ppr diff --git a/hie-compat/src-reexport-ghc92/Compat/HieTypes.hs b/hie-compat/src-reexport-ghc92/Compat/HieTypes.hs deleted file mode 100644 index 36bb86abeb..0000000000 --- a/hie-compat/src-reexport-ghc92/Compat/HieTypes.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Compat.HieTypes - ( module GHC.Iface.Ext.Types ) where -import GHC.Iface.Ext.Types diff --git a/hie-compat/src-reexport-ghc92/Compat/HieUtils.hs b/hie-compat/src-reexport-ghc92/Compat/HieUtils.hs deleted file mode 100644 index 204a312039..0000000000 --- a/hie-compat/src-reexport-ghc92/Compat/HieUtils.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Compat.HieUtils - ( module GHC.Iface.Ext.Utils ) where -import GHC.Iface.Ext.Utils diff --git a/hie-compat/src-reexport/Compat/HieDebug.hs b/hie-compat/src-reexport/Compat/HieDebug.hs deleted file mode 100644 index 32da665b6d..0000000000 --- a/hie-compat/src-reexport/Compat/HieDebug.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Compat.HieDebug - ( module HieDebug ) where -import HieDebug diff --git a/hie-compat/src-reexport/Compat/HieTypes.hs b/hie-compat/src-reexport/Compat/HieTypes.hs deleted file mode 100644 index 7185fb10bd..0000000000 --- a/hie-compat/src-reexport/Compat/HieTypes.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Compat.HieTypes - ( module HieTypes ) where -import HieTypes diff --git a/hie-compat/src-reexport/Compat/HieUtils.hs b/hie-compat/src-reexport/Compat/HieUtils.hs deleted file mode 100644 index c4c401e269..0000000000 --- a/hie-compat/src-reexport/Compat/HieUtils.hs +++ /dev/null @@ -1,3 +0,0 @@ -module Compat.HieUtils - ( module HieUtils ) where -import HieUtils diff --git a/hls-test-utils/src/Development/IDE/Test/Diagnostic.hs b/hls-test-utils/src/Development/IDE/Test/Diagnostic.hs index e64ab34876..4fa81a2d57 100644 --- a/hls-test-utils/src/Development/IDE/Test/Diagnostic.hs +++ b/hls-test-utils/src/Development/IDE/Test/Diagnostic.hs @@ -69,7 +69,7 @@ requireDiagnostic actuals expected@(severity, cursor, expectedMsg, mbExpectedCod | ghcVersion >= GHC96 = case (mbExpectedCode, _code d) of (Nothing, _) -> True - (Just expectedCode, Nothing) -> False + (Just _, Nothing) -> False (Just expectedCode, Just actualCode) -> InR expectedCode == actualCode | otherwise = True diff --git a/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Internal.hs b/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Internal.hs index 06e9d99679..b897fa5abb 100644 --- a/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Internal.hs +++ b/plugins/hls-call-hierarchy-plugin/src/Ide/Plugin/CallHierarchy/Internal.hs @@ -24,6 +24,12 @@ import Development.IDE import Development.IDE.Core.Shake import Development.IDE.GHC.Compat as Compat import Development.IDE.Spans.AtPoint +import GHC.Iface.Ext.Types (ContextInfo (..), + DeclType (..), HieAST (..), + HieASTs (..), Identifier, + IdentifierDetails (..), + RecFieldContext (..), Span) +import GHC.Iface.Ext.Utils (getNameBinding) import HieDb 
(Symbol (Symbol)) import qualified Ide.Plugin.CallHierarchy.Query as Q import Ide.Plugin.CallHierarchy.Types diff --git a/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs b/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs index ecbd495246..3f902ef80c 100644 --- a/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs +++ b/plugins/hls-class-plugin/src/Ide/Plugin/Class/CodeAction.hs @@ -36,6 +36,9 @@ import Development.IDE.GHC.Compat.Error (TcRnMessage (..), stripTcRnMessageContext) import Development.IDE.GHC.Compat.Util import Development.IDE.Spans.AtPoint (pointCommand) +import GHC.Iface.Ext.Types (ContextInfo (..), + HieAST (..), Identifier, + IdentifierDetails (..)) import Ide.Plugin.Class.ExactPrint import Ide.Plugin.Class.Types import Ide.Plugin.Class.Utils diff --git a/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/ASTPreProcess.hs b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/ASTPreProcess.hs index 6fa799b8d5..915a98d607 100644 --- a/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/ASTPreProcess.hs +++ b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/ASTPreProcess.hs @@ -19,7 +19,11 @@ import Data.Maybe (fromMaybe, mapMaybe) import Data.Semigroup (First (First, getFirst)) import Data.Semigroup.Foldable (foldlM1) import qualified Data.Set as Set -import Development.IDE.GHC.Compat hiding (nodeInfo) +import Development.IDE.GHC.Compat +import GHC.Iface.Ext.Types (ContextInfo (..), HieAST (..), + Identifier, IdentifierDetails (..), + NodeInfo (nodeIdentifiers), Span) +import GHC.Iface.Ext.Utils (RefMap, flattenAst) import Prelude hiding (span) {-| diff --git a/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs index 86d5923011..2391a35e1a 100644 --- a/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs +++ b/plugins/hls-code-range-plugin/src/Ide/Plugin/CodeRange/Rules.hs @@ -39,18 +39,17 @@ import qualified Data.Vector as V import Development.IDE import Development.IDE.Core.Rules (toIdeResult) import qualified Development.IDE.Core.Shake as Shake -import Development.IDE.GHC.Compat (HieAST (..), - HieASTs (getAsts), RefMap) import Development.IDE.GHC.Compat.Util import GHC.Generics (Generic) +import GHC.Iface.Ext.Types (HieAST (..), HieASTs (..)) +import GHC.Iface.Ext.Utils (RefMap) import Ide.Plugin.CodeRange.ASTPreProcess (CustomNodeType (..), PreProcessEnv (..), isCustomNode, preProcessAST) -import Language.LSP.Protocol.Types (FoldingRangeKind (FoldingRangeKind_Comment, FoldingRangeKind_Imports, FoldingRangeKind_Region)) - import Language.LSP.Protocol.Lens (HasEnd (end), HasStart (start)) +import Language.LSP.Protocol.Types (FoldingRangeKind (FoldingRangeKind_Comment, FoldingRangeKind_Imports, FoldingRangeKind_Region)) import Prelude hiding (log) data Log = LogShake Shake.Log diff --git a/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs b/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs index a761f648af..a111e9062b 100644 --- a/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs +++ b/plugins/hls-explicit-record-fields-plugin/src/Ide/Plugin/ExplicitFields.hs @@ -58,8 +58,7 @@ import Development.IDE.GHC.Compat (FieldLabel (flSelector), HsExpr (HsApp, HsVar, XExpr), HsFieldBind (hfbLHS), HsRecFields (..), - HsWrap (HsWrap), - Identifier, LPat, + HsWrap (HsWrap), LPat, Located, NamedThing (getName), Outputable, @@ -90,6 +89,7 @@ import Development.IDE.Spans.Pragmas 
(NextPragmaInfo (..), getFirstPragma, insertNewPragma) import GHC.Generics (Generic) +import GHC.Iface.Ext.Types (Identifier) import Ide.Logger (Priority (..), cmapWithPrio, logWith, (<+>)) diff --git a/plugins/hls-qualify-imported-names-plugin/src/Ide/Plugin/QualifyImportedNames.hs b/plugins/hls-qualify-imported-names-plugin/src/Ide/Plugin/QualifyImportedNames.hs index 011910b880..6917d0a7a9 100644 --- a/plugins/hls-qualify-imported-names-plugin/src/Ide/Plugin/QualifyImportedNames.hs +++ b/plugins/hls-qualify-imported-names-plugin/src/Ide/Plugin/QualifyImportedNames.hs @@ -32,17 +32,14 @@ import Development.IDE.Core.RuleTypes (GetFileContents (GetFileConte TcModuleResult (TcModuleResult, tmrParsed, tmrTypechecked), TypeCheck (TypeCheck)) import Development.IDE.Core.Shake (IdeState) -import Development.IDE.GHC.Compat (ContextInfo (Use), - GenLocated (..), GhcPs, +import Development.IDE.GHC.Compat (GenLocated (..), GhcPs, GlobalRdrElt, GlobalRdrEnv, HsModule (hsmodImports), - Identifier, - IdentifierDetails (IdentifierDetails, identInfo), ImpDeclSpec (ImpDeclSpec, is_as, is_dloc, is_qual), ImportSpec (ImpSpec), LImportDecl, ModuleName, Name, NameEnv, ParsedModule, - RefMap, Span, SrcSpan, + SrcSpan, TcGblEnv (tcg_rdr_env), emptyUFM, globalRdrEnvElts, gre_imp, gre_name, locA, @@ -58,6 +55,9 @@ import Development.IDE.GHC.Compat (ContextInfo (Use), srcSpanStartLine, unitUFM) import Development.IDE.Types.Location (Position (Position), Range (Range), Uri) +import GHC.Iface.Ext.Types (ContextInfo (..), Identifier, + IdentifierDetails (..), Span) +import GHC.Iface.Ext.Utils (RefMap) import Ide.Plugin.Error (PluginError (PluginRuleFailed), getNormalizedFilePathE, handleMaybe) diff --git a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs index e471d1781a..1fba6b67e5 100644 --- a/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs +++ b/plugins/hls-refactor-plugin/src/Development/IDE/Plugin/CodeAction.hs @@ -75,6 +75,8 @@ import Development.IDE.Types.Options import GHC (DeltaPos (..), EpAnn (..), LEpaComment) +import GHC.Iface.Ext.Types (ContextInfo (..), + IdentifierDetails (..)) import qualified GHC.LanguageExtensions as Lang import Ide.Logger hiding (group) diff --git a/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs b/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs index 2fdbee3ebc..0ba6bc7975 100644 --- a/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs +++ b/plugins/hls-rename-plugin/src/Ide/Plugin/Rename.hs @@ -8,7 +8,6 @@ module Ide.Plugin.Rename (descriptor, E.Log) where -import Compat.HieTypes import Control.Lens ((^.)) import Control.Monad import Control.Monad.Except (ExceptT, throwError) @@ -41,6 +40,11 @@ import qualified Development.IDE.GHC.ExactPrint as E import Development.IDE.Plugin.CodeAction import Development.IDE.Spans.AtPoint import Development.IDE.Types.Location +import GHC.Iface.Ext.Types (HieAST (..), + HieASTs (..), + NodeOrigin (..), + SourcedNodeInfo (..)) +import GHC.Iface.Ext.Utils (generateReferencesMap) import HieDb ((:.) 
(..)) import HieDb.Query import HieDb.Types (RefRow (refIsGenerated)) diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs index b8b07e667f..1bbba24df2 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Internal.hs @@ -3,6 +3,7 @@ {-# LANGUAGE OverloadedLabels #-} {-# LANGUAGE OverloadedRecordDot #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} @@ -43,6 +44,8 @@ import Development.IDE.Core.Shake (ShakeExtras (..), getVirtualFile) import Development.IDE.GHC.Compat hiding (Warning) import Development.IDE.GHC.Compat.Util (mkFastString) +import GHC.Iface.Ext.Types (HieASTs (getAsts), + pattern HiePath) import Ide.Logger (logWith) import Ide.Plugin.Error (PluginError (PluginInternalError), getNormalizedFilePathE, diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs index d9bfc4449d..e93cefb711 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Mappings.hs @@ -20,6 +20,10 @@ import qualified Data.Set as Set import Data.Text (Text, unpack) import Development.IDE (HieKind (HieFresh, HieFromDisk)) import Development.IDE.GHC.Compat +import GHC.Iface.Ext.Types (BindType (..), + ContextInfo (..), + DeclType (..), HieType (..), + HieTypeFlat, TypeIndex) import Ide.Plugin.SemanticTokens.Types import Ide.Plugin.SemanticTokens.Utils (mkRange) import Language.LSP.Protocol.Types (LspEnum (knownValues), diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs index fb7fdd9e71..5875ebfa8d 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Query.hs @@ -11,6 +11,9 @@ import Data.Text (Text) import Development.IDE.Core.PositionMapping (PositionMapping, toCurrentRange) import Development.IDE.GHC.Compat +import GHC.Iface.Ext.Types (ContextInfo, Identifier, + IdentifierDetails (..)) +import GHC.Iface.Ext.Utils (RefMap) import Ide.Plugin.SemanticTokens.Mappings import Ide.Plugin.SemanticTokens.Types (HieFunMaskKind, HsSemanticTokenType (TModule), diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs index 2ed11be333..b6142fb39c 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Tokenize.hs @@ -22,6 +22,10 @@ import Data.Text.Utf16.Rope.Mixed (Rope) import qualified Data.Text.Utf16.Rope.Mixed as Rope import Development.IDE.GHC.Compat import Development.IDE.GHC.Error (realSrcSpanToCodePointRange) +import GHC.Iface.Ext.Types (HieAST (..), Identifier, + NodeInfo (..), + NodeOrigin (..), + SourcedNodeInfo (..)) import Ide.Plugin.SemanticTokens.Types (HsSemanticTokenType (TModule), RangeHsSemanticTokenTypes (..)) import Language.LSP.Protocol.Types (Position (Position), diff --git 
a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs index 7f445bf7ac..da59c28d29 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Types.hs @@ -10,16 +10,16 @@ module Ide.Plugin.SemanticTokens.Types where import Control.DeepSeq (NFData (rnf), rwhnf) import qualified Data.Array as A import Data.Default (Default (def)) +import Data.Text (Text) import Development.IDE (Pretty (pretty), RuleResult) import qualified Development.IDE.Core.Shake as Shake import Development.IDE.GHC.Compat hiding (loc) import Development.IDE.Graph.Classes (Hashable) import GHC.Generics (Generic) -import Language.LSP.Protocol.Types --- import template haskell -import Data.Text (Text) +import GHC.Iface.Ext.Types (TypeIndex) import Ide.Plugin.Error (PluginError) import Language.Haskell.TH.Syntax (Lift) +import Language.LSP.Protocol.Types -- !!!! order of declarations matters deriving enum and ord diff --git a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs index 52cd56a21f..c545d8941a 100644 --- a/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs +++ b/plugins/hls-semantic-tokens-plugin/src/Ide/Plugin/SemanticTokens/Utils.hs @@ -10,6 +10,11 @@ import Data.ByteString.Char8 (unpack) import qualified Data.Map.Strict as Map import Development.IDE (Position (..), Range (..)) import Development.IDE.GHC.Compat +import GHC.Iface.Ext.Types (BindType (..), ContextInfo (..), + DeclType (..), Identifier, + IdentifierDetails (..), + RecFieldContext (..), Span) +import GHC.Iface.Ext.Utils (RefMap) import Prelude hiding (length, span) deriving instance Show DeclType diff --git a/plugins/hls-stan-plugin/src/Ide/Plugin/Stan.hs b/plugins/hls-stan-plugin/src/Ide/Plugin/Stan.hs index a1efb7f150..77c9817dba 100644 --- a/plugins/hls-stan-plugin/src/Ide/Plugin/Stan.hs +++ b/plugins/hls-stan-plugin/src/Ide/Plugin/Stan.hs @@ -2,7 +2,6 @@ {-# LANGUAGE PatternSynonyms #-} module Ide.Plugin.Stan (descriptor, Log) where -import Compat.HieTypes (HieFile (..)) import Control.DeepSeq (NFData) import Control.Monad (void) import Control.Monad.IO.Class (liftIO) @@ -14,6 +13,7 @@ import qualified Data.Text as T import Development.IDE import Development.IDE.Core.Rules (getHieFile) import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.GHC.Compat (HieFile (..)) import GHC.Generics (Generic) import Ide.Plugin.Config (PluginConfig (..)) import Ide.Types (PluginDescriptor (..), PluginId, diff --git a/stack-lts22.yaml b/stack-lts22.yaml index 16687bbf3e..2b22e7ad8e 100644 --- a/stack-lts22.yaml +++ b/stack-lts22.yaml @@ -2,12 +2,10 @@ resolver: lts-22.43 # ghc-9.6.6 packages: - . - - ./hie-compat - ./hls-graph - ./ghcide/ - ./hls-plugin-api - ./hls-test-utils - # - ./shake-bench ghc-options: "$everything": -haddock @@ -23,6 +21,7 @@ extra-deps: - floskell-0.11.1 - hiedb-0.7.0.0 - hie-bios-0.16.0 + - hie-compat-0.3.1.2 - implicit-hie-0.1.4.0 - lsp-2.7.0.0 - lsp-test-0.17.1.0 diff --git a/stack.yaml b/stack.yaml index 145d2cd0b7..0699726771 100644 --- a/stack.yaml +++ b/stack.yaml @@ -2,12 +2,10 @@ resolver: lts-23.18 # ghc-9.8.4 packages: - . 
- - ./hie-compat - ./hls-graph - ./ghcide/ - ./hls-plugin-api - ./hls-test-utils - # - ./shake-bench ghc-options: "$everything": -haddock @@ -23,6 +21,7 @@ allow-newer-deps: extra-deps: - floskell-0.11.1 - hiedb-0.7.0.0 + - hie-compat-0.3.1.2 - implicit-hie-0.1.4.0 - hie-bios-0.16.0 - hw-fingertree-0.1.2.1 From ddef7d4f2a1d6c6d82aab9144139759a67ea8e1f Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 4 Aug 2025 15:11:59 +0800 Subject: [PATCH 75/90] Remove RTS options from multi-unit configuration files --- ghcide-test/data/multi-unit/a-1.0.0-inplace | 3 --- ghcide-test/data/multi-unit/c-1.0.0-inplace | 2 -- 2 files changed, 5 deletions(-) diff --git a/ghcide-test/data/multi-unit/a-1.0.0-inplace b/ghcide-test/data/multi-unit/a-1.0.0-inplace index cab2b716ff..a54ea9bc4b 100644 --- a/ghcide-test/data/multi-unit/a-1.0.0-inplace +++ b/ghcide-test/data/multi-unit/a-1.0.0-inplace @@ -16,6 +16,3 @@ base text -XHaskell98 A -+RTS --A32M --RTS diff --git a/ghcide-test/data/multi-unit/c-1.0.0-inplace b/ghcide-test/data/multi-unit/c-1.0.0-inplace index 7421d59279..7201a40de4 100644 --- a/ghcide-test/data/multi-unit/c-1.0.0-inplace +++ b/ghcide-test/data/multi-unit/c-1.0.0-inplace @@ -17,5 +17,3 @@ a-1.0.0-inplace base -XHaskell98 C -+RTS --A32M From d5632d695f05637bb9c4a88907995240922cb4f3 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 4 Aug 2025 15:53:17 +0800 Subject: [PATCH 76/90] format --- cabal.project | 2 -- ghcide/session-loader/Development/IDE/Session/Dependency.hs | 4 ++-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/cabal.project b/cabal.project index e4b31e93f5..fed144eb90 100644 --- a/cabal.project +++ b/cabal.project @@ -51,10 +51,8 @@ constraints: allow-newer: cabal-install-parsers:Cabal-syntax, - if impl(ghc >= 9.11) benchmarks: False allow-newer: cabal-install-parsers:base, cabal-install-parsers:time, - diff --git a/ghcide/session-loader/Development/IDE/Session/Dependency.hs b/ghcide/session-loader/Development/IDE/Session/Dependency.hs index 926e0e47b3..deedf809b8 100644 --- a/ghcide/session-loader/Development/IDE/Session/Dependency.hs +++ b/ghcide/session-loader/Development/IDE/Session/Dependency.hs @@ -1,8 +1,8 @@ module Development.IDE.Session.Dependency where -import Control.Exception.Safe as Safe +import Control.Exception.Safe as Safe import Data.Either.Extra -import qualified Data.Map.Strict as Map +import qualified Data.Map.Strict as Map import Data.Time.Clock import System.Directory From 4c5bc35eb53f3b4436e5670a302fab1a372c9e8b Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 4 Aug 2025 17:51:15 +0800 Subject: [PATCH 77/90] apply the patch from https://github.com/haskell/haskell-language-server/commit/c3b61feccbc87857390b9fdb542ce0b3a701d074 --- ghcide-test/data/multi-unit/a-1.0.0-inplace | 3 +++ ghcide-test/data/multi-unit/c-1.0.0-inplace | 2 ++ ghcide/session-loader/Development/IDE/Session/Ghc.hs | 5 ++++- 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/ghcide-test/data/multi-unit/a-1.0.0-inplace b/ghcide-test/data/multi-unit/a-1.0.0-inplace index a54ea9bc4b..cab2b716ff 100644 --- a/ghcide-test/data/multi-unit/a-1.0.0-inplace +++ b/ghcide-test/data/multi-unit/a-1.0.0-inplace @@ -16,3 +16,6 @@ base text -XHaskell98 A ++RTS +-A32M +-RTS diff --git a/ghcide-test/data/multi-unit/c-1.0.0-inplace b/ghcide-test/data/multi-unit/c-1.0.0-inplace index 7201a40de4..7421d59279 100644 --- a/ghcide-test/data/multi-unit/c-1.0.0-inplace +++ b/ghcide-test/data/multi-unit/c-1.0.0-inplace @@ -17,3 +17,5 @@ a-1.0.0-inplace base -XHaskell98 C ++RTS +-A32M 
diff --git a/ghcide/session-loader/Development/IDE/Session/Ghc.hs b/ghcide/session-loader/Development/IDE/Session/Ghc.hs index 88157bd990..ab1e5b7977 100644 --- a/ghcide/session-loader/Development/IDE/Session/Ghc.hs +++ b/ghcide/session-loader/Development/IDE/Session/Ghc.hs @@ -230,7 +230,10 @@ setOptions haddockOpt cfp (ComponentOptions theOpts compRoot _) dflags rootDir = initMulti unitArgFiles = forM unitArgFiles $ \f -> do args <- liftIO $ expandResponse [f] - initOne args + -- The reponse files may contain arguments like "+RTS", + -- and hie-bios doesn't expand the response files of @-unit@ arguments. + -- Thus, we need to do the stripping here. + initOne $ HieBios.removeRTS $ HieBios.removeVerbosityOpts args initOne this_opts = do (dflags', targets') <- addCmdOpts this_opts dflags let dflags'' = From d943452c92ece39076d6af94f1d256b650138417 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 4 Aug 2025 18:16:39 +0800 Subject: [PATCH 78/90] add comments for orderedSet --- .../session-loader/Development/IDE/Session.hs | 2 +- .../Development/IDE/Session/OrderedSet.hs | 20 ++++++++++++++++--- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 02a0a13763..6feb6325f2 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -509,7 +509,7 @@ incrementVersion state = modifyVar' (version state) succ -- | Get files from the pending file set getPendingFiles :: SessionState -> IO (HashSet FilePath) -getPendingFiles state = atomically $ Set.fromList <$> S.toUnOrderedList (pendingFiles state) +getPendingFiles state = atomically $ S.toHashSet (pendingFiles state) -- | Handle errors during session loading by recording file as having error and removing from pending handleSingleFileProcessingError' :: SessionState -> Maybe FilePath -> FilePath -> PackageSetupException -> SessionM () diff --git a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs index a2b0a76565..250d6fa086 100644 --- a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs +++ b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs @@ -5,6 +5,7 @@ import Control.Concurrent.STM (STM, TQueue, flushTQueue, import Control.Concurrent.STM.TQueue (readTQueue, writeTQueue) import Control.Monad (when) import Data.Hashable (Hashable) +import qualified Data.HashSet import qualified Focus import qualified ListT as LT import qualified StmContainers.Set as S @@ -13,6 +14,14 @@ import StmContainers.Set (Set) type OrderedSet a = (TQueue a, Set a) +-- | Insert an element into the ordered set. +-- If the element is not already present, it is added to both the queue and set. +-- If the element already exists, it is moved to the end of the queue to maintain +-- most-recently-inserted ordering semantics. +-- It take O(n), not very good. + +-- Alternative: could preserve original position. +-- I am not sure which one is better. insert :: Hashable a => a -> OrderedSet a -> STM () insert a (que, s) = do (_, inserted) <- S.focus (Focus.testingIfInserts $ Focus.insert ()) a s @@ -23,7 +32,6 @@ insert a (que, s) = do mapM_ (writeTQueue que) items return () writeTQueue que a - -- when que $ writeTQueue que a newIO :: Hashable a => IO (OrderedSet a) newIO = do @@ -31,6 +39,9 @@ newIO = do s <- S.newIO return (que, s) +-- | Read the first element from the queue. 
+-- If an element is not in the set, it means it has been deleted, +-- so we retry until we find a valid element that exists in the set. readQueue :: Hashable a => OrderedSet a -> STM a readQueue rs@(que, s) = do f <- readTQueue que @@ -41,8 +52,11 @@ readQueue rs@(que, s) = do lookup :: Hashable a => a -> OrderedSet a -> STM Bool lookup a (_, s) = S.lookup a s +-- | Delete an element from the set. +-- The queue is not modified directly; stale entries are filtered out lazily +-- during reading operations (see 'readQueue'). delete :: Hashable a => a -> OrderedSet a -> STM () delete a (_, s) = S.delete a s -toUnOrderedList :: Hashable a => OrderedSet a -> STM [a] -toUnOrderedList (_, s) = LT.toList $ S.listT s +toHashSet :: Hashable a => OrderedSet a -> Data.HashSet a +toHashSet (_, s) = TreeSet.fromList $ LT.toList $ S.listT s From bd79156a01920b2ba30b5b40b11a96f86762f993 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 4 Aug 2025 18:18:15 +0800 Subject: [PATCH 79/90] update comments for orderedSet --- ghcide/session-loader/Development/IDE/Session/OrderedSet.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs index 250d6fa086..33d64b53d8 100644 --- a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs +++ b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs @@ -20,7 +20,7 @@ type OrderedSet a = (TQueue a, Set a) -- most-recently-inserted ordering semantics. -- It take O(n), not very good. --- Alternative: could preserve original position. +-- Alternative: preserve original position and ignore new one. -- I am not sure which one is better. insert :: Hashable a => a -> OrderedSet a -> STM () insert a (que, s) = do From dc34df6d38751038fc1c35ccb5dcaabfad4b90b4 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Mon, 4 Aug 2025 18:38:57 +0800 Subject: [PATCH 80/90] fix import --- .../Development/IDE/Session/Ghc.hs | 1 + .../Development/IDE/Session/OrderedSet.hs | 18 ++++-------------- 2 files changed, 5 insertions(+), 14 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session/Ghc.hs b/ghcide/session-loader/Development/IDE/Session/Ghc.hs index ab1e5b7977..7a84263ec9 100644 --- a/ghcide/session-loader/Development/IDE/Session/Ghc.hs +++ b/ghcide/session-loader/Development/IDE/Session/Ghc.hs @@ -30,6 +30,7 @@ import Development.IDE.Types.Diagnostics import Development.IDE.Types.HscEnvEq (HscEnvEq, newHscEnvEq) import Development.IDE.Types.Location import GHC.ResponseFile +import qualified HIE.Bios.Cradle.Utils as HieBios import HIE.Bios.Environment hiding (getCacheDir) import HIE.Bios.Types hiding (Log) import Ide.Logger (Pretty (pretty), diff --git a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs index 33d64b53d8..a66e89f84d 100644 --- a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs +++ b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs @@ -16,22 +16,12 @@ type OrderedSet a = (TQueue a, Set a) -- | Insert an element into the ordered set. -- If the element is not already present, it is added to both the queue and set. --- If the element already exists, it is moved to the end of the queue to maintain --- most-recently-inserted ordering semantics. --- It take O(n), not very good. - --- Alternative: preserve original position and ignore new one. --- I am not sure which one is better. 
+-- If the element already exists, ignore it insert :: Hashable a => a -> OrderedSet a -> STM () insert a (que, s) = do (_, inserted) <- S.focus (Focus.testingIfInserts $ Focus.insert ()) a s -- if already in the set - -- update the position of the element in the queue - when (not inserted) $ do - items <- filter (==a) <$> flushTQueue que - mapM_ (writeTQueue que) items - return () - writeTQueue que a + when inserted $ writeTQueue que a newIO :: Hashable a => IO (OrderedSet a) newIO = do @@ -58,5 +48,5 @@ lookup a (_, s) = S.lookup a s delete :: Hashable a => a -> OrderedSet a -> STM () delete a (_, s) = S.delete a s -toHashSet :: Hashable a => OrderedSet a -> Data.HashSet a -toHashSet (_, s) = TreeSet.fromList $ LT.toList $ S.listT s +toHashSet :: Hashable a => OrderedSet a -> STM (Data.HashSet.HashSet a) +toHashSet (_, s) = Data.HashSet.fromList <$> LT.toList (S.listT s) From 71d65754ed8ff942c55b9d833985990949553eb1 Mon Sep 17 00:00:00 2001 From: soulomoon Date: Tue, 5 Aug 2025 17:17:15 +0800 Subject: [PATCH 81/90] refactor OrderedSet to use a record for better clarity and structure --- .../session-loader/Development/IDE/Session.hs | 1 - .../Development/IDE/Session/OrderedSet.hs | 20 ++++++++++--------- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index 6feb6325f2..5804ead632 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -56,7 +56,6 @@ import Development.IDE.Types.HscEnvEq (HscEnvEq) import Development.IDE.Types.Location import Development.IDE.Types.Options import qualified HIE.Bios as HieBios -import qualified HIE.Bios.Cradle.Utils as HieBios import HIE.Bios.Environment hiding (getCacheDir) import HIE.Bios.Types hiding (Log) import qualified HIE.Bios.Types as HieBios diff --git a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs index a66e89f84d..630f1dc4fc 100644 --- a/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs +++ b/ghcide/session-loader/Development/IDE/Session/OrderedSet.hs @@ -1,7 +1,6 @@ module Development.IDE.Session.OrderedSet where -import Control.Concurrent.STM (STM, TQueue, flushTQueue, - newTQueueIO) +import Control.Concurrent.STM (STM, TQueue, newTQueueIO) import Control.Concurrent.STM.TQueue (readTQueue, writeTQueue) import Control.Monad (when) import Data.Hashable (Hashable) @@ -12,13 +11,16 @@ import qualified StmContainers.Set as S import StmContainers.Set (Set) -type OrderedSet a = (TQueue a, Set a) +data OrderedSet a = OrderedSet + { insertionOrder :: TQueue a + , elements :: Set a + } -- | Insert an element into the ordered set. -- If the element is not already present, it is added to both the queue and set. -- If the element already exists, ignore it insert :: Hashable a => a -> OrderedSet a -> STM () -insert a (que, s) = do +insert a (OrderedSet que s) = do (_, inserted) <- S.focus (Focus.testingIfInserts $ Focus.insert ()) a s -- if already in the set when inserted $ writeTQueue que a @@ -27,26 +29,26 @@ newIO :: Hashable a => IO (OrderedSet a) newIO = do que <- newTQueueIO s <- S.newIO - return (que, s) + return (OrderedSet que s) -- | Read the first element from the queue. -- If an element is not in the set, it means it has been deleted, -- so we retry until we find a valid element that exists in the set. 
readQueue :: Hashable a => OrderedSet a -> STM a -readQueue rs@(que, s) = do +readQueue rs@(OrderedSet que s) = do f <- readTQueue que b <- S.lookup f s -- retry if no files are left in the queue if b then return f else readQueue rs lookup :: Hashable a => a -> OrderedSet a -> STM Bool -lookup a (_, s) = S.lookup a s +lookup a (OrderedSet _ s) = S.lookup a s -- | Delete an element from the set. -- The queue is not modified directly; stale entries are filtered out lazily -- during reading operations (see 'readQueue'). delete :: Hashable a => a -> OrderedSet a -> STM () -delete a (_, s) = S.delete a s +delete a (OrderedSet _ s) = S.delete a s toHashSet :: Hashable a => OrderedSet a -> STM (Data.HashSet.HashSet a) -toHashSet (_, s) = Data.HashSet.fromList <$> LT.toList (S.listT s) +toHashSet (OrderedSet _ s) = Data.HashSet.fromList <$> LT.toList (S.listT s) From cfeced8fa3088be7af03e8794ff6504ed0fed0a0 Mon Sep 17 00:00:00 2001 From: Mango The Fourth <40720523+MangoIV@users.noreply.github.com> Date: Tue, 5 Aug 2025 12:20:17 +0200 Subject: [PATCH 82/90] concurrency bug fixes/ improvements (#4663) * [fix] don't bake ide state mvar into setup and getIdeState This is the right thing to do because othewise it is not possible to create new ideStates in a single instance of the executable. This will be useful if the hls executable is supposed to talk to multiple clients and lives beyond a single client disconnecting. * [fix] don't throw hard errors when no shutdown message is handled Previously, when there was no shutdown message by a client and the client disconnected, resulting in the handlers to be GC'd the race that was supposed to free resources for the HieDB & co. would throw a hard error talking about the MVar being unreachable. We would like to instead finish gracefully because finishing the race as soon as the MVar was GC'd is the right thing to do anyway. * [chore] apply suggestions from code review by @fendor * [chore] apply suggestions from code review by @fendor --------- Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- exe/Wrapper.hs | 10 +++- .../src/Development/IDE/LSP/LanguageServer.hs | 51 ++++++++++++------- ghcide/src/Development/IDE/Main.hs | 15 +++--- 3 files changed, 49 insertions(+), 27 deletions(-) diff --git a/exe/Wrapper.hs b/exe/Wrapper.hs index 2c2401ab6a..2fd885ffb3 100644 --- a/exe/Wrapper.hs +++ b/exe/Wrapper.hs @@ -38,7 +38,8 @@ import Control.Monad.Trans.Except (ExceptT, runExceptT, import Data.Maybe import qualified Data.Text as T import qualified Data.Text.IO as T -import Development.IDE.LSP.LanguageServer (runLanguageServer) +import Development.IDE.LSP.LanguageServer (Setup (..), + runLanguageServer) import qualified Development.IDE.Main as Main import Ide.Logger (Doc, Pretty (pretty), Recorder, WithPriority, @@ -300,7 +301,12 @@ launchErrorLSP recorder errorMsg = do [ exitHandler exit ] let interpretHandler (env, _st) = LSP.Iso (LSP.runLspT env . 
unErrorLSPM) liftIO - pure (doInitialize, asyncHandlers, interpretHandler) + pure MkSetup + { doInitialize + , staticHandlers = asyncHandlers + , interpretHandler + , onExit = [exit] + } runLanguageServer (cmapWithPrio pretty recorder) (Main.argsLspOptions defaultArguments) diff --git a/ghcide/src/Development/IDE/LSP/LanguageServer.hs b/ghcide/src/Development/IDE/LSP/LanguageServer.hs index cf7845ce08..918e024a4f 100644 --- a/ghcide/src/Development/IDE/LSP/LanguageServer.hs +++ b/ghcide/src/Development/IDE/LSP/LanguageServer.hs @@ -11,6 +11,7 @@ module Development.IDE.LSP.LanguageServer , Log(..) , ThreadQueue , runWithWorkerThreads + , Setup (..) ) where import Control.Concurrent.STM @@ -81,6 +82,17 @@ instance Pretty Log where LogLspServer msg -> pretty msg LogServerShutdownMessage -> "Received shutdown message" +data Setup config m a + = MkSetup + { doInitialize :: LSP.LanguageContextEnv config -> TRequestMessage Method_Initialize -> IO (Either (TResponseError Method_Initialize) (LSP.LanguageContextEnv config, a)) + -- ^ the callback invoked when the language server receives the 'Method_Initialize' request + , staticHandlers :: LSP.Handlers m + -- ^ the statically known handlers of the lsp server + , interpretHandler :: (LanguageContextEnv config, a) -> m <~> IO + -- ^ how to interpret @m@ to 'IO' and how to lift 'IO' into @m@ + , onExit :: [IO ()] + -- ^ a list of 'IO' actions that clean up resources and must be run when the server shuts down + } runLanguageServer :: forall config a m. (Show config) @@ -90,18 +102,16 @@ runLanguageServer -> Handle -- output -> config -> (config -> Value -> Either T.Text config) - -> (config -> m config ()) - -> (MVar () - -> IO (LSP.LanguageContextEnv config -> TRequestMessage Method_Initialize -> IO (Either (TResponseError Method_Initialize) (LSP.LanguageContextEnv config, a)), - LSP.Handlers (m config), - (LanguageContextEnv config, a) -> m config <~> IO)) + -> (config -> m ()) + -> (MVar () -> IO (Setup config m a)) -> IO () runLanguageServer recorder options inH outH defaultConfig parseConfig onConfigChange setup = do -- This MVar becomes full when the server thread exits or we receive exit message from client. -- LSP server will be canceled when it's full. clientMsgVar <- newEmptyMVar - (doInitialize, staticHandlers, interpretHandler) <- setup clientMsgVar + MkSetup + { doInitialize, staticHandlers, interpretHandler, onExit } <- setup clientMsgVar let serverDefinition = LSP.ServerDefinition { LSP.parseConfig = parseConfig @@ -115,28 +125,29 @@ runLanguageServer recorder options inH outH defaultConfig parseConfig onConfigCh , LSP.options = modifyOptions options } - let lspCologAction :: MonadIO m2 => Colog.LogAction m2 (Colog.WithSeverity LspServerLog) + let lspCologAction :: forall io. MonadIO io => Colog.LogAction io (Colog.WithSeverity LspServerLog) lspCologAction = toCologActionWithPrio (cmapWithPrio LogLspServer recorder) - void $ untilMVar clientMsgVar $ - void $ LSP.runServerWithHandles + let runServer = + LSP.runServerWithHandles lspCologAction lspCologAction inH outH serverDefinition + untilMVar clientMsgVar $ + runServer `finally` sequence_ onExit + setupLSP :: - forall config err. + forall config. 
Recorder (WithPriority Log) -> FilePath -- ^ root directory, see Note [Root Directory] -> (FilePath -> IO FilePath) -- ^ Map root paths to the location of the hiedb for the project -> LSP.Handlers (ServerM config) -> (LSP.LanguageContextEnv config -> FilePath -> WithHieDb -> ThreadQueue -> IO IdeState) -> MVar () - -> IO (LSP.LanguageContextEnv config -> TRequestMessage Method_Initialize -> IO (Either err (LSP.LanguageContextEnv config, IdeState)), - LSP.Handlers (ServerM config), - (LanguageContextEnv config, IdeState) -> ServerM config <~> IO) + -> IO (Setup config (ServerM config) IdeState) setupLSP recorder defaultRoot getHieDbLoc userHandlers getIdeState clientMsgVar = do -- Send everything over a channel, since you need to wait until after initialise before -- LspFuncs is available @@ -171,7 +182,7 @@ setupLSP recorder defaultRoot getHieDbLoc userHandlers getIdeState clientMsgVar cancelled <- readTVar cancelledRequests unless (reqId `Set.member` cancelled) retry - let asyncHandlers = mconcat + let staticHandlers = mconcat [ userHandlers , cancelHandler cancelRequest , exitHandler exit @@ -184,7 +195,9 @@ setupLSP recorder defaultRoot getHieDbLoc userHandlers getIdeState clientMsgVar let interpretHandler (env, st) = LSP.Iso (LSP.runLspT env . flip (runReaderT . unServerM) (clientMsgChan,st)) liftIO - pure (doInitialize, asyncHandlers, interpretHandler) + let onExit = [stopReactorLoop, exit] + + pure MkSetup {doInitialize, staticHandlers, interpretHandler, onExit} handleInit @@ -266,10 +279,12 @@ runWithWorkerThreads recorder dbLoc f = evalContT $ do liftIO $ f hiedb (ThreadQueue threadQueue sessionRestartTQueue sessionLoaderTQueue) -- | Runs the action until it ends or until the given MVar is put. +-- It is important, that the thread that puts the 'MVar' is not dropped before it puts the 'MVar' i.e. it should +-- occur as the final action in a 'finally' or 'bracket', because otherwise this thread will finish early (as soon +-- as the thread receives the BlockedIndefinitelyOnMVar exception) -- Rethrows any exceptions. 
-untilMVar :: MonadUnliftIO m => MVar () -> m () -> m () -untilMVar mvar io = void $ - waitAnyCancel =<< traverse async [ io , readMVar mvar ] +untilMVar :: MonadUnliftIO m => MVar () -> m a -> m () +untilMVar mvar io = race_ (readMVar mvar) io cancelHandler :: (SomeLspId -> IO ()) -> LSP.Handlers (ServerM c) cancelHandler cancelRequest = LSP.notificationHandler SMethod_CancelRequest $ \TNotificationMessage{_params=CancelParams{_id}} -> diff --git a/ghcide/src/Development/IDE/Main.hs b/ghcide/src/Development/IDE/Main.hs index 872e957364..ad4a36327a 100644 --- a/ghcide/src/Development/IDE/Main.hs +++ b/ghcide/src/Development/IDE/Main.hs @@ -12,7 +12,7 @@ module Development.IDE.Main ) where import Control.Concurrent.Extra (withNumCapabilities) -import Control.Concurrent.MVar (newEmptyMVar, +import Control.Concurrent.MVar (MVar, newEmptyMVar, putMVar, tryReadMVar) import Control.Concurrent.STM.Stats (dumpSTMStats) import Control.Monad.Extra (concatMapM, unless, @@ -318,9 +318,8 @@ defaultMain recorder Arguments{..} = withHeapStats (cmapWithPrio LogHeapStats re ioT <- offsetTime logWith recorder Info $ LogLspStart (pluginId <$> ipMap argsHlsPlugins) - ideStateVar <- newEmptyMVar - let getIdeState :: LSP.LanguageContextEnv Config -> FilePath -> WithHieDb -> Shake.ThreadQueue -> IO IdeState - getIdeState env rootPath withHieDb threadQueue = do + let getIdeState :: MVar IdeState -> LSP.LanguageContextEnv Config -> FilePath -> WithHieDb -> Shake.ThreadQueue -> IO IdeState + getIdeState ideStateVar env rootPath withHieDb threadQueue = do t <- ioT logWith recorder Info $ LogLspStartDuration t sessionLoader <- loadSessionWithOptions (cmapWithPrio LogSession recorder) argsSessionLoadingOptions rootPath (tLoaderQueue threadQueue) @@ -353,9 +352,9 @@ defaultMain recorder Arguments{..} = withHeapStats (cmapWithPrio LogHeapStats re putMVar ideStateVar ide pure ide - let setup = setupLSP (cmapWithPrio LogLanguageServer recorder) argsProjectRoot argsGetHieDbLoc (pluginHandlers plugins) getIdeState + let setup ideStateVar = setupLSP (cmapWithPrio LogLanguageServer recorder) argsProjectRoot argsGetHieDbLoc (pluginHandlers plugins) (getIdeState ideStateVar) -- See Note [Client configuration in Rules] - onConfigChange cfg = do + onConfigChange ideStateVar cfg = do -- TODO: this is nuts, we're converting back to JSON just to get a fingerprint let cfgObj = J.toJSON cfg mide <- liftIO $ tryReadMVar ideStateVar @@ -368,7 +367,9 @@ defaultMain recorder Arguments{..} = withHeapStats (cmapWithPrio LogHeapStats re modifyClientSettings ide (const $ Just cfgObj) return [toNoFileKey Rules.GetClientSettings] - runLanguageServer (cmapWithPrio LogLanguageServer recorder) options inH outH argsDefaultHlsConfig argsParseConfig onConfigChange setup + do + ideStateVar <- newEmptyMVar + runLanguageServer (cmapWithPrio LogLanguageServer recorder) options inH outH argsDefaultHlsConfig argsParseConfig (onConfigChange ideStateVar) (setup ideStateVar) dumpSTMStats Check argFiles -> do let dir = argsProjectRoot From 4d309d50fde5943f566af62a0c3a39aaac873605 Mon Sep 17 00:00:00 2001 From: VeryMilkyJoe Date: Wed, 16 Apr 2025 14:21:40 +0200 Subject: [PATCH 83/90] Add Code Action for adding a module to the cabal file For diagnostics complaining about the current module being unknown, we now offer code actions to add the module to any possible field in the responsible cabal file. 
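
For example (illustrative): when GHC reports a diagnostic such as
"Loading the module ‘Data.Foo’ failed.", the plugin locates the
responsible cabal file and offers code actions, per stanza whose
hs-source-dirs contain the file, to add the module to that stanza's
other-modules field and, for library stanzas, to exposed-modules; the
associated command then performs the edit through the cabal-add
library.
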
Additionally, refactor the cabal-plugin into smaller modules and refactor the add-package feature to have some shared functions to be used for both add-package and add-module. --- haskell-language-server.cabal | 12 +- .../hls-cabal-plugin/src/Ide/Plugin/Cabal.hs | 489 ++++++------------ .../src/Ide/Plugin/Cabal/CabalAdd.hs | 326 ------------ .../Ide/Plugin/Cabal/CabalAdd/CodeAction.hs | 343 ++++++++++++ .../src/Ide/Plugin/Cabal/CabalAdd/Command.hs | 232 +++++++++ .../src/Ide/Plugin/Cabal/CabalAdd/Types.hs | 104 ++++ .../src/Ide/Plugin/Cabal/Files.hs | 56 ++ .../src/Ide/Plugin/Cabal/OfInterest.hs | 122 +++++ .../src/Ide/Plugin/Cabal/Orphans.hs | 17 +- .../src/Ide/Plugin/Cabal/Parse.hs | 4 +- .../src/Ide/Plugin/Cabal/Rules.hs | 160 ++++++ plugins/hls-cabal-plugin/test/CabalAdd.hs | 145 ++++-- plugins/hls-cabal-plugin/test/Main.hs | 11 +- .../testdata/cabal-add-module/library/Main.hs | 4 + .../cabal-add-module/library/cabal.project | 1 + .../cabal-add-module/library/hie.yaml | 2 + .../cabal-add-module/library/test.cabal | 26 + stack-lts22.yaml | 4 +- stack.yaml | 4 +- 19 files changed, 1347 insertions(+), 715 deletions(-) delete mode 100644 plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd.hs create mode 100644 plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/CodeAction.hs create mode 100644 plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Command.hs create mode 100644 plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Types.hs create mode 100644 plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Files.hs create mode 100644 plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/OfInterest.hs create mode 100644 plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Rules.hs create mode 100644 plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/Main.hs create mode 100644 plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/cabal.project create mode 100644 plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/hie.yaml create mode 100644 plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/test.cabal diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index ab57fa79ea..dc4cb246d0 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -254,8 +254,13 @@ library hls-cabal-plugin Ide.Plugin.Cabal.Completion.Types Ide.Plugin.Cabal.Definition Ide.Plugin.Cabal.FieldSuggest + Ide.Plugin.Cabal.Files + Ide.Plugin.Cabal.OfInterest Ide.Plugin.Cabal.LicenseSuggest - Ide.Plugin.Cabal.CabalAdd + Ide.Plugin.Cabal.Rules + Ide.Plugin.Cabal.CabalAdd.Command + Ide.Plugin.Cabal.CabalAdd.CodeAction + Ide.Plugin.Cabal.CabalAdd.Types Ide.Plugin.Cabal.Orphans Ide.Plugin.Cabal.Outline Ide.Plugin.Cabal.Parse @@ -276,14 +281,14 @@ library hls-cabal-plugin , lens , lsp ^>=2.7 , lsp-types ^>=2.3 + , mtl , regex-tdfa ^>=1.3.1 , text , text-rope , transformers , unordered-containers >=0.2.10.0 , containers - , cabal-add ^>=0.1 - , process + , cabal-add ^>=0.2 , aeson , Cabal , pretty @@ -315,7 +320,6 @@ test-suite hls-cabal-plugin-tests , lens , lsp-types , text - , hls-plugin-api ----------------------------- -- class plugin diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs index 9a56467f3f..78db726f77 100644 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs @@ -6,61 +6,48 @@ module Ide.Plugin.Cabal (descriptor, haskellInteractionDescriptor, Log (..)) where -import Control.Concurrent.Strict -import Control.DeepSeq 
import Control.Lens ((^.)) import Control.Monad.Extra import Control.Monad.IO.Class import Control.Monad.Trans.Class (lift) import Control.Monad.Trans.Maybe (runMaybeT) -import qualified Data.ByteString as BS -import Data.Hashable import Data.HashMap.Strict (HashMap) -import qualified Data.HashMap.Strict as HashMap import qualified Data.List as List -import qualified Data.List.NonEmpty as NE import qualified Data.Maybe as Maybe -import Data.Proxy import qualified Data.Text () import qualified Data.Text as T -import qualified Data.Text.Encoding as Encoding -import Data.Text.Utf16.Rope.Mixed as Rope import Development.IDE as D import Development.IDE.Core.FileStore (getVersionedTextDoc) import Development.IDE.Core.PluginUtils import Development.IDE.Core.Shake (restartShakeSession) -import qualified Development.IDE.Core.Shake as Shake -import Development.IDE.Graph (Key, - alwaysRerun) +import Development.IDE.Graph (Key) import Development.IDE.LSP.HoverDefinition (foundHover) import qualified Development.IDE.Plugin.Completions.Logic as Ghcide import Development.IDE.Types.Shake (toKey) -import qualified Distribution.CabalSpecVersion as Cabal import qualified Distribution.Fields as Syntax import Distribution.Package (Dependency) import Distribution.PackageDescription (allBuildDepends, depPkgName, unPackageName) import Distribution.PackageDescription.Configuration (flattenPackageDescription) -import Distribution.Parsec.Error import qualified Distribution.Parsec.Position as Syntax -import GHC.Generics -import qualified Ide.Plugin.Cabal.CabalAdd as CabalAdd +import qualified Ide.Plugin.Cabal.CabalAdd.CodeAction as CabalAdd +import qualified Ide.Plugin.Cabal.CabalAdd.Command as CabalAdd import Ide.Plugin.Cabal.Completion.CabalFields as CabalFields import qualified Ide.Plugin.Cabal.Completion.Completer.Types as CompleterTypes import qualified Ide.Plugin.Cabal.Completion.Completions as Completions -import qualified Ide.Plugin.Cabal.Completion.Data as Data import Ide.Plugin.Cabal.Completion.Types (ParseCabalCommonSections (ParseCabalCommonSections), ParseCabalFields (..), ParseCabalFile (..)) import qualified Ide.Plugin.Cabal.Completion.Types as Types import Ide.Plugin.Cabal.Definition (gotoDefinition) -import qualified Ide.Plugin.Cabal.Diagnostics as Diagnostics import qualified Ide.Plugin.Cabal.FieldSuggest as FieldSuggest +import qualified Ide.Plugin.Cabal.Files as CabalAdd import qualified Ide.Plugin.Cabal.LicenseSuggest as LicenseSuggest +import qualified Ide.Plugin.Cabal.OfInterest as OfInterest import Ide.Plugin.Cabal.Orphans () import Ide.Plugin.Cabal.Outline -import qualified Ide.Plugin.Cabal.Parse as Parse +import qualified Ide.Plugin.Cabal.Rules as Rules import Ide.Plugin.Error import Ide.Types import qualified Language.LSP.Protocol.Lens as JL @@ -71,7 +58,8 @@ import Text.Regex.TDFA data Log = LogModificationTime NormalizedFilePath FileVersion - | LogShake Shake.Log + | LogRule Rules.Log + | LogOfInterest OfInterest.Log | LogDocOpened Uri | LogDocModified Uri | LogDocSaved Uri @@ -84,7 +72,8 @@ data Log instance Pretty Log where pretty = \case - LogShake log' -> pretty log' + LogRule log' -> pretty log' + LogOfInterest log' -> pretty log' LogModificationTime nfp modTime -> "Modified:" <+> pretty (fromNormalizedFilePath nfp) <+> pretty (show modTime) LogDocOpened uri -> @@ -105,28 +94,30 @@ instance Pretty Log where LogCompletions logs -> pretty logs LogCabalAdd logs -> pretty logs --- | Some actions with cabal files originate from haskell files. 
--- This descriptor allows to hook into the diagnostics of haskell source files, and --- allows us to provide code actions and commands that interact with `.cabal` files. +{- | Some actions in cabal files can be triggered from haskell files. +This descriptor allows us to hook into the diagnostics of haskell source files and +allows us to provide code actions and commands that interact with `.cabal` files. +-} haskellInteractionDescriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState haskellInteractionDescriptor recorder plId = (defaultPluginDescriptor plId "Provides the cabal-add code action in haskell files") { pluginHandlers = mconcat - [ mkPluginHandler LSP.SMethod_TextDocumentCodeAction cabalAddCodeAction + [ mkPluginHandler LSP.SMethod_TextDocumentCodeAction $ cabalAddDependencyCodeAction recorder + , mkPluginHandler LSP.SMethod_TextDocumentCodeAction $ cabalAddModuleCodeAction recorder ] - , pluginCommands = [PluginCommand CabalAdd.cabalAddCommand "add a dependency to a cabal file" (CabalAdd.command cabalAddRecorder)] - , pluginRules = pure () - , pluginNotificationHandlers = mempty + , pluginCommands = + [ PluginCommand CabalAdd.cabalAddDependencyCommandId "add a dependency to a cabal file" (CabalAdd.addDependencyCommand cabalAddRecorder) + , PluginCommand CabalAdd.cabalAddModuleCommandId "add a module to a cabal file" (CabalAdd.addModuleCommand cabalAddRecorder) + ] } - where - cabalAddRecorder = cmapWithPrio LogCabalAdd recorder - + where + cabalAddRecorder = cmapWithPrio LogCabalAdd recorder descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState descriptor recorder plId = (defaultCabalPluginDescriptor plId "Provides a variety of IDE features in cabal files") - { pluginRules = cabalRules recorder plId + { pluginRules = Rules.cabalRules ruleRecorder plId , pluginHandlers = mconcat [ mkPluginHandler LSP.SMethod_TextDocumentCodeAction licenseSuggestCodeAction @@ -143,32 +134,35 @@ descriptor recorder plId = whenUriFile _uri $ \file -> do log' Debug $ LogDocOpened _uri restartCabalShakeSession (shakeExtras ide) vfs file "(opened)" $ - addFileOfInterest recorder ide file Modified{firstOpen = True} + OfInterest.addFileOfInterest ofInterestRecorder ide file Modified{firstOpen = True} , mkPluginNotificationHandler LSP.SMethod_TextDocumentDidChange $ \ide vfs _ (DidChangeTextDocumentParams VersionedTextDocumentIdentifier{_uri} _) -> liftIO $ do whenUriFile _uri $ \file -> do log' Debug $ LogDocModified _uri restartCabalShakeSession (shakeExtras ide) vfs file "(changed)" $ - addFileOfInterest recorder ide file Modified{firstOpen = False} + OfInterest.addFileOfInterest ofInterestRecorder ide file Modified{firstOpen = False} , mkPluginNotificationHandler LSP.SMethod_TextDocumentDidSave $ \ide vfs _ (DidSaveTextDocumentParams TextDocumentIdentifier{_uri} _) -> liftIO $ do whenUriFile _uri $ \file -> do log' Debug $ LogDocSaved _uri restartCabalShakeSession (shakeExtras ide) vfs file "(saved)" $ - addFileOfInterest recorder ide file OnDisk + OfInterest.addFileOfInterest ofInterestRecorder ide file OnDisk , mkPluginNotificationHandler LSP.SMethod_TextDocumentDidClose $ \ide vfs _ (DidCloseTextDocumentParams TextDocumentIdentifier{_uri}) -> liftIO $ do whenUriFile _uri $ \file -> do log' Debug $ LogDocClosed _uri restartCabalShakeSession (shakeExtras ide) vfs file "(closed)" $ - deleteFileOfInterest recorder ide file + OfInterest.deleteFileOfInterest ofInterestRecorder ide file ] - , pluginConfigDescriptor = defaultConfigDescriptor - { 
configHasDiagnostics = True - } + , pluginConfigDescriptor = + defaultConfigDescriptor + { configHasDiagnostics = True + } } where log' = logWith recorder + ruleRecorder = cmapWithPrio LogRule recorder + ofInterestRecorder = cmapWithPrio LogOfInterest recorder whenUriFile :: Uri -> (NormalizedFilePath -> IO ()) -> IO () whenUriFile uri act = whenJust (uriToFilePath uri) $ act . toNormalizedFilePath' @@ -186,146 +180,29 @@ restartCabalShakeSession :: ShakeExtras -> VFS.VFS -> NormalizedFilePath -> Stri restartCabalShakeSession shakeExtras vfs file actionMsg actionBetweenSession = do restartShakeSession shakeExtras (VFSModified vfs) (fromNormalizedFilePath file ++ " " ++ actionMsg) [] $ do keys <- actionBetweenSession - return (toKey GetModificationTime file:keys) - --- ---------------------------------------------------------------- --- Plugin Rules --- ---------------------------------------------------------------- - -cabalRules :: Recorder (WithPriority Log) -> PluginId -> Rules () -cabalRules recorder plId = do - -- Make sure we initialise the cabal files-of-interest. - ofInterestRules recorder - -- Rule to produce diagnostics for cabal files. - define (cmapWithPrio LogShake recorder) $ \ParseCabalFields file -> do - config <- getPluginConfigAction plId - if not (plcGlobalOn config && plcDiagnosticsOn config) - then pure ([], Nothing) - else do - -- whenever this key is marked as dirty (e.g., when a user writes stuff to it), - -- we rerun this rule because this rule *depends* on GetModificationTime. - (t, mCabalSource) <- use_ GetFileContents file - log' Debug $ LogModificationTime file t - contents <- case mCabalSource of - Just sources -> - pure $ Encoding.encodeUtf8 $ Rope.toText sources - Nothing -> do - liftIO $ BS.readFile $ fromNormalizedFilePath file - - case Parse.readCabalFields file contents of - Left _ -> - pure ([], Nothing) - Right fields -> - pure ([], Just fields) - - define (cmapWithPrio LogShake recorder) $ \ParseCabalCommonSections file -> do - fields <- use_ ParseCabalFields file - let commonSections = Maybe.mapMaybe (\case - commonSection@(Syntax.Section (Syntax.Name _ "common") _ _) -> Just commonSection - _ -> Nothing) - fields - pure ([], Just commonSections) - - define (cmapWithPrio LogShake recorder) $ \ParseCabalFile file -> do - config <- getPluginConfigAction plId - if not (plcGlobalOn config && plcDiagnosticsOn config) - then pure ([], Nothing) - else do - -- whenever this key is marked as dirty (e.g., when a user writes stuff to it), - -- we rerun this rule because this rule *depends* on GetModificationTime. - (t, mCabalSource) <- use_ GetFileContents file - log' Debug $ LogModificationTime file t - contents <- case mCabalSource of - Just sources -> - pure $ Encoding.encodeUtf8 $ Rope.toText sources - Nothing -> do - liftIO $ BS.readFile $ fromNormalizedFilePath file - - -- Instead of fully reparsing the sources to get a 'GenericPackageDescription', - -- we would much rather re-use the already parsed results of 'ParseCabalFields'. - -- Unfortunately, Cabal-syntax doesn't expose the function 'parseGenericPackageDescription'' - -- which allows us to resume the parsing pipeline with '[Field Position]'. - (pWarnings, pm) <- liftIO $ Parse.parseCabalFileContents contents - let warningDiags = fmap (Diagnostics.warningDiagnostic file) pWarnings - case pm of - Left (_cabalVersion, pErrorNE) -> do - let regexUnknownCabalBefore310 :: T.Text - -- We don't support the cabal version, this should not be an error, as the - -- user did not do anything wrong. 
Instead we cast it to a warning - regexUnknownCabalBefore310 = "Unsupported cabal-version [0-9]+.[0-9]*" - regexUnknownCabalVersion :: T.Text - regexUnknownCabalVersion = "Unsupported cabal format version in cabal-version field: [0-9]+.[0-9]+" - unsupportedCabalHelpText = unlines - [ "The used `cabal-version` is not fully supported by this `HLS` binary." - , "Either the `cabal-version` is unknown, or too new for this executable." - , "This means that some functionality might not work as expected." - , "If you face any issues, try downgrading to a supported `cabal-version` or upgrading `HLS` if possible." - , "" - , "Supported versions are: " <> - List.intercalate ", " - (fmap Cabal.showCabalSpecVersion Data.supportedCabalVersions) - ] - errorDiags = - NE.toList $ - NE.map - ( \pe@(PError pos text) -> - if any (text =~) - [ regexUnknownCabalBefore310 - , regexUnknownCabalVersion - ] - then Diagnostics.warningDiagnostic file (Syntax.PWarning Syntax.PWTOther pos $ - unlines - [ text - , unsupportedCabalHelpText - ]) - else Diagnostics.errorDiagnostic file pe - ) - pErrorNE - allDiags = errorDiags <> warningDiags - pure (allDiags, Nothing) - Right gpd -> do - pure (warningDiags, Just gpd) - - action $ do - -- Run the cabal kick. This code always runs when 'shakeRestart' is run. - -- Must be careful to not impede the performance too much. Crucial to - -- a snappy IDE experience. - kick - where - log' = logWith recorder - -{- | This is the kick function for the cabal plugin. -We run this action, whenever we shake session us run/restarted, which triggers -actions to produce diagnostics for cabal files. - -It is paramount that this kick-function can be run quickly, since it is a blocking -function invocation. --} -kick :: Action () -kick = do - files <- HashMap.keys <$> getCabalFilesOfInterestUntracked - Shake.runWithSignal (Proxy @"kick/start/cabal") (Proxy @"kick/done/cabal") files Types.ParseCabalFile + return (toKey GetModificationTime file : keys) -- ---------------------------------------------------------------- -- Code Actions -- ---------------------------------------------------------------- licenseSuggestCodeAction :: PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction -licenseSuggestCodeAction ideState _ (CodeActionParams _ _ (TextDocumentIdentifier uri) _range CodeActionContext{_diagnostics=diags}) = do +licenseSuggestCodeAction ideState _ (CodeActionParams _ _ (TextDocumentIdentifier uri) _range CodeActionContext{_diagnostics = diags}) = do maxCompls <- fmap maxCompletions . liftIO $ runAction "cabal-plugin.suggestLicense" ideState getClientConfigAction pure $ InL $ diags >>= (fmap InR . LicenseSuggest.licenseErrorAction maxCompls uri) --- | CodeActions for correcting field names with typos in them. --- --- Provides CodeActions that fix typos in both stanzas and top-level field names. --- The suggestions are computed based on the completion context, where we "move" a fake cursor --- to the end of the field name and trigger cabal file completions. The completions are then --- suggested to the user. --- --- TODO: Relying on completions here often does not produce the desired results, we should --- use some sort of fuzzy matching in the future, see issue #4357. +{- | CodeActions for correcting field names with typos in them. + +Provides CodeActions that fix typos in both stanzas and top-level field names. +The suggestions are computed based on the completion context, where we "move" a fake cursor +to the end of the field name and trigger cabal file completions. 
The completions are then +suggested to the user. + +TODO: Relying on completions here often does not produce the desired results, we should +use some sort of fuzzy matching in the future, see issue #4357. +-} fieldSuggestCodeAction :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction -fieldSuggestCodeAction recorder ide _ (CodeActionParams _ _ (TextDocumentIdentifier uri) _ CodeActionContext{_diagnostics=diags}) = do +fieldSuggestCodeAction recorder ide _ (CodeActionParams _ _ (TextDocumentIdentifier uri) _ CodeActionContext{_diagnostics = diags}) = do mContents <- liftIO $ runAction "cabal-plugin.getUriContents" ide $ getUriContents $ toNormalizedUri uri case (,) <$> mContents <*> uriToFilePath' uri of Nothing -> pure $ InL [] @@ -340,47 +217,80 @@ fieldSuggestCodeAction recorder ide _ (CodeActionParams _ _ (TextDocumentIdentif let fields = Maybe.mapMaybe FieldSuggest.fieldErrorName diags results <- forM fields (getSuggestion fileContents path cabalFields) pure $ InL $ map InR $ concat results - where - getSuggestion fileContents fp cabalFields (fieldName,Diagnostic{ _range=_range@(Range (Position lineNr col) _) }) = do - let -- Compute where we would anticipate the cursor to be. - fakeLspCursorPosition = Position lineNr (col + fromIntegral (T.length fieldName)) - lspPrefixInfo = Ghcide.getCompletionPrefixFromRope fakeLspCursorPosition fileContents - cabalPrefixInfo = Completions.getCabalPrefixInfo fp lspPrefixInfo - completions <- liftIO $ computeCompletionsAt recorder ide cabalPrefixInfo fp cabalFields - let completionTexts = fmap (^. JL.label) completions - pure $ FieldSuggest.fieldErrorAction uri fieldName completionTexts _range - -cabalAddCodeAction :: PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction -cabalAddCodeAction state plId (CodeActionParams _ _ (TextDocumentIdentifier uri) _ CodeActionContext{_diagnostics=diags}) = do - maxCompls <- fmap maxCompletions . liftIO $ runAction "cabal.cabal-add" state getClientConfigAction - let suggestions = take maxCompls $ concatMap CabalAdd.hiddenPackageSuggestion diags + where + getSuggestion fileContents fp cabalFields (fieldName, Diagnostic{_range = _range@(Range (Position lineNr col) _)}) = do + let + -- Compute where we would anticipate the cursor to be. + fakeLspCursorPosition = Position lineNr (col + fromIntegral (T.length fieldName)) + lspPrefixInfo = Ghcide.getCompletionPrefixFromRope fakeLspCursorPosition fileContents + cabalPrefixInfo = Completions.getCabalPrefixInfo fp lspPrefixInfo + completions <- liftIO $ computeCompletionsAt recorder ide cabalPrefixInfo fp cabalFields + let completionTexts = fmap (^. 
JL.label) completions + pure $ FieldSuggest.fieldErrorAction uri fieldName completionTexts _range + +cabalAddDependencyCodeAction :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction +cabalAddDependencyCodeAction _ state plId (CodeActionParams _ _ (TextDocumentIdentifier uri) _ CodeActionContext{_diagnostics = diags}) = do + let suggestions = concatMap CabalAdd.hiddenPackageSuggestion diags case suggestions of [] -> pure $ InL [] - _ -> - case uriToFilePath uri of + _ -> do + haskellFilePath <- uriToFilePathE uri + mbCabalFile <- liftIO $ CabalAdd.findResponsibleCabalFile haskellFilePath + case mbCabalFile of Nothing -> pure $ InL [] - Just haskellFilePath -> do - mbCabalFile <- liftIO $ CabalAdd.findResponsibleCabalFile haskellFilePath - case mbCabalFile of + Just cabalFilePath -> do + verTxtDocId <- + runActionE "cabalAdd.getVersionedTextDoc" state $ + lift $ + getVersionedTextDoc $ + TextDocumentIdentifier (filePathToUri cabalFilePath) + mbGPD <- liftIO $ runAction "cabal.cabal-add" state $ useWithStale ParseCabalFile $ toNormalizedFilePath cabalFilePath + case mbGPD of Nothing -> pure $ InL [] - Just cabalFilePath -> do - verTxtDocId <- runActionE "cabalAdd.getVersionedTextDoc" state $ - lift $ getVersionedTextDoc $ TextDocumentIdentifier (filePathToUri cabalFilePath) - mbGPD <- liftIO $ runAction "cabal.cabal-add" state $ useWithStale ParseCabalFile $ toNormalizedFilePath cabalFilePath - case mbGPD of - Nothing -> pure $ InL [] - Just (gpd, _) -> do - actions <- liftIO $ CabalAdd.addDependencySuggestCodeAction plId verTxtDocId - suggestions - haskellFilePath cabalFilePath - gpd - pure $ InL $ fmap InR actions + Just (gpd, _) -> do + actions <- + liftIO $ + CabalAdd.addDependencySuggestCodeAction + plId + verTxtDocId + suggestions + haskellFilePath + cabalFilePath + gpd + pure $ InL $ fmap InR actions + +cabalAddModuleCodeAction :: Recorder (WithPriority Log) -> PluginMethodHandler IdeState 'LSP.Method_TextDocumentCodeAction +cabalAddModuleCodeAction recorder state plId (CodeActionParams _ _ (TextDocumentIdentifier uri) _ CodeActionContext{_diagnostics = diags}) = + case List.find CabalAdd.isUnknownModuleDiagnostic diags of + Just diag -> + do + haskellFilePath <- uriToFilePathE uri + mbCabalFile <- liftIO $ CabalAdd.findResponsibleCabalFile haskellFilePath + case mbCabalFile of + Nothing -> pure $ InL [] + Just cabalFilePath -> do + verTextDocId <- + runActionE "cabalAdd.getVersionedTextDoc" state $ + lift $ + getVersionedTextDoc $ + TextDocumentIdentifier (filePathToUri cabalFilePath) + (gpd, _) <- runActionE "cabal.cabal-add" state $ useWithStaleE ParseCabalFile $ toNormalizedFilePath cabalFilePath + actions <- + CabalAdd.collectModuleInsertionOptions + (cmapWithPrio LogCabalAdd recorder) + plId + verTextDocId + diag + cabalFilePath + gpd + uri + pure $ InL $ fmap InR actions + Nothing -> pure $ InL [] --- | Handler for hover messages. --- --- Provides a Handler for displaying message on hover. --- If found that the filtered hover message is a dependency, --- adds a Documentation link. +{- | Handler for hover messages. + +If the cursor is hovering on a dependency, add a documentation link to that dependency. 
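+
+For example, hovering over the @split@ entry of a @build-depends@ field (any attached
+version requirement such as @>=0.2.5@ is stripped first) shows the package name
+followed by a Documentation link pointing at https://hackage.haskell.org/package/split.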
+-} hover :: PluginMethodHandler IdeState LSP.Method_TextDocumentHover hover ide _ msgParam = do nfp <- getNormalizedFilePathE uri @@ -395,111 +305,35 @@ hover ide _ msgParam = do Nothing -> pure $ InR Null Just txt -> if txt `elem` depsNames - then pure $ foundHover (Nothing, [txt <> "\n", documentationText txt]) - else pure $ InR Null - where - cursor = Types.lspPositionToCabalPosition (msgParam ^. JL.position) - uri = msgParam ^. JL.textDocument . JL.uri - - dependencyName :: Dependency -> T.Text - dependencyName dep = T.pack $ unPackageName $ depPkgName dep - - -- | Removes version requirements like - -- `==1.0.0.0`, `>= 2.1.1` that could be included in - -- hover message. Assumes that the dependency consists - -- of alphanums with dashes in between. Ends with an alphanum. - -- - -- Examples: - -- >>> filterVersion "imp-deps>=2.1.1" - -- "imp-deps" - filterVersion :: T.Text -> Maybe T.Text - filterVersion msg = getMatch (msg =~ regex) - where - regex :: T.Text - regex = "([a-zA-Z0-9-]*[a-zA-Z0-9])" - - getMatch :: (T.Text, T.Text, T.Text, [T.Text]) -> Maybe T.Text - getMatch (_, _, _, [dependency]) = Just dependency - getMatch (_, _, _, _) = Nothing -- impossible case - - documentationText :: T.Text -> T.Text - documentationText package = "[Documentation](https://hackage.haskell.org/package/" <> package <> ")" - - --- ---------------------------------------------------------------- --- Cabal file of Interest rules and global variable --- ---------------------------------------------------------------- - -{- | Cabal files that are currently open in the lsp-client. -Specific actions happen when these files are saved, closed or modified, -such as generating diagnostics, re-parsing, etc... - -We need to store the open files to parse them again if we restart the shake session. -Restarting of the shake session happens whenever these files are modified. --} -newtype OfInterestCabalVar = OfInterestCabalVar (Var (HashMap NormalizedFilePath FileOfInterestStatus)) - -instance Shake.IsIdeGlobal OfInterestCabalVar - -data IsCabalFileOfInterest = IsCabalFileOfInterest - deriving (Eq, Show, Generic) -instance Hashable IsCabalFileOfInterest -instance NFData IsCabalFileOfInterest - -type instance RuleResult IsCabalFileOfInterest = CabalFileOfInterestResult - -data CabalFileOfInterestResult = NotCabalFOI | IsCabalFOI FileOfInterestStatus - deriving (Eq, Show, Generic) -instance Hashable CabalFileOfInterestResult -instance NFData CabalFileOfInterestResult - -{- | The rule that initialises the files of interest state. - -Needs to be run on start-up. --} -ofInterestRules :: Recorder (WithPriority Log) -> Rules () -ofInterestRules recorder = do - Shake.addIdeGlobal . 
OfInterestCabalVar =<< liftIO (newVar HashMap.empty) - Shake.defineEarlyCutoff (cmapWithPrio LogShake recorder) $ RuleNoDiagnostics $ \IsCabalFileOfInterest f -> do - alwaysRerun - filesOfInterest <- getCabalFilesOfInterestUntracked - let foi = maybe NotCabalFOI IsCabalFOI $ f `HashMap.lookup` filesOfInterest - fp = summarize foi - res = (Just fp, Just foi) - return res - where - summarize NotCabalFOI = BS.singleton 0 - summarize (IsCabalFOI OnDisk) = BS.singleton 1 - summarize (IsCabalFOI (Modified False)) = BS.singleton 2 - summarize (IsCabalFOI (Modified True)) = BS.singleton 3 - -getCabalFilesOfInterestUntracked :: Action (HashMap NormalizedFilePath FileOfInterestStatus) -getCabalFilesOfInterestUntracked = do - OfInterestCabalVar var <- Shake.getIdeGlobalAction - liftIO $ readVar var - -addFileOfInterest :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> FileOfInterestStatus -> IO [Key] -addFileOfInterest recorder state f v = do - OfInterestCabalVar var <- Shake.getIdeGlobalState state - (prev, files) <- modifyVar var $ \dict -> do - let (prev, new) = HashMap.alterF (,Just v) f dict - pure (new, (prev, new)) - if prev /= Just v - then do - log' Debug $ LogFOI files - return [toKey IsCabalFileOfInterest f] - else return [] - where - log' = logWith recorder - -deleteFileOfInterest :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> IO [Key] -deleteFileOfInterest recorder state f = do - OfInterestCabalVar var <- Shake.getIdeGlobalState state - files <- modifyVar' var $ HashMap.delete f - log' Debug $ LogFOI files - return [toKey IsFileOfInterest f] + then pure $ foundHover (Nothing, [txt <> "\n", documentationText txt]) + else pure $ InR Null where - log' = logWith recorder + cursor = Types.lspPositionToCabalPosition (msgParam ^. JL.position) + uri = msgParam ^. JL.textDocument . JL.uri + + dependencyName :: Dependency -> T.Text + dependencyName dep = T.pack $ unPackageName $ depPkgName dep + + -- \| Removes version requirements like + -- `==1.0.0.0`, `>= 2.1.1` that could be included in + -- hover message. Assumes that the dependency consists + -- of alphanums with dashes in between. Ends with an alphanum. + -- + -- Examples: + -- >>> filterVersion "imp-deps>=2.1.1" + -- "imp-deps" + filterVersion :: T.Text -> Maybe T.Text + filterVersion msg = getMatch (msg =~ regex) + where + regex :: T.Text + regex = "([a-zA-Z0-9-]*[a-zA-Z0-9])" + + getMatch :: (T.Text, T.Text, T.Text, [T.Text]) -> Maybe T.Text + getMatch (_, _, _, [dependency]) = Just dependency + getMatch (_, _, _, _) = Nothing -- impossible case + + documentationText :: T.Text -> T.Text + documentationText package = "[Documentation](https://hackage.haskell.org/package/" <> package <> ")" -- ---------------------------------------------------------------- -- Completion @@ -532,23 +366,24 @@ computeCompletionsAt recorder ide prefInfo fp fields = do Just ctx -> do logWith recorder Debug $ LogCompletionContext ctx pos let completer = Completions.contextToCompleter ctx - let completerData = CompleterTypes.CompleterData - { getLatestGPD = do - -- We decide on useWithStaleFast here, since we mostly care about the file's meta information, - -- thus, a quick response gives us the desired result most of the time. - -- The `withStale` option is very important here, since we often call this rule with invalid cabal files. 
- mGPD <- runAction "cabal-plugin.modulesCompleter.gpd" ide $ useWithStale ParseCabalFile $ toNormalizedFilePath fp - pure $ fmap fst mGPD - , getCabalCommonSections = runAction "cabal-plugin.commonSections" ide $ use ParseCabalCommonSections $ toNormalizedFilePath fp - , cabalPrefixInfo = prefInfo - , stanzaName = - case fst ctx of - Types.Stanza _ name -> name - _ -> Nothing - } + let completerData = + CompleterTypes.CompleterData + { getLatestGPD = do + -- We decide on useWithStaleFast here, since we mostly care about the file's meta information, + -- thus, a quick response gives us the desired result most of the time. + -- The `withStale` option is very important here, since we often call this rule with invalid cabal files. + mGPD <- runAction "cabal-plugin.modulesCompleter.gpd" ide $ useWithStale ParseCabalFile $ toNormalizedFilePath fp + pure $ fmap fst mGPD + , getCabalCommonSections = runAction "cabal-plugin.commonSections" ide $ use ParseCabalCommonSections $ toNormalizedFilePath fp + , cabalPrefixInfo = prefInfo + , stanzaName = + case fst ctx of + Types.Stanza _ name -> name + _ -> Nothing + } completions <- completer completerRecorder completerData pure completions - where - pos = Types.completionCursorPosition prefInfo - context fields = Completions.getContext completerRecorder prefInfo fields - completerRecorder = cmapWithPrio LogCompletions recorder + where + pos = Types.completionCursorPosition prefInfo + context fields = Completions.getContext completerRecorder prefInfo fields + completerRecorder = cmapWithPrio LogCompletions recorder diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd.hs deleted file mode 100644 index 3b46eec128..0000000000 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd.hs +++ /dev/null @@ -1,326 +0,0 @@ -{-# LANGUAGE AllowAmbiguousTypes #-} -{-# LANGUAGE DeriveAnyClass #-} -{-# LANGUAGE DerivingStrategies #-} -{-# LANGUAGE ExplicitNamespaces #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PartialTypeSignatures #-} -{-# LANGUAGE RecordWildCards #-} - -module Ide.Plugin.Cabal.CabalAdd -( findResponsibleCabalFile - , addDependencySuggestCodeAction - , hiddenPackageSuggestion - , cabalAddCommand - , command - , Log -) -where - -import Control.Monad (filterM, void) -import Control.Monad.IO.Class (MonadIO, liftIO) -import Control.Monad.Trans.Class (lift) -import Control.Monad.Trans.Except -import Data.Aeson.Types (FromJSON, - ToJSON, toJSON) -import Data.ByteString (ByteString) -import qualified Data.ByteString.Char8 as B -import Data.List.NonEmpty (NonEmpty (..), - fromList) -import Data.String (IsString) -import qualified Data.Text as T -import Data.Text.Encoding (encodeUtf8) -import qualified Data.Text.Encoding as T -import Data.Text.Utf16.Rope.Mixed as Rope -import Development.IDE (IdeState, - getFileContents, - useWithStale) -import Development.IDE.Core.Rules (runAction) -import Distribution.Client.Add as Add -import Distribution.Compat.Prelude (Generic) -import Distribution.PackageDescription (GenericPackageDescription, - packageDescription, - specVersion) -import Distribution.PackageDescription.Configuration (flattenPackageDescription) -import Distribution.PackageDescription.Quirks (patchQuirks) -import qualified Distribution.Pretty as Pretty -import Distribution.Simple.BuildTarget (BuildTarget, - buildTargetComponentName, - readBuildTargets) -import Distribution.Simple.Utils (safeHead) -import Distribution.Verbosity (silent, - 
verboseNoStderr) -import Ide.Logger -import Ide.Plugin.Cabal.Completion.Types (ParseCabalFields (..), - ParseCabalFile (..)) -import Ide.Plugin.Cabal.Orphans () -import Ide.Plugin.Error -import Ide.PluginUtils (WithDeletions (SkipDeletions), - diffText, - mkLspCommand) -import Ide.Types (CommandFunction, - CommandId (CommandId), - PluginId, - pluginGetClientCapabilities, - pluginSendRequest) -import Language.LSP.Protocol.Message (SMethod (SMethod_WorkspaceApplyEdit)) -import Language.LSP.Protocol.Types (ApplyWorkspaceEditParams (ApplyWorkspaceEditParams), - ClientCapabilities, - CodeAction (CodeAction), - CodeActionKind (CodeActionKind_QuickFix), - Diagnostic (..), - Null (Null), - VersionedTextDocumentIdentifier, - WorkspaceEdit, - toNormalizedFilePath, - type (|?) (InR)) -import System.Directory (doesFileExist, - listDirectory) -import System.FilePath (dropFileName, - makeRelative, - splitPath, - takeExtension, - ()) -import Text.PrettyPrint (render) -import Text.Regex.TDFA - -data Log - = LogFoundResponsibleCabalFile FilePath - | LogCalledCabalAddCommand CabalAddCommandParams - | LogCreatedEdit WorkspaceEdit - | LogExecutedCommand - deriving (Show) - -instance Pretty Log where - pretty = \case - LogFoundResponsibleCabalFile fp -> "Located the responsible cabal file at " <+> pretty fp - LogCalledCabalAddCommand params -> "Called CabalAdd command with:\n" <+> pretty params - LogCreatedEdit edit -> "Created inplace edit:\n" <+> pretty edit - LogExecutedCommand -> "Executed CabalAdd command" - -cabalAddCommand :: IsString p => p -cabalAddCommand = "cabalAdd" - -data CabalAddCommandParams = - CabalAddCommandParams { cabalPath :: FilePath - , verTxtDocId :: VersionedTextDocumentIdentifier - , buildTarget :: Maybe String - , dependency :: T.Text - , version :: Maybe T.Text - } - deriving (Generic, Show) - deriving anyclass (FromJSON, ToJSON) - -instance Pretty CabalAddCommandParams where - pretty CabalAddCommandParams{..} = - "CabalAdd parameters:" <+> vcat - [ "cabal path:" <+> pretty cabalPath - , "target:" <+> pretty buildTarget - , "dependendency:" <+> pretty dependency - , "version:" <+> pretty version - ] - --- | Creates a code action that calls the `cabalAddCommand`, --- using dependency-version suggestion pairs as input. --- --- Returns disabled action if no cabal files given. --- --- Takes haskell file and cabal file paths to create a relative path --- to the haskell file, which is used to get a `BuildTarget`. --- --- In current implementation the dependency is being added to the main found --- build target, but if there will be a way to get all build targets from a file --- it will be possible to support addition to a build target of choice. 
-addDependencySuggestCodeAction - :: PluginId - -> VersionedTextDocumentIdentifier -- ^ Cabal's versioned text identifier - -> [(T.Text, T.Text)] -- ^ A dependency-version suggestion pairs - -> FilePath -- ^ Path to the haskell file (source of diagnostics) - -> FilePath -- ^ Path to the cabal file (that will be edited) - -> GenericPackageDescription - -> IO [CodeAction] -addDependencySuggestCodeAction plId verTxtDocId suggestions haskellFilePath cabalFilePath gpd = do - buildTargets <- liftIO $ getBuildTargets gpd cabalFilePath haskellFilePath - case buildTargets of - -- If there are no build targets found, run `cabal-add` command with default behaviour - [] -> pure $ mkCodeAction cabalFilePath Nothing <$> suggestions - -- Otherwise provide actions for all found targets - targets -> pure $ concat [mkCodeAction cabalFilePath (Just $ buildTargetToStringRepr target) <$> - suggestions | target <- targets] - where - -- | Note the use of `pretty` function. - -- It converts the `BuildTarget` to an acceptable string representation. - -- It will be used in as the input for `cabal-add`'s `executeConfig`. - buildTargetToStringRepr target = render $ Pretty.pretty $ buildTargetComponentName target - - -- | Gives the build targets that are used in the `CabalAdd`. - -- Note the unorthodox usage of `readBuildTargets`: - -- If the relative path to the haskell file is provided, - -- the `readBuildTargets` will return build targets, where this - -- module is mentioned (in exposed-modules or other-modules). - getBuildTargets :: GenericPackageDescription -> FilePath -> FilePath -> IO [BuildTarget] - getBuildTargets gpd cabalFilePath haskellFilePath = do - let haskellFileRelativePath = makeRelative (dropFileName cabalFilePath) haskellFilePath - readBuildTargets (verboseNoStderr silent) (flattenPackageDescription gpd) [haskellFileRelativePath] - - mkCodeAction :: FilePath -> Maybe String -> (T.Text, T.Text) -> CodeAction - mkCodeAction cabalFilePath target (suggestedDep, suggestedVersion) = - let - versionTitle = if T.null suggestedVersion then T.empty else "-" <> suggestedVersion - targetTitle = case target of - Nothing -> T.empty - Just t -> " at " <> T.pack t - title = "Add dependency " <> suggestedDep <> versionTitle <> targetTitle - version = if T.null suggestedVersion then Nothing else Just suggestedVersion - - params = CabalAddCommandParams {cabalPath = cabalFilePath - , verTxtDocId = verTxtDocId - , buildTarget = target - , dependency = suggestedDep - , version=version} - command = mkLspCommand plId (CommandId cabalAddCommand) "Add missing dependency" (Just [toJSON params]) - in CodeAction title (Just CodeActionKind_QuickFix) (Just []) Nothing Nothing Nothing (Just command) Nothing - --- | Gives a mentioned number of @(dependency, version)@ pairs --- found in the "hidden package" diagnostic message. --- --- For example, if a ghc error looks like this: --- --- > "Could not load module ‘Data.List.Split’ --- > It is a member of the hidden package ‘split-0.2.5’. --- > Perhaps you need to add ‘split’ to the build-depends in your .cabal file." --- --- or this if PackageImports extension is used: --- --- > "Could not find module ‘Data.List.Split’ --- > Perhaps you meant --- > Data.List.Split (needs flag -package-id split-0.2.5)" --- --- It extracts mentioned package names and version numbers. --- In this example, it will be @[("split", "0.2.5")]@ --- --- Also supports messages without a version. --- --- > "Perhaps you need to add ‘split’ to the build-depends in your .cabal file." 
--- --- Will turn into @[("split", "")]@ -hiddenPackageSuggestion :: Diagnostic -> [(T.Text, T.Text)] -hiddenPackageSuggestion diag = getMatch (msg =~ regex) - where - msg :: T.Text - msg = _message diag - regex :: T.Text -- TODO: Support multiple packages suggestion - regex = - let regex' = "([a-zA-Z0-9-]*[a-zA-Z0-9])(-([0-9\\.]*))?" - in "It is a member of the hidden package [\8216']" <> regex' <> "[\8217']" - <> "|" - <> "needs flag -package-id " <> regex' - -- Have to do this matching because `Regex.TDFA` doesn't(?) support - -- not-capturing groups like (?:message) - getMatch :: (T.Text, T.Text, T.Text, [T.Text]) -> [(T.Text, T.Text)] - getMatch (_, _, _, []) = [] - getMatch (_, _, _, [dependency, _, cleanVersion, "", "", ""]) = [(dependency, cleanVersion)] - getMatch (_, _, _, ["", "", "", dependency, _, cleanVersion]) = [(dependency, cleanVersion)] - getMatch (_, _, _, _) = [] - -command :: Recorder (WithPriority Log) -> CommandFunction IdeState CabalAddCommandParams -command recorder state _ params@(CabalAddCommandParams {cabalPath = path, verTxtDocId = verTxtDocId, buildTarget = target, dependency = dep, version = mbVer}) = do - logWith recorder Debug $ LogCalledCabalAddCommand params - let specifiedDep = case mbVer of - Nothing -> dep - Just ver -> dep <> " ^>=" <> ver - caps <- lift pluginGetClientCapabilities - let env = (state, caps, verTxtDocId) - edit <- getDependencyEdit recorder env path target (fromList [T.unpack specifiedDep]) - void $ lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edit) (\_ -> pure ()) - logWith recorder Debug LogExecutedCommand - pure $ InR Null - --- | Constructs prerequisites for the @executeConfig@ --- and runs it, given path to the cabal file and a dependency message. --- Given the new contents of the cabal file constructs and returns the @edit@. --- Inspired by @main@ in cabal-add, --- Distribution.Client.Main -getDependencyEdit :: MonadIO m => Recorder (WithPriority Log) -> (IdeState, ClientCapabilities, VersionedTextDocumentIdentifier) -> - FilePath -> Maybe String -> NonEmpty String -> ExceptT PluginError m WorkspaceEdit -getDependencyEdit recorder env cabalFilePath buildTarget dependency = do - let (state, caps, verTxtDocId) = env - (mbCnfOrigContents, mbFields, mbPackDescr) <- liftIO $ runAction "cabal.cabal-add" state $ do - contents <- getFileContents $ toNormalizedFilePath cabalFilePath - inFields <- useWithStale ParseCabalFields $ toNormalizedFilePath cabalFilePath - inPackDescr <- useWithStale ParseCabalFile $ toNormalizedFilePath cabalFilePath - let mbCnfOrigContents = case contents of - (Just txt) -> Just $ encodeUtf8 $ Rope.toText txt - _ -> Nothing - let mbFields = fst <$> inFields - let mbPackDescr = fst <$> inPackDescr - pure (mbCnfOrigContents, mbFields, mbPackDescr) - - -- Check if required info was received, - -- otherwise fall back on other options. 
- (cnfOrigContents, fields, packDescr) <- do - cnfOrigContents <- case mbCnfOrigContents of - (Just cnfOrigContents) -> pure cnfOrigContents - Nothing -> readCabalFile cabalFilePath - (fields, packDescr) <- case (mbFields, mbPackDescr) of - (Just fields, Just packDescr) -> pure (fields, packDescr) - (_, _) -> case parseCabalFile cabalFilePath cnfOrigContents of - Left err -> throwE $ PluginInternalError $ T.pack err - Right (f ,gpd) -> pure (f, gpd) - pure (cnfOrigContents, fields, packDescr) - - let inputs = do - let rcnfComponent = buildTarget - let specVer = specVersion $ packageDescription packDescr - cmp <- resolveComponent cabalFilePath (fields, packDescr) rcnfComponent - deps <- traverse (validateDependency specVer) dependency - pure (fields, packDescr, cmp, deps) - - (cnfFields, origPackDescr, cnfComponent, cnfDependencies) <- case inputs of - Left err -> throwE $ PluginInternalError $ T.pack err - Right pair -> pure pair - - case executeConfig (validateChanges origPackDescr) (Config {..}) of - Nothing -> throwE $ PluginInternalError $ T.pack $ "Cannot extend build-depends in " ++ cabalFilePath - Just newContents -> do - let edit = diffText caps (verTxtDocId, T.decodeUtf8 cnfOrigContents) (T.decodeUtf8 newContents) SkipDeletions - logWith recorder Debug $ LogCreatedEdit edit - pure edit - --- | Given a path to a haskell file, returns the closest cabal file. --- If a package.yaml is present in same directory as the .cabal file, returns nothing, because adding a dependency to a generated cabal file --- will break propagation of changes from package.yaml to cabal files in stack projects. --- If cabal file wasn't found, gives Nothing. -findResponsibleCabalFile :: FilePath -> IO (Maybe FilePath) -findResponsibleCabalFile haskellFilePath = do - let dirPath = dropFileName haskellFilePath - allDirPaths = reverse $ scanl1 () (splitPath dirPath) -- sorted from most to least specific - go allDirPaths - where - go [] = pure Nothing - go (path:ps) = do - objects <- listDirectory path - let objectsWithPaths = map (\obj -> path <> obj) objects - objectsCabalExtension = filter (\c -> takeExtension c == ".cabal") objectsWithPaths - cabalFiles <- filterM (\c -> doesFileExist c) objectsCabalExtension - case safeHead cabalFiles of - Nothing -> go ps - Just cabalFile -> guardAgainstHpack path cabalFile - where - guardAgainstHpack :: FilePath -> FilePath -> IO (Maybe FilePath) - guardAgainstHpack path cabalFile = do - exists <- doesFileExist $ path "package.yaml" - if exists then pure Nothing else pure $ Just cabalFile - --- | Gives cabal file's contents or throws error. --- Inspired by @readCabalFile@ in cabal-add, --- Distribution.Client.Main --- --- This is a fallback option! --- Use only if the `GetFileContents` fails. -readCabalFile :: MonadIO m => FilePath -> ExceptT PluginError m ByteString -readCabalFile fileName = do - cabalFileExists <- liftIO $ doesFileExist fileName - if cabalFileExists - then snd . 
patchQuirks <$> liftIO (B.readFile fileName) - else throwE $ PluginInternalError $ T.pack ("Failed to read cabal file at " <> fileName) diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/CodeAction.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/CodeAction.hs new file mode 100644 index 0000000000..d72ad290fd --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/CodeAction.hs @@ -0,0 +1,343 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE RecordWildCards #-} + +module Ide.Plugin.Cabal.CabalAdd.CodeAction where + +import Control.Monad.IO.Class (MonadIO, liftIO) +import Control.Monad.Trans.Except +import Data.Aeson.Types (toJSON) +import Data.Foldable (asum) +import Data.Maybe (mapMaybe) +import qualified Data.Text as T +import Development.IDE.Core.PluginUtils (uriToFilePathE) +import Development.IDE.Types.Location (Uri) +import Distribution.PackageDescription +import Distribution.PackageDescription.Configuration (flattenPackageDescription) +import qualified Distribution.Pretty as CabalPretty +import Distribution.Simple.BuildTarget (BuildTarget, + buildTargetComponentName, + readBuildTargets) +import Distribution.Utils.Path (getSymbolicPath) +import Distribution.Verbosity (silent, + verboseNoStderr) +import Ide.Logger +import Ide.Plugin.Cabal.CabalAdd.Types +import Ide.Plugin.Cabal.Completion.Completer.Module (fpToExposedModulePath) +import Ide.Plugin.Cabal.Orphans () +import Ide.Plugin.Error +import Ide.PluginUtils (mkLspCommand) +import Ide.Types (CommandId (CommandId), + PluginId) + +import Control.Lens ((^.)) +import qualified Language.LSP.Protocol.Lens as JL +import Language.LSP.Protocol.Types (CodeActionKind (..), + VersionedTextDocumentIdentifier) +import qualified Language.LSP.Protocol.Types as J +import System.FilePath +import Text.PrettyPrint (render) +import Text.Regex.TDFA + +-------------------------------------------- +-- Add module to cabal file +-------------------------------------------- + +{- | Takes a path to a cabal file, a module path in exposed module syntax + and the contents of the cabal file and generates all possible + code actions for inserting the module into the cabal file + with the given contents. +-} +collectModuleInsertionOptions :: + (MonadIO m) => + Recorder (WithPriority Log) -> + PluginId -> + VersionedTextDocumentIdentifier -> + J.Diagnostic -> + -- | The file path of the cabal file to insert the new module into + FilePath -> + -- | The generic package description of the cabal file to insert the new module into. + GenericPackageDescription -> + -- | The URI of the unknown haskell file/new module to insert into the cabal file. 
+ Uri -> + ExceptT PluginError m [J.CodeAction] +collectModuleInsertionOptions _ plId txtDocIdentifier diag cabalFilePath gpd haskellFilePathURI = do + haskellFilePath <- uriToFilePathE haskellFilePathURI + let configs = concatMap (mkModuleInsertionConfig txtDocIdentifier cabalFilePath haskellFilePath) (makeStanzaItems gpd) + pure $ map (mkCodeActionForModulePath plId diag) configs + where + makeStanzaItems :: GenericPackageDescription -> [StanzaItem] + makeStanzaItems gpd = + mainLibItem pd + ++ libItems pd + ++ executableItems pd + ++ testSuiteItems pd + ++ benchmarkItems pd + where + pd = flattenPackageDescription gpd + +{- | Takes a buildInfo of a cabal file component as defined in the generic package description, + and translates it to filepaths of the component's hsSourceDirs, + to be processed for adding modules to exposed-, or other-modules fields in a cabal file. +-} +buildInfoToHsSourceDirs :: BuildInfo -> [FilePath] +buildInfoToHsSourceDirs buildInfo = map getSymbolicPath hsSourceDirs' + where + hsSourceDirs' = hsSourceDirs buildInfo + +{- | Takes the path to the cabal file to insert the module into, + the module path to be inserted, and a stanza representation. + + Returns a list of module insertion configs, where each config + represents a possible place to insert the module. +-} +mkModuleInsertionConfig :: VersionedTextDocumentIdentifier -> FilePath -> FilePath -> StanzaItem -> [ModuleInsertionConfig] +mkModuleInsertionConfig txtDocIdentifier cabalFilePath haskellFilePath (StanzaItem{..}) = do + case mkRelativeModulePathM siHsSourceDirs cabalFilePath haskellFilePath of + Just processedModPath -> + [modInsertItem processedModPath "other-modules"] + ++ [modInsertItem processedModPath "exposed-modules" | CLibName _ <- [siComponent]] + _ -> [] + where + modInsertItem :: T.Text -> T.Text -> ModuleInsertionConfig + modInsertItem modPath label = + ModuleInsertionConfig + { targetFile = cabalFilePath + , moduleToInsert = modPath + , modVerTxtDocId = txtDocIdentifier + , insertionStanza = siComponent + , insertionLabel = label + } + +mkCodeActionForModulePath :: PluginId -> J.Diagnostic -> ModuleInsertionConfig -> J.CodeAction +mkCodeActionForModulePath plId diag insertionConfig = + J.CodeAction + { _title = "Add to " <> label <> " as " <> fieldName + , _kind = Just CodeActionKind_Refactor + , _diagnostics = Just [diag] + , _isPreferred = Nothing + , _disabled = Nothing + , _edit = Nothing + , _command = Just command + , _data_ = Nothing + } + where + fieldName = insertionLabel insertionConfig + command = mkLspCommand plId (CommandId cabalAddModuleCommandId) "Add missing module" (Just [toJSON insertionConfig]) + label = T.pack $ CabalPretty.prettyShow $ insertionStanza insertionConfig + +{- | Takes a list of source subdirectories, a cabal source path and a haskell filepath + and returns a path to the module in exposed module syntax. + The path will be relative to one of the subdirectories, in case the module is contained within one of them. 
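+
+ Illustrative example (made-up paths): with source subdirectories @["src"]@, cabal file
+ @/proj/test.cabal@ and haskell file @/proj/src/Data/Foo.hs@, the result is roughly
+ @Just "Data.Foo"@; if the file is not contained in any of the subdirectories, the
+ result is @Nothing@.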
+-} +mkRelativeModulePathM :: [FilePath] -> FilePath -> FilePath -> Maybe T.Text +mkRelativeModulePathM hsSourceDirs cabalSrcPath' haskellFilePath = + asum $ + map + ( \srcDir -> do + let relMP = makeRelative (normalise (cabalSrcPath srcDir)) haskellFilePath + if relMP == haskellFilePath then Nothing else Just $ fpToExposedModulePath cabalSrcPath relMP + ) + hsSourceDirs + where + cabalSrcPath = takeDirectory cabalSrcPath' + +isUnknownModuleDiagnostic :: J.Diagnostic -> Bool +isUnknownModuleDiagnostic diag = (msg =~ regex) + where + msg :: T.Text + msg = diag ^. JL.message + regex :: T.Text + regex = "Loading the module [\8216'][^\8217']*[\8217'] failed." + +-------------------------- +-- Below are several utility functions which create a StanzaItem for each of the possible Stanzas, +-- these all have specific constructors we need to match, so we can't generalise this process well. +-------------------------- + +benchmarkItems :: PackageDescription -> [StanzaItem] +benchmarkItems pd = + map + ( \benchmark -> + StanzaItem + { siComponent = CBenchName $ benchmarkName benchmark + , siHsSourceDirs = buildInfoToHsSourceDirs $ benchmarkBuildInfo benchmark + } + ) + (benchmarks pd) + +testSuiteItems :: PackageDescription -> [StanzaItem] +testSuiteItems pd = + map + ( \testSuite -> + StanzaItem + { siComponent = CTestName $ testName testSuite + , siHsSourceDirs = buildInfoToHsSourceDirs $ testBuildInfo testSuite + } + ) + (testSuites pd) + +executableItems :: PackageDescription -> [StanzaItem] +executableItems pd = + map + ( \executable -> + StanzaItem + { siComponent = CExeName $ exeName executable + , siHsSourceDirs = buildInfoToHsSourceDirs $ buildInfo executable + } + ) + (executables pd) + +libItems :: PackageDescription -> [StanzaItem] +libItems pd = + mapMaybe + ( \subLib -> + case libName subLib of + LSubLibName compName -> + Just + StanzaItem + { siComponent = CLibName $ LSubLibName compName + , siHsSourceDirs = buildInfoToHsSourceDirs $ libBuildInfo subLib + } + _ -> Nothing + ) + (subLibraries pd) + +mainLibItem :: PackageDescription -> [StanzaItem] +mainLibItem pd = + case library pd of + Just lib -> + [ StanzaItem + { siComponent = CLibName LMainLibName + , siHsSourceDirs = buildInfoToHsSourceDirs $ libBuildInfo lib + } + ] + Nothing -> [] + +-------------------------------------------- +-- Add dependency to a cabal file +-------------------------------------------- + +{- | Creates a code action that calls the `cabalAddCommand`, + using dependency-version suggestion pairs as input. + + Returns disabled action if no cabal files given. + + Takes haskell and cabal file paths to create a relative path + to the haskell file, which is used to get a `BuildTarget`. 
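+
+ For example (illustrative), the suggestion pair @("split", "0.2.5")@ together with a
+ build target whose component name renders as, say, @exe:app@ yields an action titled
+ "Add dependency split-0.2.5 at exe:app"; without a version the "-0.2.5" part is
+ dropped, and without a build target the " at ..." suffix is omitted.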
+-} +addDependencySuggestCodeAction :: + PluginId -> + -- | Cabal's versioned text identifier + VersionedTextDocumentIdentifier -> + -- | A dependency-version suggestion pairs + [(T.Text, T.Text)] -> + -- | Path to the haskell file (source of diagnostics) + FilePath -> + -- | Path to the cabal file (that will be edited) + FilePath -> + GenericPackageDescription -> + IO [J.CodeAction] +addDependencySuggestCodeAction plId verTxtDocId suggestions haskellFilePath cabalFilePath gpd = do + buildTargets <- liftIO $ getBuildTargets gpd cabalFilePath haskellFilePath + case buildTargets of + -- If there are no build targets found, run the `cabal-add` command with default behaviour + [] -> pure $ mkCodeActionForDependency cabalFilePath Nothing <$> suggestions + -- Otherwise provide actions for all found targets + targets -> + pure $ + concat + [ mkCodeActionForDependency cabalFilePath (Just $ buildTargetToStringRepr target) + <$> suggestions + | target <- targets + ] + where + {- | Note the use of the `pretty` function. + It converts the `BuildTarget` to an acceptable string representation. + It will be used as the input for `cabal-add`'s `executeConfig`. + -} + buildTargetToStringRepr target = render $ CabalPretty.pretty $ buildTargetComponentName target + + {- | Finds the build targets that are used in `cabal-add`. + Note the unorthodox usage of `readBuildTargets`: + If the relative path to the haskell file is provided, + `readBuildTargets` will return the build targets, this + module is mentioned in (either exposed-modules or other-modules). + -} + getBuildTargets :: GenericPackageDescription -> FilePath -> FilePath -> IO [BuildTarget] + getBuildTargets gpd cabalFilePath haskellFilePath = do + let haskellFileRelativePath = makeRelative (dropFileName cabalFilePath) haskellFilePath + readBuildTargets (verboseNoStderr silent) (flattenPackageDescription gpd) [haskellFileRelativePath] + + mkCodeActionForDependency :: FilePath -> Maybe String -> (T.Text, T.Text) -> J.CodeAction + mkCodeActionForDependency cabalFilePath target (suggestedDep, suggestedVersion) = + let + versionTitle = if T.null suggestedVersion then T.empty else "-" <> suggestedVersion + targetTitle = case target of + Nothing -> T.empty + Just t -> " at " <> T.pack t + title = "Add dependency " <> suggestedDep <> versionTitle <> targetTitle + version = if T.null suggestedVersion then Nothing else Just suggestedVersion + + params = + CabalAddDependencyCommandParams + { depCabalPath = cabalFilePath + , depVerTxtDocId = verTxtDocId + , depBuildTarget = target + , depDependency = suggestedDep + , depVersion = version + } + command = mkLspCommand plId (CommandId cabalAddDependencyCommandId) "Add dependency" (Just [toJSON params]) + in + J.CodeAction title (Just CodeActionKind_QuickFix) (Just []) Nothing Nothing Nothing (Just command) Nothing + +{- | Gives a mentioned number of @(dependency, version)@ pairs +found in the "hidden package" diagnostic message. + +For example, if a ghc error looks like this: + +> "Could not load module ‘Data.List.Split’ +> It is a member of the hidden package ‘split-0.2.5’. +> Perhaps you need to add ‘split’ to the build-depends in your .cabal file." + +or this if PackageImports extension is used: + +> "Could not find module ‘Data.List.Split’ +> Perhaps you meant +> Data.List.Split (needs flag -package-id split-0.2.5)" + +It extracts mentioned package names and version numbers. +In this example, it will be @[("split", "0.2.5")]@ + +Also supports messages without a version. 
+ +> "Perhaps you need to add ‘split’ to the build-depends in your .cabal file." + +Will turn into @[("split", "")]@ +-} +hiddenPackageSuggestion :: J.Diagnostic -> [(T.Text, T.Text)] +hiddenPackageSuggestion diag = getMatch (msg =~ regex) + where + msg :: T.Text + msg = diag ^. JL.message + regex :: T.Text + regex = + let regex' = "([a-zA-Z0-9-]*[a-zA-Z0-9])(-([0-9\\.]*))?" + in "It is a member of the hidden package [\8216']" + <> regex' + <> "[\8217']" + <> "|" + <> "needs flag -package-id " + <> regex' + -- Have to do this matching because `Regex.TDFA` doesn't(?) support + -- not-capturing groups like (?:message) + getMatch :: (T.Text, T.Text, T.Text, [T.Text]) -> [(T.Text, T.Text)] + getMatch (_, _, _, []) = [] + getMatch (_, _, _, [dependency, _, cleanVersion, "", "", ""]) = [(dependency, cleanVersion)] + getMatch (_, _, _, ["", "", "", dependency, _, cleanVersion]) = [(dependency, cleanVersion)] + getMatch (_, _, _, _) = [] diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Command.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Command.hs new file mode 100644 index 0000000000..83554c6a82 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Command.hs @@ -0,0 +1,232 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE RecordWildCards #-} + +module Ide.Plugin.Cabal.CabalAdd.Command ( + cabalAddDependencyCommandId, + cabalAddModuleCommandId, + addDependencyCommand, + addModuleCommand, + Log, +) +where + +import Control.Monad (void) +import Control.Monad.Except (modifyError) +import Control.Monad.IO.Class (MonadIO, liftIO) +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.Except +import Data.ByteString (ByteString) +import qualified Data.ByteString.Char8 as B +import Data.List.NonEmpty (singleton) +import qualified Data.Text as T +import Data.Text.Encoding (encodeUtf8) +import qualified Data.Text.Encoding as T +import Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE.Core.FileStore (getFileContents) +import Development.IDE.Core.Rules (IdeState) +import Development.IDE.Core.Service (runAction) +import Development.IDE.Core.Shake (useWithStale) +import Distribution.Client.Add as Add +import Distribution.Fields (Field) +import Distribution.PackageDescription +import Distribution.Parsec.Position (Position) +import qualified Distribution.Pretty as CabalPretty +import Ide.Logger +import Ide.Plugin.Cabal.CabalAdd.Types +import Ide.Plugin.Cabal.Completion.Types (ParseCabalFields (..), + ParseCabalFile (..)) +import Ide.Plugin.Cabal.Files +import Ide.Plugin.Cabal.Orphans () +import Ide.Plugin.Error +import Ide.PluginUtils (WithDeletions (SkipDeletions), + diffText) +import Ide.Types (CommandFunction, + pluginGetClientCapabilities, + pluginSendRequest) +import Language.LSP.Protocol.Message (SMethod (SMethod_WorkspaceApplyEdit)) +import Language.LSP.Protocol.Types (ApplyWorkspaceEditParams (ApplyWorkspaceEditParams), + ClientCapabilities, + Null (Null), + VersionedTextDocumentIdentifier, + WorkspaceEdit, + toNormalizedFilePath, + type (|?) 
(InR)) + +-------------------------------------------- +-- Add module to cabal file +-------------------------------------------- + +addModuleCommand :: Recorder (WithPriority Log) -> CommandFunction IdeState ModuleInsertionConfig +addModuleCommand recorder state _ params@(ModuleInsertionConfig{..}) = do + logWith recorder Debug $ LogCalledCabalAddModuleCommand params + caps <- lift pluginGetClientCapabilities + let env = (state, caps, modVerTxtDocId) + edit <- getModuleEdit recorder env targetFile insertionStanza (T.unpack insertionLabel) (T.unpack moduleToInsert) + void $ lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edit) (\_ -> pure ()) + logWith recorder Debug LogExecutedCommand + pure $ InR Null + +{- | Constructs prerequisites for the @executeConfig@ + and runs it, given path to the cabal file and a dependency message. + Given the new contents of the cabal file constructs and returns the @edit@. + + Inspired by @main@ in cabal-add, Distribution.Client.Main +-} +getModuleEdit :: + forall m. + (MonadIO m) => + Recorder (WithPriority Log) -> + (IdeState, ClientCapabilities, VersionedTextDocumentIdentifier) -> + -- | The cabal file to edit. + FilePath -> + -- | The component to add the module to. + ComponentName -> + -- | The specific field in the component to add the module to. + String -> + -- | The module to add. + String -> + ExceptT PluginError m WorkspaceEdit +getModuleEdit recorder env cabalFilePath stanza targetFieldStr modulePath = + mkCabalAddConfig + recorder + env + cabalFilePath + mkConfig + where + mkConfig :: (ByteString -> [Field Position] -> GenericPackageDescription -> ExceptT PluginError m AddConfig) + mkConfig cnfOrigContents fields packDescr = do + compName <- + case Add.resolveComponent cabalFilePath (fields, packDescr) $ Just $ CabalPretty.prettyShow stanza of + Right x -> pure x + Left err -> do + logWith recorder Info $ LogFailedToResolveComponent err + throwE $ PluginInternalError $ T.pack err + pure $ + AddConfig + { cnfOrigContents = cnfOrigContents + , cnfFields = fields + , cnfComponent = compName + , cnfTargetField = if targetFieldStr == "exposed-modules" then ExposedModules else OtherModules + , cnfAdditions = singleton $ B.pack modulePath + } + +-------------------------------------------- +-- Add build dependency to cabal file +-------------------------------------------- + +addDependencyCommand :: Recorder (WithPriority Log) -> CommandFunction IdeState CabalAddDependencyCommandParams +addDependencyCommand recorder state _ params@(CabalAddDependencyCommandParams{..}) = do + logWith recorder Debug $ LogCalledCabalAddDependencyCommand params + let specifiedDep = case depVersion of + Nothing -> depDependency + Just ver -> depDependency <> " ^>=" <> ver + caps <- lift pluginGetClientCapabilities + let env = (state, caps, depVerTxtDocId) + edit <- getDependencyEdit recorder env depCabalPath depBuildTarget (T.unpack specifiedDep) + void $ lift $ pluginSendRequest SMethod_WorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing edit) (\_ -> pure ()) + logWith recorder Debug LogExecutedCommand + pure $ InR Null + +{- | Constructs prerequisites for the @executeConfig@ + and runs it, given path to the cabal file and a dependency message. + Given the new contents of the cabal file constructs and returns the @edit@. + Inspired by @main@ in cabal-add, + Distribution.Client.Main +-} +getDependencyEdit :: + forall m. 
+ (MonadIO m) => + Recorder (WithPriority Log) -> + (IdeState, ClientCapabilities, VersionedTextDocumentIdentifier) -> + FilePath -> + Maybe String -> + String -> + ExceptT PluginError m WorkspaceEdit +getDependencyEdit recorder env cabalFilePath buildTarget dependency = + mkCabalAddConfig recorder env cabalFilePath mkConfig + where + mkConfig :: (ByteString -> [Field Position] -> GenericPackageDescription -> ExceptT PluginError m AddConfig) + mkConfig cnfOrigContents fields packDescr = do + let specVer = specVersion $ packageDescription packDescr + (deps, compName) <- + modifyError (\t -> PluginInternalError $ T.pack t) $ do + deps <- validateDependency specVer dependency + compName <- resolveComponent cabalFilePath (fields, packDescr) buildTarget + pure (deps, compName) + pure $ + AddConfig + { cnfOrigContents = cnfOrigContents + , cnfFields = fields + , cnfComponent = compName + , cnfTargetField = BuildDepends + , cnfAdditions = singleton deps + } + +-------------------------------------------- +-- Shared Functions +-------------------------------------------- + +mkCabalAddConfig :: + (MonadIO m) => + Recorder (WithPriority Log) -> + (IdeState, ClientCapabilities, VersionedTextDocumentIdentifier) -> + -- | The cabal file to edit + FilePath -> + -- | Callback to allow configuration of 'AddConfig' to be used by `cabal-add` + ( ByteString -> + [Field Position] -> + GenericPackageDescription -> + ExceptT PluginError m AddConfig + ) -> + ExceptT PluginError m WorkspaceEdit +mkCabalAddConfig recorder env cabalFilePath mkConfig = do + let (state, caps, verTxtDocId) = env + (mbCnfOrigContents, mbFields, mbPackDescr) <- liftIO $ runAction "cabal.cabal-add" state $ do + contents <- getFileContents $ toNormalizedFilePath cabalFilePath + inFields <- useWithStale ParseCabalFields $ toNormalizedFilePath cabalFilePath + inPackDescr <- useWithStale ParseCabalFile $ toNormalizedFilePath cabalFilePath + let mbCnfOrigContents = case contents of + (Just txt) -> Just $ encodeUtf8 $ Rope.toText txt + _ -> Nothing + let mbFields = fst <$> inFields + let mbPackDescr = fst <$> inPackDescr + pure (mbCnfOrigContents, mbFields, mbPackDescr) + + -- Check if required info was received, + -- otherwise fall back on other options. 
+ (cnfOrigContents, fields, packDescr) <- do + cnfOrigContents <- case mbCnfOrigContents of + (Just cnfOrigContents) -> pure cnfOrigContents + Nothing -> readCabalFile cabalFilePath + (fields, packDescr) <- case (mbFields, mbPackDescr) of + (Just fields, Just packDescr) -> pure (fields, packDescr) + (_, _) -> case parseCabalFile cabalFilePath cnfOrigContents of + Left err -> throwE $ PluginInternalError $ T.pack err + Right (f, gpd) -> pure (f, gpd) + pure (cnfOrigContents, fields, packDescr) + + cabalAddConfig <- mkConfig cnfOrigContents fields packDescr + + case executeAddConfig (validateChanges packDescr) cabalAddConfig of + Nothing -> + throwE $ + PluginInternalError $ + T.pack $ + "Cannot extend " + ++ show (cnfTargetField cabalAddConfig) + ++ " of " + ++ case (cnfComponent cabalAddConfig) of + Right compName -> showComponentName compName + Left commonStanza -> show commonStanza + ++ " in " + ++ cabalFilePath + Just newContents -> do + let edit = diffText caps (verTxtDocId, T.decodeUtf8 cnfOrigContents) (T.decodeUtf8 newContents) SkipDeletions + logWith recorder Debug $ LogCreatedEdit edit + pure edit diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Types.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Types.hs new file mode 100644 index 0000000000..62d6b7a7d3 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/CabalAdd/Types.hs @@ -0,0 +1,104 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExplicitNamespaces #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE RecordWildCards #-} + +module Ide.Plugin.Cabal.CabalAdd.Types where + +import Data.Aeson.Types (FromJSON, ToJSON) +import Data.String (IsString) +import qualified Data.Text as T +import Distribution.Compat.Prelude (Generic) +import Distribution.PackageDescription +import Ide.Logger +import Ide.Plugin.Cabal.Orphans () +import Language.LSP.Protocol.Types + +data Log + = LogFoundResponsibleCabalFile FilePath + | LogCalledCabalAddDependencyCommand CabalAddDependencyCommandParams + | LogCalledCabalAddModuleCommand ModuleInsertionConfig + | LogCreatedEdit WorkspaceEdit + | LogExecutedCommand + | LogFailedToResolveComponent String + deriving (Show) + +instance Pretty Log where + pretty = \case + LogFoundResponsibleCabalFile fp -> "Located the responsible cabal file at " <+> pretty fp + LogCalledCabalAddDependencyCommand params -> "Called CabalAddDependency command with:\n" <+> pretty params + LogCalledCabalAddModuleCommand params -> "Called CabalAddModule command with:\n" <+> pretty params + LogCreatedEdit edit -> "Created inplace edit:\n" <+> pretty edit + LogExecutedCommand -> "Executed CabalAdd command" + LogFailedToResolveComponent cS -> "Failed to resolve component in CabalAdd with error:" <+> viaShow cS + +cabalAddDependencyCommandId :: (IsString p) => p +cabalAddDependencyCommandId = "cabalAddDependency" + +cabalAddModuleCommandId :: (IsString p) => p +cabalAddModuleCommandId = "cabalAddModule" + +-- | Relevant data needed to add a module to a cabal file. +-- +-- This will be sent as json to the client with a code action we offer to add this dependency to a cabal file. +-- If the user decides to execute the corresponding code action, the client sends us this data again, and we then +-- use it to execute the `CabalAddDependencyCommand`. 
+data ModuleInsertionConfig = ModuleInsertionConfig + { targetFile :: FilePath + -- ^ The file we want to insert information about the new module into. + , moduleToInsert :: T.Text + -- ^ The module name of the module to be inserted into the targetFile at the insertionPosition. + , modVerTxtDocId :: VersionedTextDocumentIdentifier + , insertionStanza :: ComponentName + -- ^ Which stanza the module will be inserted into. + , insertionLabel :: T.Text + -- ^ A label which describes which field the module will be inserted into. + } + deriving (Show, Eq, Ord, Generic) + deriving anyclass (FromJSON, ToJSON) + +instance Pretty ModuleInsertionConfig where + pretty ModuleInsertionConfig{..} = + "CabalAddModule parameters:" + <+> vcat + [ "cabal path:" <+> pretty targetFile + , "target:" <+> pretty moduleToInsert + , "stanza:" <+> viaShow insertionStanza + , "label:" <+> pretty insertionLabel + ] + +-- | Contains all source directories of a stanza with the name of the first parameter. +data StanzaItem = StanzaItem + { siComponent :: ComponentName + , siHsSourceDirs :: [FilePath] + } + deriving (Show) + +-- | Relevant data needed to add a dependency to a cabal file. +-- +-- This will be sent as json to the client with a code action we offer to add this dependency to a cabal file. +-- If the user decides to execute the corresponding code action, the client sends us this data again, and we then +-- use it to execute the `CabalAddDependencyCommand`. +data CabalAddDependencyCommandParams = CabalAddDependencyCommandParams + { depCabalPath :: FilePath + , depVerTxtDocId :: VersionedTextDocumentIdentifier + , depBuildTarget :: Maybe String + , depDependency :: T.Text + , depVersion :: Maybe T.Text + } + deriving (Generic, Show) + deriving anyclass (FromJSON, ToJSON) + +instance Pretty CabalAddDependencyCommandParams where + pretty CabalAddDependencyCommandParams{..} = + "CabalAddDependency parameters:" + <+> vcat + [ "cabal path:" <+> pretty depCabalPath + , "target:" <+> pretty depBuildTarget + , "dependendency:" <+> pretty depDependency + , "version:" <+> pretty depVersion + ] diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Files.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Files.hs new file mode 100644 index 0000000000..28cf1e39a8 --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Files.hs @@ -0,0 +1,56 @@ +module Ide.Plugin.Cabal.Files where + +import Control.Monad (filterM) +import Control.Monad.IO.Class (MonadIO, liftIO) +import Control.Monad.Trans.Except +import Data.ByteString (ByteString) +import qualified Data.ByteString.Char8 as B +import qualified Data.Text as T +import Distribution.PackageDescription.Quirks (patchQuirks) +import Distribution.Simple.Utils (safeHead) +import Ide.Plugin.Cabal.Orphans () +import Ide.Plugin.Error +import System.Directory (doesFileExist, + listDirectory) +import System.FilePath + +{- | Given a path to a haskell file, returns the closest cabal file. + If a package.yaml is present in same directory as the .cabal file, returns nothing, + because adding a dependency to a generated cabal file will break propagation of changes + from package.yaml to cabal files in stack projects. + If cabal file wasn't found, returns Nothing. 
+-} +findResponsibleCabalFile :: FilePath -> IO (Maybe FilePath) +findResponsibleCabalFile haskellFilePath = do + let dirPath = dropFileName haskellFilePath + allDirPaths = reverse $ scanl1 () (splitPath dirPath) -- sorted from most to least specific + go allDirPaths + where + go [] = pure Nothing + go (path : ps) = do + objects <- listDirectory path + let objectsWithPaths = map (\obj -> path <> obj) objects + objectsCabalExtension = filter (\c -> takeExtension c == ".cabal") objectsWithPaths + cabalFiles <- filterM (\c -> doesFileExist c) objectsCabalExtension + case safeHead cabalFiles of + Nothing -> go ps + Just cabalFile -> guardAgainstHpack path cabalFile + where + guardAgainstHpack :: FilePath -> FilePath -> IO (Maybe FilePath) + guardAgainstHpack path cabalFile = do + exists <- doesFileExist $ path "package.yaml" + if exists then pure Nothing else pure $ Just cabalFile + +{- | Gives a cabal file's contents or throws error. + + Inspired by @readCabalFile@ in cabal-add, Distribution.Client.Main + + This is a fallback option! + Use only if the `GetFileContents` fails. +-} +readCabalFile :: (MonadIO m) => FilePath -> ExceptT PluginError m ByteString +readCabalFile fileName = do + cabalFileExists <- liftIO $ doesFileExist fileName + if cabalFileExists + then snd . patchQuirks <$> liftIO (B.readFile fileName) + else throwE $ PluginInternalError $ T.pack ("Failed to read cabal file at " <> fileName) diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/OfInterest.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/OfInterest.hs new file mode 100644 index 0000000000..67cf97ccee --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/OfInterest.hs @@ -0,0 +1,122 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + +module Ide.Plugin.Cabal.OfInterest (ofInterestRules, getCabalFilesOfInterestUntracked, addFileOfInterest, deleteFileOfInterest, kick, Log) where + +import Control.Concurrent.Strict +import Control.DeepSeq +import Control.Monad.IO.Class +import qualified Data.ByteString as BS +import Data.Hashable +import Data.HashMap.Strict (HashMap) +import qualified Data.HashMap.Strict as HashMap +import Data.Proxy +import qualified Data.Text () +import Development.IDE as D +import qualified Development.IDE.Core.Shake as Shake +import Development.IDE.Graph (Key, alwaysRerun) +import Development.IDE.Types.Shake (toKey) +import GHC.Generics +import qualified Ide.Plugin.Cabal.Completion.Types as Types +import Ide.Plugin.Cabal.Orphans () + +data Log + = LogShake Shake.Log + | LogFOI (HashMap NormalizedFilePath FileOfInterestStatus) + deriving (Show) + +instance Pretty Log where + pretty = \case + LogShake log' -> pretty log' + LogFOI files -> + "Set files of interest to:" <+> viaShow files + +-- ---------------------------------------------------------------- +-- Cabal file of interest rules and global variable +-- ---------------------------------------------------------------- + +{- | Cabal files that are currently open in the lsp-client. +Specific actions happen when these files are saved, closed or modified, +such as generating diagnostics, re-parsing, etc... + +We need to store the open files to parse them again if we restart the shake session. +Restarting of the shake session happens whenever these files are modified. 
+-} +newtype OfInterestCabalVar = OfInterestCabalVar (Var (HashMap NormalizedFilePath FileOfInterestStatus)) + +instance Shake.IsIdeGlobal OfInterestCabalVar + +data IsCabalFileOfInterest = IsCabalFileOfInterest + deriving (Eq, Show, Generic) +instance Hashable IsCabalFileOfInterest +instance NFData IsCabalFileOfInterest + +type instance RuleResult IsCabalFileOfInterest = CabalFileOfInterestResult + +data CabalFileOfInterestResult = NotCabalFOI | IsCabalFOI FileOfInterestStatus + deriving (Eq, Show, Generic) +instance Hashable CabalFileOfInterestResult +instance NFData CabalFileOfInterestResult + +{- | The rule that initialises the files of interest state. + +Needs to be run on start-up. +-} +ofInterestRules :: Recorder (WithPriority Log) -> Rules () +ofInterestRules recorder = do + Shake.addIdeGlobal . OfInterestCabalVar =<< liftIO (newVar HashMap.empty) + Shake.defineEarlyCutoff (cmapWithPrio LogShake recorder) $ RuleNoDiagnostics $ \IsCabalFileOfInterest f -> do + alwaysRerun + filesOfInterest <- getCabalFilesOfInterestUntracked + let foi = maybe NotCabalFOI IsCabalFOI $ f `HashMap.lookup` filesOfInterest + fp = summarize foi + res = (Just fp, Just foi) + return res + where + summarize NotCabalFOI = BS.singleton 0 + summarize (IsCabalFOI OnDisk) = BS.singleton 1 + summarize (IsCabalFOI (Modified False)) = BS.singleton 2 + summarize (IsCabalFOI (Modified True)) = BS.singleton 3 + +getCabalFilesOfInterestUntracked :: Action (HashMap NormalizedFilePath FileOfInterestStatus) +getCabalFilesOfInterestUntracked = do + OfInterestCabalVar var <- Shake.getIdeGlobalAction + liftIO $ readVar var + +addFileOfInterest :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> FileOfInterestStatus -> IO [Key] +addFileOfInterest recorder state f v = do + OfInterestCabalVar var <- Shake.getIdeGlobalState state + (prev, files) <- modifyVar var $ \dict -> do + let (prev, new) = HashMap.alterF (,Just v) f dict + pure (new, (prev, new)) + if prev /= Just v + then do + log' Debug $ LogFOI files + return [toKey IsCabalFileOfInterest f] + else return [] + where + log' = logWith recorder + +deleteFileOfInterest :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> IO [Key] +deleteFileOfInterest recorder state f = do + OfInterestCabalVar var <- Shake.getIdeGlobalState state + files <- modifyVar' var $ HashMap.delete f + log' Debug $ LogFOI files + return [toKey IsFileOfInterest f] + where + log' = logWith recorder + +{- | This is the kick function for the cabal plugin. +We run this action, whenever we shake session us run/restarted, which triggers +actions to produce diagnostics for cabal files. + +It is paramount that this kick-function can be run quickly, since it is a blocking +function invocation. 
+-} +kick :: Action () +kick = do + files <- HashMap.keys <$> getCabalFilesOfInterestUntracked + Shake.runWithSignal (Proxy @"kick/start/cabal") (Proxy @"kick/done/cabal") files Types.ParseCabalFile diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Orphans.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Orphans.hs index 2264d5390f..8ecb361025 100644 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Orphans.hs +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Orphans.hs @@ -1,8 +1,14 @@ {-# OPTIONS_GHC -Wno-orphans #-} module Ide.Plugin.Cabal.Orphans where import Control.DeepSeq +import Data.Aeson +import qualified Data.Aeson as Aeson +import qualified Data.Aeson.Types as Aeson +import qualified Data.Text as T import Distribution.Fields.Field -import Distribution.Parsec.Position +import Distribution.PackageDescription (ComponentName) +import Distribution.Parsec +import Distribution.Pretty (prettyShow) -- ---------------------------------------------------------------- -- Cabal-syntax orphan instances we need sometimes @@ -22,3 +28,12 @@ instance NFData (SectionArg Position) where rnf (SecArgName ann bs) = rnf ann `seq` rnf bs rnf (SecArgStr ann bs) = rnf ann `seq` rnf bs rnf (SecArgOther ann bs) = rnf ann `seq` rnf bs + +instance ToJSON ComponentName where + toJSON = Aeson.String . T.pack . prettyShow + +instance FromJSON ComponentName where + parseJSON = Aeson.withText "ComponentName" $ \t -> + case eitherParsec (T.unpack t) of + Left err -> Aeson.parseFail err + Right r -> pure r diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Parse.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Parse.hs index e949af1b1d..f2b3d74639 100644 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Parse.hs +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Parse.hs @@ -22,9 +22,9 @@ import qualified Distribution.Parsec.Position as Syntax parseCabalFileContents :: BS.ByteString -- ^ UTF-8 encoded bytestring - -> IO ([PWarning], Either (Maybe Version, NonEmpty PError) GenericPackageDescription) + -> ([PWarning], Either (Maybe Version, NonEmpty PError) GenericPackageDescription) parseCabalFileContents bs = - pure $ runParseResult (parseGenericPackageDescription bs) + runParseResult (parseGenericPackageDescription bs) readCabalFields :: NormalizedFilePath -> diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Rules.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Rules.hs new file mode 100644 index 0000000000..de7bb9a5fd --- /dev/null +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal/Rules.hs @@ -0,0 +1,160 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeFamilies #-} + +module Ide.Plugin.Cabal.Rules (cabalRules, Log) where + +import Control.Monad.IO.Class +import qualified Data.ByteString as BS +import qualified Data.List as List +import qualified Data.List.NonEmpty as NE +import qualified Data.Maybe as Maybe +import qualified Data.Text () +import qualified Data.Text as T +import qualified Data.Text.Encoding as Encoding +import Data.Text.Utf16.Rope.Mixed as Rope +import Development.IDE as D +import qualified Development.IDE.Core.Shake as Shake +import qualified Distribution.CabalSpecVersion as Cabal +import qualified Distribution.Fields as Syntax +import Distribution.Parsec.Error +import qualified Ide.Plugin.Cabal.Completion.Data as Data +import Ide.Plugin.Cabal.Completion.Types (ParseCabalCommonSections (ParseCabalCommonSections), + ParseCabalFields (..), + 
ParseCabalFile (..)) +import qualified Ide.Plugin.Cabal.Diagnostics as Diagnostics +import qualified Ide.Plugin.Cabal.OfInterest as OfInterest +import Ide.Plugin.Cabal.Orphans () +import qualified Ide.Plugin.Cabal.Parse as Parse +import Ide.Types +import Text.Regex.TDFA + +data Log + = LogModificationTime NormalizedFilePath FileVersion + | LogShake Shake.Log + | LogOfInterest OfInterest.Log + | LogDocSaved Uri + deriving (Show) + +instance Pretty Log where + pretty = \case + LogShake log' -> pretty log' + LogOfInterest log' -> pretty log' + LogModificationTime nfp modTime -> + "Modified:" <+> pretty (fromNormalizedFilePath nfp) <+> pretty (show modTime) + LogDocSaved uri -> + "Saved text document:" <+> pretty (getUri uri) + +cabalRules :: Recorder (WithPriority Log) -> PluginId -> Rules () +cabalRules recorder plId = do + -- Make sure we initialise the cabal files-of-interest. + OfInterest.ofInterestRules (cmapWithPrio LogOfInterest recorder) + -- Rule to produce diagnostics for cabal files. + define (cmapWithPrio LogShake recorder) $ \ParseCabalFields file -> do + config <- getPluginConfigAction plId + if not (plcGlobalOn config && plcDiagnosticsOn config) + then pure ([], Nothing) + else do + -- whenever this key is marked as dirty (e.g., when a user writes stuff to it), + -- we rerun this rule because this rule *depends* on GetModificationTime. + (t, mCabalSource) <- use_ GetFileContents file + log' Debug $ LogModificationTime file t + contents <- case mCabalSource of + Just sources -> + pure $ Encoding.encodeUtf8 $ Rope.toText sources + Nothing -> do + liftIO $ BS.readFile $ fromNormalizedFilePath file + + case Parse.readCabalFields file contents of + Left _ -> + pure ([], Nothing) + Right fields -> + pure ([], Just fields) + + define (cmapWithPrio LogShake recorder) $ \ParseCabalCommonSections file -> do + fields <- use_ ParseCabalFields file + let commonSections = + Maybe.mapMaybe + ( \case + commonSection@(Syntax.Section (Syntax.Name _ "common") _ _) -> Just commonSection + _ -> Nothing + ) + fields + pure ([], Just commonSections) + + define (cmapWithPrio LogShake recorder) $ \ParseCabalFile file -> do + config <- getPluginConfigAction plId + if not (plcGlobalOn config && plcDiagnosticsOn config) + then pure ([], Nothing) + else do + -- whenever this key is marked as dirty (e.g., when a user writes stuff to it), + -- we rerun this rule because this rule *depends* on GetModificationTime. + (t, mCabalSource) <- use_ GetFileContents file + log' Debug $ LogModificationTime file t + contents <- case mCabalSource of + Just sources -> + pure $ Encoding.encodeUtf8 $ Rope.toText sources + Nothing -> do + liftIO $ BS.readFile $ fromNormalizedFilePath file + + -- Instead of fully reparsing the sources to get a 'GenericPackageDescription', + -- we would much rather re-use the already parsed results of 'ParseCabalFields'. + -- Unfortunately, Cabal-syntax doesn't expose the function 'parseGenericPackageDescription'' + -- which allows us to resume the parsing pipeline with '[Field Position]'. + let (pWarnings, pm) = Parse.parseCabalFileContents contents + let warningDiags = fmap (Diagnostics.warningDiagnostic file) pWarnings + case pm of + Left (_cabalVersion, pErrorNE) -> do + let regexUnknownCabalBefore310 :: T.Text + -- We don't support the cabal version, this should not be an error, as the + -- user did not do anything wrong. 
Instead we cast it to a warning + regexUnknownCabalBefore310 = "Unsupported cabal-version [0-9]+.[0-9]*" + regexUnknownCabalVersion :: T.Text + regexUnknownCabalVersion = "Unsupported cabal format version in cabal-version field: [0-9]+.[0-9]+" + unsupportedCabalHelpText = + unlines + [ "The used `cabal-version` is not fully supported by this `HLS` binary." + , "Either the `cabal-version` is unknown, or too new for this executable." + , "This means that some functionality might not work as expected." + , "If you face any issues, try downgrading to a supported `cabal-version` or upgrading `HLS` if possible." + , "" + , "Supported versions are: " + <> List.intercalate + ", " + (fmap Cabal.showCabalSpecVersion Data.supportedCabalVersions) + ] + errorDiags = + NE.toList $ + NE.map + ( \pe@(PError pos text) -> + if any + (text =~) + [ regexUnknownCabalBefore310 + , regexUnknownCabalVersion + ] + then + Diagnostics.warningDiagnostic + file + ( Syntax.PWarning Syntax.PWTOther pos $ + unlines + [ text + , unsupportedCabalHelpText + ] + ) + else Diagnostics.errorDiagnostic file pe + ) + pErrorNE + allDiags = errorDiags <> warningDiags + pure (allDiags, Nothing) + Right gpd -> do + pure (warningDiags, Just gpd) + + action $ do + -- Run the cabal kick. This code always runs when 'shakeRestart' is run. + -- Must be careful to not impede the performance too much. Crucial to + -- a snappy IDE experience. + OfInterest.kick + where + log' = logWith recorder diff --git a/plugins/hls-cabal-plugin/test/CabalAdd.hs b/plugins/hls-cabal-plugin/test/CabalAdd.hs index 6517c811fe..8cbac90e43 100644 --- a/plugins/hls-cabal-plugin/test/CabalAdd.hs +++ b/plugins/hls-cabal-plugin/test/CabalAdd.hs @@ -1,56 +1,112 @@ -{-# LANGUAGE OverloadedStrings #-} - +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} module CabalAdd ( - cabalAddTests, + cabalAddDependencyTests, + cabalAddModuleTests, ) where -import Control.Lens ((^.)) -import Control.Lens.Fold ((^?)) -import qualified Data.Maybe as Maybe -import qualified Data.Text as T -import qualified Data.Text.Internal.Search as T -import Distribution.Utils.Generic (safeHead) -import Ide.Plugin.Cabal.CabalAdd (hiddenPackageSuggestion) -import qualified Language.LSP.Protocol.Lens as L -import Language.LSP.Protocol.Types (Diagnostic (..), mkRange) +import Control.Lens ((^.)) +import Control.Lens.Fold ((^?)) +import qualified Data.Maybe as Maybe +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Data.Text.Internal.Search as T +import Distribution.ModuleName (fromString) +import Distribution.PackageDescription +import Distribution.PackageDescription.Configuration (flattenPackageDescription) +import qualified Distribution.Pretty as Pretty +import Distribution.Types.Component +import Distribution.Utils.Generic (safeHead) +import Ide.Plugin.Cabal.CabalAdd.CodeAction (hiddenPackageSuggestion) +import Ide.Plugin.Cabal.Parse (parseCabalFileContents) +import qualified Language.LSP.Protocol.Lens as L +import qualified Language.LSP.Protocol.Types as J import System.FilePath -import Test.Hls (Session, TestTree, _R, anyMessage, - assertEqual, documentContents, - executeCodeAction, - getAllCodeActions, - getDocumentEdit, liftIO, openDoc, - skipManyTill, testCase, testGroup, - waitForDiagnosticsFrom, (@?=)) +import Test.Hls import Utils -cabalAddTests :: TestTree -cabalAddTests = +cabalAddModuleTests :: TestTree +cabalAddModuleTests = + testGroup + "Add Module" + [ runHaskellTestCaseSession "Add to benchmark" ("cabal-add-module" 
"library") $ do + let compName = CBenchName "test1" + pd <- generateAddDependencyTestSession "test.cabal" "Main.hs" compName + checkModuleAddedTo pd "Main" compName + , runHaskellTestCaseSession "Add to executable" ("cabal-add-module" "library") $ do + let compName = CExeName "test" + pd <- generateAddDependencyTestSession "test.cabal" "Main.hs" compName + checkModuleAddedTo pd "Main" compName + , runHaskellTestCaseSession "Add to test-suite" ("cabal-add-module" "library") $ do + let compName = CTestName "test2" + pd <- generateAddDependencyTestSession "test.cabal" "Main.hs" compName + checkModuleAddedTo pd "Main" compName + , runHaskellTestCaseSession "Add to library" ("cabal-add-module" "library") $ do + let compName = CLibName $ LSubLibName "test3" + pd <- generateAddDependencyTestSession "test.cabal" "Main.hs" compName + checkModuleAddedTo pd "Main" compName + , runHaskellTestCaseSession "Add to main library" ("cabal-add-module" "library") $ do + let compName = CLibName LMainLibName + pd <- generateAddDependencyTestSession "test.cabal" "Main.hs" compName + checkModuleAddedTo pd "Main" compName + ] + where + generateAddDependencyTestSession :: FilePath -> FilePath -> ComponentName -> Session PackageDescription + generateAddDependencyTestSession cabalFile haskellFile compName = do + haskellDoc <- openDoc haskellFile "haskell" + cabalDoc <- openDoc cabalFile "cabal" + _ <- waitForDiagnosticsFrom haskellDoc + cas <- Maybe.mapMaybe (^? _R) <$> getAllCodeActions haskellDoc + let selectedCas = filter (\ca -> (T.pack $ "Add to " <> Pretty.prettyShow compName <> " ") `T.isPrefixOf` (ca ^. L.title)) cas + mapM_ executeCodeAction $ selectedCas + _ <- skipManyTill anyMessage $ getDocumentEdit cabalDoc -- Wait for the changes in cabal file + contents <- documentContents cabalDoc + case parseCabalFileContents $ T.encodeUtf8 contents of + (_, Right gpd) -> pure $ flattenPackageDescription gpd + _ -> liftIO $ assertFailure "could not parse cabal file to gpd" + + -- | Verify that the given module was added to the desired component. + -- Note that we do not care whether it was added to exposed-modules or other-modules of that component. 
+ checkModuleAddedTo :: PackageDescription -> String -> ComponentName -> Session () + checkModuleAddedTo pd modName compName = do + let comp = getComponent pd compName + compModules = case comp of + CLib lib -> explicitLibModules lib + CFLib fLib -> foreignLibModules fLib + CExe exe -> exeModules exe + CTest test -> testModules test + CBench bench -> benchmarkModules bench + testDescription = modName <> " was added to " <> showComponentName compName + liftIO $ assertBool testDescription $ fromString modName `elem` compModules + +cabalAddDependencyTests :: TestTree +cabalAddDependencyTests = testGroup - "CabalAdd Tests" - [ runHaskellTestCaseSession "Code Actions - Can add hidden package to an executable" ("cabal-add-testdata" "cabal-add-exe") + "Add dependency" + [ runHaskellTestCaseSession "Add to executable" ("cabal-add-testdata" "cabal-add-exe") (generateAddDependencyTestSession "cabal-add-exe.cabal" ("src" "Main.hs") "split" [253]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to a library" ("cabal-add-testdata" "cabal-add-lib") + , runHaskellTestCaseSession "Add to library" ("cabal-add-testdata" "cabal-add-lib") (generateAddDependencyTestSession "cabal-add-lib.cabal" ("src" "MyLib.hs") "split" [348]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to a test" ("cabal-add-testdata" "cabal-add-tests") + , runHaskellTestCaseSession "Add to testsuite" ("cabal-add-testdata" "cabal-add-tests") (generateAddDependencyTestSession "cabal-add-tests.cabal" ("test" "Main.hs") "split" [478]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to a test with PackageImports" ("cabal-add-testdata" "cabal-add-tests") + , runHaskellTestCaseSession "Add to testsuite with PackageImports" ("cabal-add-testdata" "cabal-add-tests") (generateAddDependencyTestSession "cabal-add-tests.cabal" ("test" "MainPackageImports.hs") "split" [731]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to a benchmark" ("cabal-add-testdata" "cabal-add-bench") + , runHaskellTestCaseSession "Add to benchmark" ("cabal-add-testdata" "cabal-add-bench") (generateAddDependencyTestSession "cabal-add-bench.cabal" ("bench" "Main.hs") "split" [403]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to an executable, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") + , runHaskellTestCaseSession "Add to executable, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("src" "Main.hs") "split" [269]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to a library, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") + , runHaskellTestCaseSession "Add to library, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("lib" "MyLib.hs") "split" [413]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to an internal library, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") + , runHaskellTestCaseSession "Add to internal library, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("lib" "InternalLib.hs") "split" [413]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to a test, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") + , runHaskellTestCaseSession "Add to testsuite, multiple targets" 
("cabal-add-testdata" "cabal-add-multitarget") (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("test" "Main.hs") "split" [655]) - , runHaskellTestCaseSession "Code Actions - Can add hidden package to a benchmark, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") + , runHaskellTestCaseSession "Add to benchmark, multiple targets" ("cabal-add-testdata" "cabal-add-multitarget") (generateAddDependencyTestSession "cabal-add-multitarget.cabal" ("bench" "Main.hs") "split" [776]) - , runHaskellTestCaseSession "Code Actions - Guard against HPack" ("cabal-add-testdata" "cabal-add-packageYaml") + , runHaskellTestCaseSession "Guard against HPack" ("cabal-add-testdata" "cabal-add-packageYaml") (generatePackageYAMLTestSession ("src" "Main.hs")) , testHiddenPackageSuggestions "Check CabalAdd's parser, no version" @@ -156,7 +212,7 @@ cabalAddTests = liftIO $ assertEqual (T.unpack dependency <> " isn't found in the cabal file") indicesRes (T.indices dependency contents) testHiddenPackageSuggestions :: String -> [T.Text] -> [(T.Text, T.Text)] -> TestTree testHiddenPackageSuggestions testTitle messages suggestions = - let diags = map (\msg -> messageToDiagnostic msg ) messages + let diags = map (\msg -> messageToDiagnostic msg) messages suggestions' = map (safeHead . hiddenPackageSuggestion) diags assertions = zipWith (@?=) suggestions' (map Just suggestions) testNames = map (\(f, s) -> "Check if " ++ T.unpack f ++ (if s == "" then "" else "-") ++ T.unpack s ++ " was parsed correctly") suggestions @@ -164,20 +220,19 @@ cabalAddTests = in test messageToDiagnostic :: T.Text -> Diagnostic messageToDiagnostic msg = Diagnostic { - _range = mkRange 0 0 0 0 - , _severity = Nothing - , _code = Nothing - , _source = Nothing - , _message = msg - , _relatedInformation = Nothing - , _tags = Nothing - , _codeDescription = Nothing - , _data_ = Nothing + J._range = mkRange 0 0 0 0 + , J._severity = Nothing + , J._code = Nothing + , J._source = Nothing + , J._message = msg + , J._relatedInformation = Nothing + , J._tags = Nothing + , J._codeDescription = Nothing + , J._data_ = Nothing } - generatePackageYAMLTestSession :: FilePath -> Session () - generatePackageYAMLTestSession haskellFile = do + generatePackageYAMLTestSession haskellFile = do hsdoc <- openDoc haskellFile "haskell" _ <- waitForDiagnosticsFrom hsdoc cas <- Maybe.mapMaybe (^? _R) <$> getAllCodeActions hsdoc diff --git a/plugins/hls-cabal-plugin/test/Main.hs b/plugins/hls-cabal-plugin/test/Main.hs index fcb85a081e..a390d8982a 100644 --- a/plugins/hls-cabal-plugin/test/Main.hs +++ b/plugins/hls-cabal-plugin/test/Main.hs @@ -6,7 +6,8 @@ module Main ( main, ) where -import CabalAdd (cabalAddTests) +import CabalAdd (cabalAddDependencyTests, + cabalAddModuleTests) import Completer (completerTests) import Context (contextTests) import Control.Lens ((^.)) @@ -58,7 +59,8 @@ cabalParserUnitTests = testGroup "Parsing Cabal" [ testCase "Simple Parsing works" $ do - (warnings, pm) <- Lib.parseCabalFileContents =<< BS.readFile (testDataDir "simple.cabal") + fileContents <- BS.readFile (testDataDir "simple.cabal") + let (warnings, pm) = Lib.parseCabalFileContents fileContents liftIO $ do null warnings @? "Found unexpected warnings" isRight pm @? 
"Failed to parse GenericPackageDescription" @@ -89,7 +91,7 @@ codeActionUnitTests = maxCompletions = 100 --- ------------------------ ------------------------------------------------ +-- ------------------------------------------------------------------------ -- Integration Tests -- ------------------------------------------------------------------------ @@ -208,7 +210,8 @@ codeActionTests = testGroup "Code Actions" ]) cas mapM_ executeCodeAction selectedCas pure () - , cabalAddTests + , cabalAddDependencyTests + , cabalAddModuleTests ] where getLicenseAction :: T.Text -> [Command |? CodeAction] -> [CodeAction] diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/Main.hs b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/Main.hs new file mode 100644 index 0000000000..c2e4af9606 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/Main.hs @@ -0,0 +1,4 @@ +module Main where + +main :: IO () +main = undefined diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/cabal.project b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/cabal.project new file mode 100644 index 0000000000..e6fdbadb43 --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/cabal.project @@ -0,0 +1 @@ +packages: . diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/hie.yaml b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/hie.yaml new file mode 100644 index 0000000000..04cd24395e --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/hie.yaml @@ -0,0 +1,2 @@ +cradle: + cabal: diff --git a/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/test.cabal b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/test.cabal new file mode 100644 index 0000000000..bb6dc95f2f --- /dev/null +++ b/plugins/hls-cabal-plugin/test/testdata/cabal-add-module/library/test.cabal @@ -0,0 +1,26 @@ +cabal-version: 3.0 +name: test +version: 0.1.0.0 +build-type: Simple + +library + hs-source-dirs: . 
+ exposed-modules: + build-depends: base + default-language: Haskell2010 + +executable test + main-is: bla + build-depends: base + +benchmark test1 + type: exitcode-stdio-1.0 + main-is: bla + build-depends: base + +test-suite test2 + type: exitcode-stdio-1.0 + main-is: bla + build-depends: base + +library test3 diff --git a/stack-lts22.yaml b/stack-lts22.yaml index 2b22e7ad8e..8f2e088ad7 100644 --- a/stack-lts22.yaml +++ b/stack-lts22.yaml @@ -38,7 +38,7 @@ extra-deps: - trial-optparse-applicative-0.0.0.0 - trial-tomland-0.0.0.0 - validation-selective-0.2.0.0 - - cabal-add-0.1 + - cabal-add-0.2 - cabal-install-parsers-0.6.1.1 - directory-ospath-streaming-0.2.2 @@ -56,8 +56,6 @@ flags: ghc-lib: true retrie: BuildExecutable: false - cabal-add: - cabal-syntax: true # stan dependencies directory-ospath-streaming: os-string: false diff --git a/stack.yaml b/stack.yaml index 0699726771..745ceff332 100644 --- a/stack.yaml +++ b/stack.yaml @@ -36,7 +36,7 @@ extra-deps: - trial-optparse-applicative-0.0.0.0 - trial-tomland-0.0.0.0 - directory-ospath-streaming-0.2.2 - + - cabal-add-0.2 configure-options: ghcide: - --disable-library-for-ghci @@ -50,8 +50,6 @@ flags: ghc-lib: true retrie: BuildExecutable: false - cabal-add: - cabal-syntax: true # stan dependencies directory-ospath-streaming: os-string: false From 7346145920cc581b7c25b6b37097973f2f980d34 Mon Sep 17 00:00:00 2001 From: fendor Date: Thu, 7 Aug 2025 15:27:31 +0200 Subject: [PATCH 84/90] Upgrade to hie-bios 0.17.0 --- cabal.project | 2 +- ghcide/ghcide.cabal | 2 +- stack-lts22.yaml | 2 +- stack.yaml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/cabal.project b/cabal.project index fed144eb90..8d8bd080af 100644 --- a/cabal.project +++ b/cabal.project @@ -7,7 +7,7 @@ packages: ./hls-test-utils -index-state: 2025-07-09T16:51:20Z +index-state: 2025-08-08T12:31:54Z tests: True test-show-details: direct diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal index 6c2faa59a2..7dd12f9fef 100644 --- a/ghcide/ghcide.cabal +++ b/ghcide/ghcide.cabal @@ -73,7 +73,7 @@ library , Glob , haddock-library >=1.8 && <1.12 , hashable - , hie-bios ^>=0.16.0 + , hie-bios ^>=0.17.0 , hiedb ^>= 0.7.0.0 , hls-graph == 2.11.0.0 , hls-plugin-api == 2.11.0.0 diff --git a/stack-lts22.yaml b/stack-lts22.yaml index 8f2e088ad7..429125333a 100644 --- a/stack-lts22.yaml +++ b/stack-lts22.yaml @@ -20,7 +20,7 @@ extra-deps: - Diff-0.5 - floskell-0.11.1 - hiedb-0.7.0.0 - - hie-bios-0.16.0 + - hie-bios-0.17.0 - hie-compat-0.3.1.2 - implicit-hie-0.1.4.0 - lsp-2.7.0.0 diff --git a/stack.yaml b/stack.yaml index 745ceff332..43cb239b34 100644 --- a/stack.yaml +++ b/stack.yaml @@ -23,7 +23,7 @@ extra-deps: - hiedb-0.7.0.0 - hie-compat-0.3.1.2 - implicit-hie-0.1.4.0 - - hie-bios-0.16.0 + - hie-bios-0.17.0 - hw-fingertree-0.1.2.1 - monad-dijkstra-0.1.1.5 - retrie-1.2.3 From d18697ce1393c517aaea9b95b4b9691ff35576d5 Mon Sep 17 00:00:00 2001 From: Fendor Date: Sat, 7 Jun 2025 14:54:14 +0200 Subject: [PATCH 85/90] Reload .cabal files when they are modified --- .../session-loader/Development/IDE/Session.hs | 2 +- ghcide/src/Development/IDE/Core/FileStore.hs | 31 +++++++- ghcide/src/Development/IDE/Core/RuleTypes.hs | 8 ++ ghcide/src/Development/IDE/Core/Rules.hs | 9 ++- haskell-language-server.cabal | 1 + .../hls-cabal-plugin/src/Ide/Plugin/Cabal.hs | 14 +++- plugins/hls-cabal-plugin/test/Main.hs | 73 +++++++++++++++++-- plugins/hls-cabal-plugin/test/Utils.hs | 9 +++ .../test/testdata/simple-reload/Main.hs | 9 +++ .../test/testdata/simple-reload/cabal.project | 1 + 
.../test/testdata/simple-reload/hie.yaml | 2 + .../simple-reload/simple-reload.cabal | 14 ++++ 12 files changed, 162 insertions(+), 11 deletions(-) create mode 100644 plugins/hls-cabal-plugin/test/testdata/simple-reload/Main.hs create mode 100644 plugins/hls-cabal-plugin/test/testdata/simple-reload/cabal.project create mode 100644 plugins/hls-cabal-plugin/test/testdata/simple-reload/hie.yaml create mode 100644 plugins/hls-cabal-plugin/test/testdata/simple-reload/simple-reload.cabal diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs index fb777338b3..dde1cfdea5 100644 --- a/ghcide/session-loader/Development/IDE/Session.hs +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -499,7 +499,7 @@ loadSessionWithOptions recorder SessionLoadingOptions{..} rootDir que = do hscEnv <- emptyHscEnv ideNc libDir newTargetDfs <- evalGhcEnv hscEnv $ setOptions optHaddockParse cfp opts (hsc_dflags hscEnv) rootDir let deps = componentDependencies opts ++ maybeToList hieYaml - dep_info <- getDependencyInfo deps + dep_info <- getDependencyInfo (fmap toAbsolutePath deps) -- Now lookup to see whether we are combining with an existing HscEnv -- or making a new one. The lookup returns the HscEnv and a list of -- information about other components loaded into the HscEnv diff --git a/ghcide/src/Development/IDE/Core/FileStore.hs b/ghcide/src/Development/IDE/Core/FileStore.hs index 7dad386ece..e545ec7b14 100644 --- a/ghcide/src/Development/IDE/Core/FileStore.hs +++ b/ghcide/src/Development/IDE/Core/FileStore.hs @@ -78,7 +78,6 @@ import System.FilePath import System.IO.Error import System.IO.Unsafe - data Log = LogCouldNotIdentifyReverseDeps !NormalizedFilePath | LogTypeCheckingReverseDeps !NormalizedFilePath !(Maybe [NormalizedFilePath]) @@ -147,6 +146,29 @@ getModificationTimeImpl missingFileDiags file = do then return (Nothing, ([], Nothing)) else return (Nothing, ([diag], Nothing)) + +getPhysicalModificationTimeRule :: Recorder (WithPriority Log) -> Rules () +getPhysicalModificationTimeRule recorder = defineEarlyCutoff (cmapWithPrio LogShake recorder) $ Rule $ \GetPhysicalModificationTime file -> + getPhysicalModificationTimeImpl file + +getPhysicalModificationTimeImpl + :: NormalizedFilePath + -> Action (Maybe BS.ByteString, ([FileDiagnostic], Maybe FileVersion)) +getPhysicalModificationTimeImpl file = do + let file' = fromNormalizedFilePath file + let wrap time = (Just $ LBS.toStrict $ B.encode $ toRational time, ([], Just $ ModificationTime time)) + + alwaysRerun + + liftIO $ fmap wrap (getModTime file') + `catch` \(e :: IOException) -> do + let err | isDoesNotExistError e = "File does not exist: " ++ file' + | otherwise = "IO error while reading " ++ file' ++ ", " ++ displayException e + diag = ideErrorText file (T.pack err) + if isDoesNotExistError e + then return (Nothing, ([], Nothing)) + else return (Nothing, ([diag], Nothing)) + -- | Interface files cannot be watched, since they live outside the workspace. -- But interface files are private, in that only HLS writes them. -- So we implement watching ourselves, and bypass the need for alwaysRerun. 
@@ -170,7 +192,11 @@ resetFileStore ideState changes = mask $ \_ -> do case c of LSP.FileChangeType_Changed -- already checked elsewhere | not $ HM.member nfp fois - -> atomically $ deleteValue (shakeExtras ideState) GetModificationTime nfp + -> + atomically $ do + ks <- deleteValue (shakeExtras ideState) GetModificationTime nfp + vs <- deleteValue (shakeExtras ideState) GetPhysicalModificationTime nfp + pure $ ks ++ vs _ -> pure [] @@ -233,6 +259,7 @@ getVersionedTextDoc doc = do fileStoreRules :: Recorder (WithPriority Log) -> (NormalizedFilePath -> Action Bool) -> Rules () fileStoreRules recorder isWatched = do getModificationTimeRule recorder + getPhysicalModificationTimeRule recorder getFileContentsRule recorder addWatchedFileRule recorder isWatched diff --git a/ghcide/src/Development/IDE/Core/RuleTypes.hs b/ghcide/src/Development/IDE/Core/RuleTypes.hs index 63122d4025..a13e6de14c 100644 --- a/ghcide/src/Development/IDE/Core/RuleTypes.hs +++ b/ghcide/src/Development/IDE/Core/RuleTypes.hs @@ -1,6 +1,7 @@ -- Copyright (c) 2019 The DAML Authors. All rights reserved. -- SPDX-License-Identifier: Apache-2.0 +{-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE PatternSynonyms #-} @@ -319,6 +320,13 @@ instance Hashable GetModificationTime where instance NFData GetModificationTime +data GetPhysicalModificationTime = GetPhysicalModificationTime + deriving (Generic, Show, Eq) + deriving anyclass (Hashable, NFData) + +-- | Get the modification time of a file on disk, ignoring any version in the VFS. +type instance RuleResult GetPhysicalModificationTime = FileVersion + pattern GetModificationTime :: GetModificationTime pattern GetModificationTime = GetModificationTime_ {missingFileDiagnostics=True} diff --git a/ghcide/src/Development/IDE/Core/Rules.hs b/ghcide/src/Development/IDE/Core/Rules.hs index 071ecafc41..c123c9d4a8 100644 --- a/ghcide/src/Development/IDE/Core/Rules.hs +++ b/ghcide/src/Development/IDE/Core/Rules.hs @@ -183,6 +183,7 @@ data Log | LogLoadingHieFileFail !FilePath !SomeException | LogLoadingHieFileSuccess !FilePath | LogTypecheckedFOI !NormalizedFilePath + | LogDependencies !NormalizedFilePath [FilePath] deriving Show instance Pretty Log where @@ -207,6 +208,11 @@ instance Pretty Log where <+> "the HLS version being used, the plugins enabled, and if possible the codebase and file which" <+> "triggered this warning." 
] + LogDependencies nfp deps -> + vcat + [ "Add dependency" <+> pretty (fromNormalizedFilePath nfp) + , nest 2 $ pretty deps + ] templateHaskellInstructions :: T.Text templateHaskellInstructions = "https://haskell-language-server.readthedocs.io/en/latest/troubleshooting.html#static-binaries" @@ -715,7 +721,8 @@ loadGhcSession recorder ghcSessionDepsConfig = do let nfp = toNormalizedFilePath' fp itExists <- getFileExists nfp when itExists $ void $ do - use_ GetModificationTime nfp + use_ GetPhysicalModificationTime nfp + logWith recorder Logger.Info $ LogDependencies file deps mapM_ addDependency deps let cutoffHash = LBS.toStrict $ B.encode (hash (snd val)) diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index dc4cb246d0..096cf04a31 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -318,6 +318,7 @@ test-suite hls-cabal-plugin-tests , haskell-language-server:hls-cabal-plugin , hls-test-utils == 2.11.0.0 , lens + , lsp , lsp-types , text diff --git a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs index 78db726f77..7a2c53ee25 100644 --- a/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs +++ b/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs @@ -2,6 +2,7 @@ {-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE TypeFamilies #-} module Ide.Plugin.Cabal (descriptor, haskellInteractionDescriptor, Log (..)) where @@ -145,7 +146,7 @@ descriptor recorder plId = \ide vfs _ (DidSaveTextDocumentParams TextDocumentIdentifier{_uri} _) -> liftIO $ do whenUriFile _uri $ \file -> do log' Debug $ LogDocSaved _uri - restartCabalShakeSession (shakeExtras ide) vfs file "(saved)" $ + restartCabalShakeSessionPhysical (shakeExtras ide) vfs file "(saved)" $ OfInterest.addFileOfInterest ofInterestRecorder ide file OnDisk , mkPluginNotificationHandler LSP.SMethod_TextDocumentDidClose $ \ide vfs _ (DidCloseTextDocumentParams TextDocumentIdentifier{_uri}) -> liftIO $ do @@ -180,7 +181,16 @@ restartCabalShakeSession :: ShakeExtras -> VFS.VFS -> NormalizedFilePath -> Stri restartCabalShakeSession shakeExtras vfs file actionMsg actionBetweenSession = do restartShakeSession shakeExtras (VFSModified vfs) (fromNormalizedFilePath file ++ " " ++ actionMsg) [] $ do keys <- actionBetweenSession - return (toKey GetModificationTime file : keys) + return (toKey GetModificationTime file:keys) + +-- | Just like 'restartCabalShakeSession', but records that the 'file' has been changed on disk. +-- So, any action that can only work with on-disk modifications may depend on the 'GetPhysicalModificationTime' +-- rule to get re-run if the file changes on disk. 
+restartCabalShakeSessionPhysical :: ShakeExtras -> VFS.VFS -> NormalizedFilePath -> String -> IO [Key] -> IO ()
+restartCabalShakeSessionPhysical shakeExtras vfs file actionMsg actionBetweenSession = do
+    restartShakeSession shakeExtras (VFSModified vfs) (fromNormalizedFilePath file ++ " " ++ actionMsg) [] $ do
+        keys <- actionBetweenSession
+        return (toKey GetModificationTime file:toKey GetPhysicalModificationTime file:keys)
 
 -- ----------------------------------------------------------------
 -- Code Actions
diff --git a/plugins/hls-cabal-plugin/test/Main.hs b/plugins/hls-cabal-plugin/test/Main.hs
index a390d8982a..43794e753d 100644
--- a/plugins/hls-cabal-plugin/test/Main.hs
+++ b/plugins/hls-cabal-plugin/test/Main.hs
@@ -1,6 +1,8 @@
 {-# LANGUAGE CPP #-}
+{-# LANGUAGE DataKinds #-}
 {-# LANGUAGE DisambiguateRecordFields #-}
 {-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
 
 module Main
   ( main,
@@ -17,14 +19,19 @@ import qualified Data.ByteString as BS
 import Data.Either (isRight)
 import Data.List.Extra (nubOrdOn)
 import qualified Data.Maybe as Maybe
+import Data.Text (Text)
 import qualified Data.Text as T
+import qualified Data.Text.IO as Text
 import Definition (gotoDefinitionTests)
+import Development.IDE.Test
 import Ide.Plugin.Cabal.LicenseSuggest (licenseErrorSuggestion)
 import qualified Ide.Plugin.Cabal.Parse as Lib
 import qualified Language.LSP.Protocol.Lens as L
+import qualified Language.LSP.Protocol.Message as L
 import Outline (outlineTests)
 import System.FilePath
 import Test.Hls
+import Test.Hls.FileSystem
 import Utils
 
 main :: IO ()
@@ -40,6 +47,7 @@ main = do
     , codeActionTests
     , gotoDefinitionTests
    , hoverTests
+    , reloadOnCabalChangeTests
    ]
 
 -- ------------------------------------------------------------------------
@@ -128,11 +136,6 @@ pluginTests =
             _ <- applyEdit doc $ TextEdit (Range (Position 3 20) (Position 4 0)) "BSD-3-Clause\n"
             newDiags <- cabalCaptureKick
             liftIO $ newDiags @?= []
-        , runCabalTestCaseSession "No Diagnostics in .hs files from valid .cabal file" "simple-cabal" $ do
-            hsDoc <- openDoc "A.hs" "haskell"
-            expectNoMoreDiagnostics 1 hsDoc "typechecking"
-            cabalDoc <- openDoc "simple-cabal.cabal" "cabal"
-            expectNoMoreDiagnostics 1 cabalDoc "parsing"
         ]
    ]
 -- ----------------------------------------------------------------------------
@@ -262,3 +265,63 @@ hoverOnDependencyTests = testGroup "Hover Dependency"
       h <- getHover doc pos
       liftIO $ assertBool ("Found hover `" <> show h <> "`") $ Maybe.isNothing h
       closeDoc doc
+
+-- ----------------------------------------------------------------------------
+-- Reloading of Haskell files on .cabal changes
+-- ----------------------------------------------------------------------------
+
+simpleCabalVft :: [FileTree]
+simpleCabalVft =
+  [ copy "hie.yaml"
+  , copy "simple-reload.cabal"
+  , copy "Main.hs"
+  ]
+
+simpleCabalFs :: VirtualFileTree
+simpleCabalFs = mkVirtualFileTree
+  (testDataDir </> "simple-reload")
+  simpleCabalVft
+
+-- Slow tests
+reloadOnCabalChangeTests :: TestTree
+reloadOnCabalChangeTests = testGroup "Reload on .cabal changes"
+  [ runCabalTestCaseSessionVft "Change warnings when .cabal file changes" simpleCabalFs $ do
+      _ <- openDoc "Main.hs" "haskell"
+      expectDiagnostics [("Main.hs", [(DiagnosticSeverity_Warning, (8, 0), "Top-level binding with no type signature", Just "GHC-38417")])]
+      waitForAllProgressDone
+      cabalDoc <- openDoc "simple-reload.cabal" "cabal"
+      skipManyTill anyMessage cabalKickDone
+      saveDoc cabalDoc
+        [trimming|
+          cabal-version:      3.4
+          name:               simple-reload
+          version:            0.1.0.0
+          -- copyright:
+          build-type:         Simple
+
+          common warnings
+              ghc-options: -Wall -Wno-missing-signatures
+
+          executable simple-reload
+              import:           warnings
+              main-is:          Main.hs
+              build-depends:    base
+              default-language: Haskell2010
+        |]
+
+      expectDiagnostics [("Main.hs", [(DiagnosticSeverity_Warning, (2, 0), "The import of \8216Data.List\8217 is redundant", Nothing)])]
+  ]
+
+-- | Persists the given contents to the 'TextDocumentIdentifier' on disk
+-- and sends the @textDocument/didSave@ notification.
+saveDoc :: TextDocumentIdentifier -> Text -> Session ()
+saveDoc docId t = do
+  -- I couldn't figure out how to get the virtual file contents, so we write it
+  -- to disk and send the 'SMethod_TextDocumentDidSave' notification
+  case uriToFilePath (docId ^. L.uri) of
+    Nothing -> pure ()
+    Just fp -> do
+      liftIO $ Text.writeFile fp t
+
+      let params = DidSaveTextDocumentParams docId Nothing
+      sendNotification L.SMethod_TextDocumentDidSave params
diff --git a/plugins/hls-cabal-plugin/test/Utils.hs b/plugins/hls-cabal-plugin/test/Utils.hs
index 2733f94fd0..0264fec2c6 100644
--- a/plugins/hls-cabal-plugin/test/Utils.hs
+++ b/plugins/hls-cabal-plugin/test/Utils.hs
@@ -14,6 +14,7 @@ import qualified Ide.Plugin.Cabal
 import Ide.Plugin.Cabal.Completion.Types
 import System.FilePath
 import Test.Hls
+import Test.Hls.FileSystem (VirtualFileTree)
 
 cabalPlugin :: PluginTestDescriptor Ide.Plugin.Cabal.Log
 
@@ -57,6 +58,13 @@ runCabalSession :: FilePath -> Session a -> IO a
 runCabalSession subdir =
    failIfSessionTimeout . runSessionWithServer def cabalPlugin (testDataDir </> subdir)
 
+runCabalTestCaseSessionVft :: TestName -> VirtualFileTree -> Session () -> TestTree
+runCabalTestCaseSessionVft title vft = testCase title . runCabalSessionVft vft
+
+runCabalSessionVft :: VirtualFileTree -> Session a -> IO a
+runCabalSessionVft vft =
+    failIfSessionTimeout . runSessionWithServerInTmpDir def cabalPlugin vft
+
 runHaskellAndCabalSession :: FilePath -> Session a -> IO a
 runHaskellAndCabalSession subdir =
    failIfSessionTimeout . runSessionWithServer def (cabalPlugin <> cabalHaskellPlugin) (testDataDir </> subdir)
@@ -82,3 +90,4 @@ cabalCaptureKick = captureKickDiagnostics cabalKickStart cabalKickDone
 -- | list comparison where the order in the list is irrelevant
 (@?==) :: (HasCallStack, Ord a, Show a) => [a] -> [a] -> Assertion
 (@?==) l1 l2 = sort l1 @?= sort l2
+
diff --git a/plugins/hls-cabal-plugin/test/testdata/simple-reload/Main.hs b/plugins/hls-cabal-plugin/test/testdata/simple-reload/Main.hs
new file mode 100644
index 0000000000..5f0cdfad80
--- /dev/null
+++ b/plugins/hls-cabal-plugin/test/testdata/simple-reload/Main.hs
@@ -0,0 +1,9 @@
+module Main where
+
+import Data.List -- Intentionally unused import, used in the testcase
+
+main :: IO ()
+main = foo
+
+-- Missing signature
+foo = putStrLn "Hello, World"
diff --git a/plugins/hls-cabal-plugin/test/testdata/simple-reload/cabal.project b/plugins/hls-cabal-plugin/test/testdata/simple-reload/cabal.project
new file mode 100644
index 0000000000..e6fdbadb43
--- /dev/null
+++ b/plugins/hls-cabal-plugin/test/testdata/simple-reload/cabal.project
@@ -0,0 +1 @@
+packages: .
diff --git a/plugins/hls-cabal-plugin/test/testdata/simple-reload/hie.yaml b/plugins/hls-cabal-plugin/test/testdata/simple-reload/hie.yaml
new file mode 100644
index 0000000000..04cd24395e
--- /dev/null
+++ b/plugins/hls-cabal-plugin/test/testdata/simple-reload/hie.yaml
@@ -0,0 +1,2 @@
+cradle:
+  cabal:
diff --git a/plugins/hls-cabal-plugin/test/testdata/simple-reload/simple-reload.cabal b/plugins/hls-cabal-plugin/test/testdata/simple-reload/simple-reload.cabal
new file mode 100644
index 0000000000..359940aebc
--- /dev/null
+++ b/plugins/hls-cabal-plugin/test/testdata/simple-reload/simple-reload.cabal
@@ -0,0 +1,14 @@
+cabal-version:      3.4
+name:               simple-reload
+version:            0.1.0.0
+-- copyright:
+build-type:         Simple
+
+common warnings
+    ghc-options: -Wall -Wno-unused-imports
+
+executable simple-reload
+    import:           warnings
+    main-is:          Main.hs
+    build-depends:    base
+    default-language: Haskell2010

From e3d38b0c4666681dca39901aa9d47b04422c82ff Mon Sep 17 00:00:00 2001
From: fendor
Date: Mon, 11 Aug 2025 11:56:27 +0200
Subject: [PATCH 86/90] Bump CI to GHC 9.10.2 (#4687)

---
 .github/actions/setup-build/action.yml | 2 +-
 .github/workflows/bench.yml | 2 +-
 haskell-language-server.cabal | 2 +-
 .../test/testdata/TPropertyError.ghc910.expected.hs | 8 ++++++--
 plugins/hls-refactor-plugin/test/Main.hs | 2 +-
 5 files changed, 10 insertions(+), 6 deletions(-)

diff --git a/.github/actions/setup-build/action.yml b/.github/actions/setup-build/action.yml
index da1ece3140..11f32c09db 100644
--- a/.github/actions/setup-build/action.yml
+++ b/.github/actions/setup-build/action.yml
@@ -31,7 +31,7 @@ runs:
         sudo chown -R $USER /usr/local/.ghcup
       shell: bash
 
-    - uses: haskell-actions/setup@v2.7.10
+    - uses: haskell-actions/setup@v2.8.1
      id: HaskEnvSetup
      with:
        ghc-version : ${{ inputs.ghc }}
diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml
index 82a50589e4..ba39a21058 100644
--- a/.github/workflows/bench.yml
+++ b/.github/workflows/bench.yml
@@ -127,7 +127,7 @@ jobs:
         example: ['cabal', 'lsp-types']
 
     steps:
-      - uses: haskell-actions/setup@v2.8.0
+      - uses: haskell-actions/setup@v2.8.1
        with:
          ghc-version : ${{ matrix.ghc }}
          cabal-version: ${{ matrix.cabal }}
diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal
index 096cf04a31..50d4b869ba 100644
--- a/haskell-language-server.cabal
+++ b/haskell-language-server.cabal
@@ -14,7 +14,7 @@ copyright: The Haskell IDE Team
 license: Apache-2.0
 license-file: LICENSE
 build-type: Simple
-tested-with: GHC == {9.12.2, 9.10.1, 9.8.4, 9.6.7}
+tested-with: GHC == {9.12.2, 9.10.2, 9.8.4, 9.6.7}
 extra-source-files:
   README.md
   ChangeLog.md
diff --git a/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc910.expected.hs b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc910.expected.hs
index 87fbda03f8..089779ea2b 100644
--- a/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc910.expected.hs
+++ b/plugins/hls-eval-plugin/test/testdata/TPropertyError.ghc910.expected.hs
@@ -11,7 +11,11 @@ module TProperty where
 -- badHead, called at libraries/ghc-internal/src/GHC/Internal/List.hs:90:28 in ghc-internal:GHC.Internal.List
 -- head, called at <interactive>:1:27 in interactive:Ghci2
 -- HasCallStack backtrace:
--- collectBacktraces, called at libraries/ghc-internal/src/GHC/Internal/Exception.hs:92:13 in ghc-internal:GHC.Internal.Exception
--- toExceptionWithBacktrace, called at libraries/ghc-internal/src/GHC/Internal/Exception.hs:128:3 in ghc-internal:GHC.Internal.Exception
+-- collectBacktraces, called at libraries/ghc-internal/src/GHC/Internal/Exception.hs:169:13 in ghc-internal:GHC.Internal.Exception
+-- toExceptionWithBacktrace, called at libraries/ghc-internal/src/GHC/Internal/Exception.hs:204:5 in ghc-internal:GHC.Internal.Exception
+-- error, called at libraries/ghc-internal/src/GHC/Internal/List.hs:2030:3 in ghc-internal:GHC.Internal.List
+-- errorEmptyList, called at libraries/ghc-internal/src/GHC/Internal/List.hs:96:11 in ghc-internal:GHC.Internal.List
+-- badHead, called at libraries/ghc-internal/src/GHC/Internal/List.hs:90:28 in ghc-internal:GHC.Internal.List
+-- head, called at <interactive>:1:27 in interactive:Ghci2
 --
 -- []
diff --git a/plugins/hls-refactor-plugin/test/Main.hs b/plugins/hls-refactor-plugin/test/Main.hs
index 508d480c63..0fb8b61f83 100644
--- a/plugins/hls-refactor-plugin/test/Main.hs
+++ b/plugins/hls-refactor-plugin/test/Main.hs
@@ -3379,7 +3379,7 @@ addSigActionTests = let
       executeCodeAction chosenAction
       modifiedCode <- documentContents doc
       liftIO $ expectedCode @=? modifiedCode
-    issue806 = if ghcVersion >= GHC912 then
+    issue806 = if ghcVersion >= GHC910 then
                  "hello = print" >:: "hello :: GHC.Types.ZonkAny 0 -> IO ()" -- GHC now returns ZonkAny 0 instead of Any. https://gitlab.haskell.org/ghc/ghc/-/issues/25895
               else
                  "hello = print" >:: "hello :: GHC.Types.Any -> IO ()" -- Documents current behavior outlined in #806

From dd6b562976f7998200dd143939b844143417035c Mon Sep 17 00:00:00 2001
From: soulomoon
Date: Mon, 11 Aug 2025 21:22:24 +0800
Subject: [PATCH 87/90] merge master in

---
 ghcide/session-loader/Development/IDE/Session.hs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs
index 5804ead632..b1f0b63d21 100644
--- a/ghcide/session-loader/Development/IDE/Session.hs
+++ b/ghcide/session-loader/Development/IDE/Session.hs
@@ -832,7 +832,7 @@ packageSetup recorder sessionState newEmptyHscEnv (hieYaml, cfp, opts) = do
     hscEnv <- newEmptyHscEnv
     newTargetDfs <- liftIO $ evalGhcEnv hscEnv $ setOptions haddockparse cfp opts (hsc_dflags hscEnv) rootDir
     let deps = componentDependencies opts ++ maybeToList hieYaml
-    dep_info <- liftIO $ getDependencyInfo deps
+    dep_info <- liftIO $ getDependencyInfo (fmap (toAbsolute rootDir) deps)
    -- Now lookup to see whether we are combining with an existing HscEnv
    -- or making a new one. The lookup returns the HscEnv and a list of
    -- information about other components loaded into the HscEnv

From 24738d61deddc0853f89b9fab23eb1e03a7b5460 Mon Sep 17 00:00:00 2001
From: soulomoon
Date: Wed, 20 Aug 2025 00:43:14 +0800
Subject: [PATCH 88/90] session: move handleBatchLoadSuccess inside restartSession to avoid stale GhcSession results and lost diagnostics

---
 .../session-loader/Development/IDE/Session.hs | 48 +++++++++----------
 1 file changed, 24 insertions(+), 24 deletions(-)

diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs
index b1f0b63d21..678acb13f4 100644
--- a/ghcide/session-loader/Development/IDE/Session.hs
+++ b/ghcide/session-loader/Development/IDE/Session.hs
@@ -817,10 +817,30 @@ session recorder sessionShake sessionState knownTargetsVar(hieYaml, cfp, opts, l
     let new_cache = newComponentCache (cmapWithPrio LogSessionGhc recorder) (optExtensions ideOptions) cfp hscEnv
     all_target_details <- liftIO $ new_cache old_deps new_deps
     (all_targets, this_flags_map) <- liftIO $ addErrorTargetIfUnknown all_target_details hieYaml cfp
-
-    liftIO $ handleBatchLoadSuccess recorder sessionState hieYaml this_flags_map all_targets
-    -- Typecheck all files in the project on startup
-    liftIO $ loadKnownTargets recorder sessionShake (optCheckProject ideOptions) knownTargetsVar new_deps all_targets
+    -- The VFS doesn't change on cradle edits, re-use the old one.
+    -- Invalidate all the existing GhcSession build nodes by restarting the Shake session
+    liftIO $ do
+        checkProject <- optCheckProject ideOptions
+        restartSession sessionShake VFSUnmodified "new component" [] $ do
+            -- It is necessary to call handleBatchLoadSuccess in restartSession
+            -- to ensure the GhcSession rule does not return before a new session is started.
+            -- Otherwise, invalid compilation results may propagate to downstream rules,
+            -- potentially resulting in lost diagnostics and other issues.
+            handleBatchLoadSuccess recorder sessionState hieYaml this_flags_map all_targets
+            keys2 <- invalidateCache sessionShake
+            keys1 <- extendKnownTargets recorder knownTargetsVar all_targets
+            -- Typecheck all files in the project on startup
+            unless (null new_deps || not checkProject) $ do
+                cfps' <- liftIO $ filterM (IO.doesFileExist . fromNormalizedFilePath) (concatMap targetLocations all_targets)
+                void $ enqueueActions sessionShake $ mkDelayedAction "InitialLoad" Debug $ void $ do
+                    mmt <- uses GetModificationTime cfps'
+                    let cs_exist = catMaybes (zipWith (<$) cfps' mmt)
+                    modIfaces <- uses GetModIface cs_exist
+                    -- update exports map
+                    shakeExtras <- getShakeExtras
+                    let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces
+                    liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>)
+            return [keys1, keys2]
 
 -- | Create a new HscEnv from a hieYaml root and a set of options
 packageSetup :: Recorder (WithPriority Log) -> SessionState -> SessionM HscEnv -> (Maybe FilePath, NormalizedFilePath, ComponentOptions) -> SessionM ([ComponentInfo], [ComponentInfo])
@@ -897,26 +917,6 @@ extendKnownTargets recorder knownTargetsVar newTargets = do
      logWith recorder Debug $ LogKnownFilesUpdated (targetMap x)
      return $ toNoFileKey GetKnownTargets
 
-loadKnownTargets :: Recorder (WithPriority Log) -> SessionShake -> IO Bool -> TVar (Hashed KnownTargets) -> [ComponentInfo] -> [TargetDetails] -> IO ()
-loadKnownTargets recorder sessionShake getCheckProject knownTargetsVar new_deps targets = do
-    checkProject <- getCheckProject
-
-    -- The VFS doesn't change on cradle edits, re-use the old one.
-    -- Invalidate all the existing GhcSession build nodes by restarting the Shake session
-    restartSession sessionShake VFSUnmodified "new component" [] $ do
-        keys2 <- invalidateCache sessionShake
-        keys1 <- extendKnownTargets recorder knownTargetsVar targets
-        unless (null new_deps || not checkProject) $ do
-            cfps' <- liftIO $ filterM (IO.doesFileExist . fromNormalizedFilePath) (concatMap targetLocations targets)
-            void $ enqueueActions sessionShake $ mkDelayedAction "InitialLoad" Debug $ void $ do
-                mmt <- uses GetModificationTime cfps'
-                let cs_exist = catMaybes (zipWith (<$) cfps' mmt)
-                modIfaces <- uses GetModIface cs_exist
-                -- update exports map
-                shakeExtras <- getShakeExtras
-                let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces
-                liftIO $ atomically $ modifyTVar' (exportsMap shakeExtras) (exportsMap' <>)
-        return [keys1, keys2]
 
 loadCradleWithNotifications ::
  Recorder (WithPriority Log) ->

From f30030c53a2136479b19820f05730c573baa6371 Mon Sep 17 00:00:00 2001
From: Brandon Chinn
Date: Tue, 19 Aug 2025 13:44:55 -0700
Subject: [PATCH 89/90] Support fourmolu 0.19.0.0 (#4693)

---
 haskell-language-server.cabal | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal
index 50d4b869ba..91adbcbe37 100644
--- a/haskell-language-server.cabal
+++ b/haskell-language-server.cabal
@@ -1470,7 +1470,7 @@ library hls-fourmolu-plugin
  hs-source-dirs: plugins/hls-fourmolu-plugin/src
  build-depends:
    , filepath
-    , fourmolu ^>= 0.14 || ^>= 0.15 || ^>= 0.16 || ^>=0.17 || ^>=0.18
+    , fourmolu ^>= 0.14 || ^>= 0.15 || ^>= 0.16 || ^>=0.17 || ^>=0.18 || ^>=0.19
    , ghc-boot-th
    , ghcide == 2.11.0.0
    , hls-plugin-api == 2.11.0.0

From 0c73c2ba78c42b7123fa38da113460d51144eed7 Mon Sep 17 00:00:00 2001
From: patrick
Date: Fri, 22 Aug 2025 21:08:55 +0800
Subject: [PATCH 90/90] Refactor CoreFile to use fat interface core type (#4700)

* Refactor CoreFile to use fat interface type

* Update ghcide/src/Development/IDE/GHC/CoreFile.hs

* Remove unused TopIfaceBinding type
---
 ghcide/src/Development/IDE/GHC/CoreFile.hs | 100 +--------------------
 1 file changed, 3 insertions(+), 97 deletions(-)

diff --git a/ghcide/src/Development/IDE/GHC/CoreFile.hs b/ghcide/src/Development/IDE/GHC/CoreFile.hs
index 99b7328770..8061f22058 100644
--- a/ghcide/src/Development/IDE/GHC/CoreFile.hs
+++ b/ghcide/src/Development/IDE/GHC/CoreFile.hs
@@ -13,10 +13,7 @@ module Development.IDE.GHC.CoreFile
  ) where
 
 import Control.Monad
-import Control.Monad.IO.Class
-import Data.Foldable
 import Data.IORef
-import Data.List (isPrefixOf)
 import Data.Maybe
 import Development.IDE.GHC.Compat
 import qualified Development.IDE.GHC.Compat.Util as Util
@@ -24,7 +21,6 @@ import GHC.Core
 import GHC.CoreToIface
 import GHC.Fingerprint
 import GHC.Iface.Binary
-import GHC.Iface.Env
 #if MIN_VERSION_ghc(9,11,0)
 import qualified GHC.Iface.Load as Iface
 #endif
@@ -42,38 +38,11 @@ initBinMemSize = 1024 * 1024
 
 data CoreFile
  = CoreFile
-    { cf_bindings :: [TopIfaceBinding IfaceId]
+    { cf_bindings :: [IfaceBindingX IfaceMaybeRhs IfaceTopBndrInfo]
    -- ^ The actual core file bindings, deserialized lazily
    , cf_iface_hash :: !Fingerprint
    }
 
--- | Like IfaceBinding, but lets us serialize internal names as well
-data TopIfaceBinding v
-  = TopIfaceNonRec v IfaceExpr
-  | TopIfaceRec [(v, IfaceExpr)]
-  deriving (Functor, Foldable, Traversable)
-
--- | GHC doesn't export 'tcIdDetails', 'tcIfaceInfo', or 'tcIfaceType',
-- but it does export 'tcIfaceDecl'
-- so we use `IfaceDecl` as a container for all of these
-- invariant: 'IfaceId' is always a 'IfaceId' constructor
-type IfaceId = IfaceDecl
-
-instance Binary (TopIfaceBinding IfaceId) where
-  put_ bh (TopIfaceNonRec d e) = do
-    putByte bh 0
-    put_ bh d
-    put_ bh e
-  put_ bh (TopIfaceRec vs) = do
-    putByte bh 1
-    put_ bh vs
-  get bh = do
-    t <- getByte bh
-    case t of
-      0 -> TopIfaceNonRec <$> get bh <*> get bh
-      1 -> TopIfaceRec <$> get bh
-      _ -> error "Binary TopIfaceBinding"
-
 instance Binary CoreFile where
  put_ bh (CoreFile core fp) = lazyPut bh core >> put_ bh fp
  get bh = CoreFile <$> lazyGet bh <*> get bh
@@ -118,7 +87,7 @@ codeGutsToCoreFile
  -> CgGuts
  -> CoreFile
 -- In GHC 9.6, implicit binds are tidied and part of core binds
-codeGutsToCoreFile hash CgGuts{..} = CoreFile (map (toIfaceTopBind1 cg_module) cg_binds) hash
+codeGutsToCoreFile hash CgGuts{..} = CoreFile (map toIfaceTopBind cg_binds) hash
 
 getImplicitBinds :: TyCon -> [CoreBind]
 getImplicitBinds tc = cls_binds ++ getTyConImplicitBinds tc
@@ -142,70 +111,7 @@ get_defn identifier = NonRec identifier templ
                 Nothing -> error "get_dfn: no unfolding template"
                 Just x -> x
 
-toIfaceTopBndr1 :: Module -> Id -> IfaceId
-toIfaceTopBndr1 mod identifier
-  = IfaceId (mangleDeclName mod $ getName identifier)
-            (toIfaceType (idType identifier))
-            (toIfaceIdDetails (idDetails identifier))
-            (toIfaceIdInfo (idInfo identifier))
-
-toIfaceTopBind1 :: Module -> Bind Id -> TopIfaceBinding IfaceId
-toIfaceTopBind1 mod (NonRec b r) = TopIfaceNonRec (toIfaceTopBndr1 mod b) (toIfaceExpr r)
-toIfaceTopBind1 mod (Rec prs) = TopIfaceRec [(toIfaceTopBndr1 mod b, toIfaceExpr r) | (b,r) <- prs]
-
 typecheckCoreFile :: Module -> IORef TypeEnv -> CoreFile -> IfG CoreProgram
 typecheckCoreFile this_mod type_var (CoreFile prepd_binding _)
  = initIfaceLcl this_mod (text "typecheckCoreFile") NotBoot $ do
-    tcTopIfaceBindings1 type_var prepd_binding
-
--- | Internal names can't be serialized, so we mange them
-- to an external name and restore at deserialization time
-- This is necessary because we rely on stuffing TopIfaceBindings into
-- a IfaceId because we don't have access to 'tcIfaceType' etc..
-mangleDeclName :: Module -> Name -> Name
-mangleDeclName mod name
-  | isExternalName name = name
-  | otherwise = mkExternalName (nameUnique name) (mangleModule mod) (nameOccName name) (nameSrcSpan name)
-
--- | Mangle the module name too to avoid conflicts
-mangleModule :: Module -> Module
-mangleModule mod = mkModule (moduleUnit mod) (mkModuleName $ "GHCIDEINTERNAL" ++ moduleNameString (moduleName mod))
-
-isGhcideModule :: Module -> Bool
-isGhcideModule mod = "GHCIDEINTERNAL" `isPrefixOf` (moduleNameString $ moduleName mod)
-
-- Is this a fake external name that we need to make into an internal name?
-isGhcideName :: Name -> Bool
-isGhcideName = isGhcideModule . nameModule
-
-tcTopIfaceBindings1 :: IORef TypeEnv -> [TopIfaceBinding IfaceId]
-          -> IfL [CoreBind]
-tcTopIfaceBindings1 ty_var ver_decls
-  = do
-  int <- mapM (traverse tcIfaceId) ver_decls
-  let all_ids = concatMap toList int
-  liftIO $ modifyIORef ty_var (flip extendTypeEnvList $ map AnId all_ids)
-  extendIfaceIdEnv all_ids $ mapM tc_iface_bindings int
-
-tcIfaceId :: IfaceId -> IfL Id
-tcIfaceId = fmap getIfaceId . tcIfaceDecl False <=< unmangle_decl_name
-  where
-    unmangle_decl_name ifid@IfaceId{ ifName = name }
-    -- Check if the name is mangled
-      | isGhcideName name = do
-        name' <- newIfaceName (mkVarOcc $ getOccString name)
-        pure $ ifid{ ifName = name' }
-      | otherwise = pure ifid
-    unmangle_decl_name _ifid = error "tcIfaceId: got non IfaceId: "
-    -- invariant: 'IfaceId' is always a 'IfaceId' constructor
-    getIfaceId (AnId identifier) = identifier
-    getIfaceId _ = error "tcIfaceId: got non Id"
-
-tc_iface_bindings :: TopIfaceBinding Id -> IfL CoreBind
-tc_iface_bindings (TopIfaceNonRec v e) = do
-  e' <- tcIfaceExpr e
-  pure $ NonRec v e'
-tc_iface_bindings (TopIfaceRec vs) = do
-  vs' <- traverse (\(v, e) -> (v,) <$> tcIfaceExpr e) vs
-  pure $ Rec vs'
-
+    tcTopIfaceBindings type_var prepd_binding
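
Editorial aside on the invariant described in the PATCH 88 comments (publishing the new flags map from inside the session restart so that rules never observe stale results): the following is a minimal, self-contained Haskell sketch of that ordering guarantee only. It is not ghcide code; the Session type, the flags representation, and the MVar-based locking are assumptions made purely for illustration.

module RestartSketch (main) where

import Control.Concurrent.MVar

-- Stand-in for the per-file flags map; the MVar doubles as the "session lock".
type Session = MVar [(FilePath, [String])]

-- "Restart" the session: the update runs while the lock is held, so a reader
-- can only ever observe the state from before or after the restart, never a
-- half-published one. This mirrors running the publish step inside the
-- restart callback rather than before it.
restartWith :: Session -> ([(FilePath, [String])] -> IO [(FilePath, [String])]) -> IO ()
restartWith sess update = modifyMVar_ sess update

-- A "rule" reading the session always sees a fully published state.
lookupFlags :: Session -> FilePath -> IO (Maybe [String])
lookupFlags sess fp = lookup fp <$> readMVar sess

main :: IO ()
main = do
  sess <- newMVar [("A.hs", ["-package", "base"])]
  restartWith sess $ \m -> pure (("B.hs", ["-package", "text"]) : m)
  print =<< lookupFlags sess "B.hs"  -- Just ["-package","text"]

The real patch additionally restarts the Shake session, invalidates build nodes, and re-enqueues typechecking; the sketch models only why the publish step must happen inside the restart.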