diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 04ec240c57..eb80186ae9 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -6,7 +6,7 @@ jobs: linter-check: name: linter-check runs-on: ubuntu-latest - container: ghcr.io/fossas/haskell-dev-tools:9.0.2 + container: ghcr.io/fossas/haskell-dev-tools:9.4.7 steps: - uses: actions-rs/toolchain@v1 @@ -41,7 +41,7 @@ jobs: format-check: name: formatter-check runs-on: ubuntu-latest - container: ghcr.io/fossas/haskell-dev-tools:9.0.2 + container: ghcr.io/fossas/haskell-dev-tools:9.4.7 steps: - uses: actions-rs/toolchain@v1 @@ -60,7 +60,7 @@ jobs: cabal-format-check: name: cabal-format-check runs-on: ubuntu-latest - container: ghcr.io/fossas/haskell-dev-tools:9.0.2 + container: ghcr.io/fossas/haskell-dev-tools:9.4.7 steps: - uses: actions/checkout@v2 @@ -74,14 +74,23 @@ jobs: name: cabal-install-check runs-on: ubuntu-latest # Be sure to update the env var below - container: ghcr.io/fossas/haskell-dev-tools:9.0.2 + container: ghcr.io/fossas/haskell-dev-tools:9.4.7 env: - GHC_VERSION: '9.0.2' + GHC_VERSION: '9.4.7' steps: - uses: actions/checkout@v3 + # For some reason newer cabal or newer ghc doesn't prioritize cli options like -Wwarn over project file -Werror. + # Because -Werror is specified in the project file, we get errors due to missing vendor binaries. + # So edit the project file temporarily to not care about warnings. + # This is what -Wwarn is supposed to do below. + # We don't really lose anything here since it's run with -Werror in other workflows. + - name: Delete ghc-options -Werror + run: | + sed -i '/ghc-options: -Werror/d' cabal.project.ci.linux + # Run `cabal install`. 
- uses: actions/cache@v3 name: Cache cabal store @@ -97,8 +106,8 @@ run: | apk add xz-dev bzip2-dev bzip2-static cabal update - # -Wwarn switch because warnings about missing vendor bins would be emitted - cabal install --project-file=cabal.project.ci.linux --ghc-options="-Wwarn" + + cabal install --project-file=cabal.project.ci.linux --ghc-options="-Wwarn" schema-lint-check: name: "schema lint check" diff --git a/.hlint.yaml b/.hlint.yaml index e1377c91b2..5ace677d51 100644 --- a/.hlint.yaml +++ b/.hlint.yaml @@ -3,6 +3,13 @@ - ignore: {name: "Redundant bracket"} - ignore: {name: "Use list comprehension"} +# Hlint 3.6.1 has a bug where typed TH splices are not recognized so it reports +# TemplateHaskell as an unused pragma: https://github.com/ndmitchell/hlint/issues/1531 +# A fix is merged, but has not yet been released. +# Remove the ignore directive when this commit makes it into a release and is part of haskell-dev-tools. +# https://github.com/ndmitchell/hlint/commit/505a4d57b972f3ba605ad7a59721cef1f3d98a84 +- ignore: {name: "Unused LANGUAGE pragma", within: [App.Version.TH, App.Version]} # Import Preferences - modules: - {name: Data.Set, as: Set, message: "Use complete name for qualified import of Set"} diff --git a/Makefile b/Makefile index 9b5cfabcf9..b258c3492e 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,7 @@ current_dir := $(dir $(abspath $(lastword $(MAKEFILE_LIST)))) FMT_OPTS := -co -XTypeApplications -o -XImportQualifiedPost FIND_OPTS := src test integration-test -type f -name '*.hs' -GHC_VERSION := 9.0.2 +GHC_VERSION := 9.4.7 DEV_TOOLS := ghcr.io/fossas/haskell-dev-tools:${GHC_VERSION} MOUNTED_DEV_TOOLS_OPTS := --rm MOUNTED_DEV_TOOLS_OPTS += --mount "type=bind,source=${current_dir},target=/fossa-cli" diff --git a/fourmolu.yaml b/fourmolu.yaml index ef44942dad..662d9c4306 100644 --- a/fourmolu.yaml +++ b/fourmolu.yaml @@ -2,7 +2,29 @@ indentation: 2 comma-style: leading # for lists, tuples etc. 
- can also be 'trailing' record-brace-space: false # rec {x = 1} vs. rec{x = 1} indent-wheres: true # 'false' means save space by only half-indenting the 'where' keyword -diff-friendly-import-export: true # 'false' uses Ormolu-style lists +import-export-style: diff-friendly respectful: true # don't be too opinionated about newlines etc. haddock-style: single-line # '--' vs. '{-' newlines-between-decls: 1 # number of newlines between top-level declarations +single-constraint-parens: auto +let-style: inline + +# The fixity of operators affects how Fourmolu formats them. +# Without this directive, we'd get really wonky corrections that would do things like turn this: +# ``` +# exprA && exprB +# ``` +# Into +# ``` +# exprA +# && exprB +# ``` +# For operator heavy code, like Aeson parsers, it would be even worse than this. +# Fourmolu says that it can automatically detect fixities in most cases, +# but I had to specify these manually to get reasonable formatting. +# Docs: https://github.com/fourmolu/fourmolu#language-extensions-dependencies-and-fixities +# If you have issues with operator formatting, usually you can find the right fixity in the haddocks. 
+fixities: + - infixl 3 <|> + - infix 4 == + - infixr 3 && diff --git a/spectrometer.cabal b/spectrometer.cabal index 211138c7f6..08e654f2b2 100644 --- a/spectrometer.cabal +++ b/spectrometer.cabal @@ -110,7 +110,7 @@ common deps , path ^>=0.9.0 , path-io ^>=1.8.0 , pretty-simple ^>=4.1.1.0 - , prettyprinter >=1.6 && <1.8 + , prettyprinter >=1.6 && <1.8 , prettyprinter-ansi-terminal ^>=1.1.1 , random ^>=1.2.0 , raw-strings-qq ^>=1.1 @@ -124,7 +124,7 @@ common deps , template-haskell , text ^>=2.0.0 , th-lift-instances ^>=0.1.17 - , time >=1.9 && <1.13 + , time >=1.9 && <1.13 , tomland ^>=1.3.3.0 , transformers , typed-process ^>=0.2.6 diff --git a/src/App/Fossa/EmbeddedBinary.hs b/src/App/Fossa/EmbeddedBinary.hs index 7dc0a9d8fb..7dfaa9afbb 100644 --- a/src/App/Fossa/EmbeddedBinary.hs +++ b/src/App/Fossa/EmbeddedBinary.hs @@ -198,7 +198,7 @@ embeddedBinaryThemisIndex :: ByteString embeddedBinaryThemisIndex = $(embedFileIfExists "vendor-bins/index.gob.xz") themisVersion :: Text -themisVersion = $$themisVersionQ +themisVersion = $$(themisVersionQ) embeddedBinaryLernie :: ByteString embeddedBinaryLernie = $(embedFileIfExists "vendor-bins/lernie") diff --git a/src/App/Fossa/VSI/Types.hs b/src/App/Fossa/VSI/Types.hs index c44a40bc36..43cb5d9727 100644 --- a/src/App/Fossa/VSI/Types.hs +++ b/src/App/Fossa/VSI/Types.hs @@ -154,15 +154,15 @@ userDefinedFetcher = "iat" isTopLevelProject :: Locator -> Bool isTopLevelProject loc = locatorFetcher loc == depTypeToFetcher CustomType --- |A path returned on inferences from sherlock-api. +-- | A path returned on inferences from sherlock-api. -- --- While 'VsiFilePath's look like filepaths, we treat them as text rather than 'FilePath' or 'Path's. --- Those packages behave based on on the platform they're compiled on, while VSI always uses unix-style paths. +-- While 'VsiFilePath's look like filepaths, we treat them as text rather than 'FilePath' or 'Path's. 
+-- Those packages behave based on the platform they're compiled on, while VSI always uses unix-style paths. newtype VsiFilePath = VsiFilePath {unVsiFilePath :: Text} deriving newtype (Eq, Ord, Show, FromJSON, ToJSON) --- |A path for a VSI rule. --- During processing we change a list of file paths to directory paths for inclusion in rules. +-- | A path for a VSI rule. +-- During processing we change a list of file paths to directory paths for inclusion in rules. newtype VsiRulePath = VsiRulePath {unVsiRulePath :: Text} deriving newtype (Eq, Ord, Show, ToJSON, ToJSONKey, ToString) @@ -220,10 +220,10 @@ generateRules inferenceBody = do Nothing -> m Just inferenceLocator' -> Map.insertWith (<>) inferenceLocator' (NE.singleton filePath) m --- |Get the shortest prefixes for every filepath in a list. --- This works by first sorting the list lexicographically and then storing only the initial path that prefixes each group of filepaths. +-- | Get the shortest prefixes for every filepath in a list. +-- This works by first sorting the list lexicographically and then storing only the initial path that prefixes each group of filepaths. -- --- Ex: ["/foo/bar/baz.c", "/foo/hello.c". "/other/dir/world.c"] -> ["/foo", "/other/dir"] +-- Ex: ["/foo/bar/baz.c", "/foo/hello.c", "/other/dir/world.c"] -> ["/foo", "/other/dir"] getPrefixes :: NE.NonEmpty VsiFilePath -> NE.NonEmpty VsiRulePath getPrefixes paths = snd . foldr accumPrefixes startVal $ (NE.tail sorted) where diff --git a/src/Container/OsRelease.hs b/src/Container/OsRelease.hs index 6ceefa23c1..331d0fe9ad 100644 --- a/src/Container/OsRelease.hs +++ b/src/Container/OsRelease.hs @@ -167,8 +167,7 @@ osReleaseParser = do properties <- propertiesParser let nameId = asum - ( map (`Map.lookup` properties) ["ID"] - ++ [Just "linux"] -- We should default to linux as last resort per spec! + ( (`Map.lookup` properties) "ID" : [Just "linux"] -- We should default to linux as last resort per spec! 
) let versionId = Map.lookup "VERSION_ID" properties diff --git a/src/Container/TarballReadFs.hs b/src/Container/TarballReadFs.hs index 5d63f02941..fa9315547f 100644 --- a/src/Container/TarballReadFs.hs +++ b/src/Container/TarballReadFs.hs @@ -288,15 +288,15 @@ runTarballReadFSIO fs tarball = interpret $ \case ReadRedactedContentsBS' file -> do mapRight Redacted <$> readContentBS fs tarball 0 file - `catchingIO` FileReadError (toString file) + `catchingIO` FileReadError (toString file) ReadRedactedContentsBSLimit' file limit -> do mapRight Redacted <$> readContentsBSLimit fs tarball file limit - `catchingIO` FileReadError (toString file) + `catchingIO` FileReadError (toString file) ReadRedactedContentsText' file -> do mapRight Redacted <$> readContentText fs tarball file - `catchingIO` FileReadError (toString file) + `catchingIO` FileReadError (toString file) ResolveFile' dir path -> resolveFile fs tarball dir path `catchingIO` ResolveError (toString dir) (toString path) diff --git a/src/Control/Carrier/FossaApiClient/Internal/FossaAPIV1.hs b/src/Control/Carrier/FossaApiClient/Internal/FossaAPIV1.hs index 886931da01..15547e9e38 100644 --- a/src/Control/Carrier/FossaApiClient/Internal/FossaAPIV1.hs +++ b/src/Control/Carrier/FossaApiClient/Internal/FossaAPIV1.hs @@ -604,7 +604,7 @@ uploadNativeContainerScan apiOpts ProjectRevision{..} metadata scan = fossaReq $ resp <- req POST (containerUploadUrl baseUrl) (ReqBodyJson scan) jsonResponse (baseOpts <> opts) pure $ responseBody resp --- |Replacement for @Data.HTTP.Req.req@ that additionally logs information about a request in a debug bundle. +-- | Replacement for @Data.HTTP.Req.req@ that additionally logs information about a request in a debug bundle. req :: forall method body sig m scheme b. 
( Req.HttpBodyAllowed (Req.AllowsBody method) (Req.ProvidesBody body) diff --git a/src/Data/Rpm/DbHeaderBlob.hs b/src/Data/Rpm/DbHeaderBlob.hs index 0503865e67..4c41d12590 100644 --- a/src/Data/Rpm/DbHeaderBlob.hs +++ b/src/Data/Rpm/DbHeaderBlob.hs @@ -1,5 +1,5 @@ -- | ---Description: Functions for reading RPM information out of binary package headers +-- Description: Functions for reading RPM information out of binary package headers module Data.Rpm.DbHeaderBlob ( PkgInfo (..), PkgConversionError (..), diff --git a/src/Data/Rpm/DbHeaderBlob/Internal.hs b/src/Data/Rpm/DbHeaderBlob/Internal.hs index c74ebe71a9..0c2ee8954a 100644 --- a/src/Data/Rpm/DbHeaderBlob/Internal.hs +++ b/src/Data/Rpm/DbHeaderBlob/Internal.hs @@ -283,7 +283,7 @@ getV3RegionCount entryMetadatas dataLength dataStart blobData = do trd :: (a, b, c) -> c trd (_, _, c) = c --- |'runGetOrFail' but don't include parse locations/remaining data in output +-- | 'runGetOrFail' but don't include parse locations/remaining data in output runGetOrFail' :: Get d -> BLS.ByteString -> Either String d runGetOrFail' r = bimap trd trd . 
runGetOrFail r diff --git a/src/Data/String/Conversion.hs b/src/Data/String/Conversion.hs index 4f5ac4edf6..6df16933f5 100644 --- a/src/Data/String/Conversion.hs +++ b/src/Data/String/Conversion.hs @@ -72,10 +72,10 @@ instance ToText Text.Text where instance ToText TL.Text where toText = TL.toStrict -instance TypeError ( 'Text "Error: Use encodeUtf8/decodeUtf8 instead") => ToText BS.ByteString where +instance TypeError ('Text "Error: Use encodeUtf8/decodeUtf8 instead") => ToText BS.ByteString where toText = error "unreachable" -instance TypeError ( 'Text "Error: Use encodeUtf8/decodeUtf8 instead") => ToText BL.ByteString where +instance TypeError ('Text "Error: Use encodeUtf8/decodeUtf8 instead") => ToText BL.ByteString where toText = error "unreachable" instance ToText (Path b t) where @@ -89,8 +89,8 @@ instance ToText (SomeBase t) where instance ToText Key where toText = Key.toText --- |Avoid this function in favor of using 'toText' or some other direct conversion if possible. --- Unfortunately sometimes this is the best way to convert something to text. +-- | Avoid this function in favor of using 'toText' or some other direct conversion if possible. +-- Unfortunately sometimes this is the best way to convert something to text. showText :: Show a => a -> Text.Text showText = toText . 
show @@ -108,10 +108,10 @@ instance ToLText Text.Text where instance ToLText TL.Text where toLText = id -instance TypeError ( 'Text "Error: Use encodeUtf8/decodeUtf8 instead") => ToLText BS.ByteString where +instance TypeError ('Text "Error: Use encodeUtf8/decodeUtf8 instead") => ToLText BS.ByteString where toLText = error "unreachable" -instance TypeError ( 'Text "Error: Use encodeUtf8/decodeUtf8 instead") => ToLText BL.ByteString where +instance TypeError ('Text "Error: Use encodeUtf8/decodeUtf8 instead") => ToLText BL.ByteString where toLText = error "unreachable" instance ToLText (Path b t) where @@ -128,10 +128,10 @@ instance ToString Text.Text where instance ToString TL.Text where toString = TL.unpack -instance TypeError ( 'Text "Error: Use decodeUtf8 instead") => ToString BS.ByteString where +instance TypeError ('Text "Error: Use decodeUtf8 instead") => ToString BS.ByteString where toString = error "unreachable" -instance TypeError ( 'Text "Error: Use decodeUtf8 instead") => ToString BL.ByteString where +instance TypeError ('Text "Error: Use decodeUtf8 instead") => ToString BL.ByteString where toString = error "unreachable" instance ToString (Path b t) where diff --git a/src/Discovery/Archive.hs b/src/Discovery/Archive.hs index 4b36fe25f6..4e092acfa6 100644 --- a/src/Discovery/Archive.hs +++ b/src/Discovery/Archive.hs @@ -98,7 +98,7 @@ discover go dir renderAncestry = context "Finding archives" $ do traverse_ (\file -> forkTask $ withArchive' file (process file)) files pure WalkContinue --- |Given a file extension, return an extraction function for that file type. +-- | Given a file extension, return an extraction function for that file type. 
selectUnarchiver :: Has (Lift IO) sig m => String -> Maybe (Path Abs Dir -> Path Abs File -> m ()) selectUnarchiver file | ".tar" `isSuffixOf` file = Just extractTar diff --git a/src/Effect/Grapher.hs b/src/Effect/Grapher.hs index 077ec95a88..825995785b 100644 --- a/src/Effect/Grapher.hs +++ b/src/Effect/Grapher.hs @@ -131,17 +131,17 @@ withLabeling f act = do (graph, (labels, _)) <- runGrapher . runState Map.empty $ act pure (unlabel f labels graph) --- |This function will run a 'LabeledGrapherC', returning the graph as well as the labels with no post-processing. +-- | This function will run a 'LabeledGrapherC', returning the graph as well as the labels with no post-processing. -- --- Occasionally it isn't possible to transform a graph node and its labels in isolation like 'withLabeling' does. --- Suppose 'withLabeling' were given a function from =(ty -> Set lbl -> res)= which maps several =ty= to one =res=. --- There could be multiple =res='s which should be the same, but aren't because each was created from a different =ty= and its associated =res=s. +-- Occasionally it isn't possible to transform a graph node and its labels in isolation like 'withLabeling' does. +-- Suppose 'withLabeling' were given a function from =(ty -> Set lbl -> res)= which maps several =ty= to one =res=. +-- There could be multiple =res='s which should be the same, but aren't because each was created from a different =ty= and its associated =res=s. -- --- A concrete example of this is in the GoListPackages tactic. --- Multiple packages may be associated with production or development envs, but all map to a single module. --- It is necessary then to collect the labels from multiple packages (=ty=) before producing the final 'Dependency' (=res=). +-- A concrete example of this is in the GoListPackages tactic. +-- Multiple packages may be associated with production or development envs, but all map to a single module. 
+-- It is necessary then to collect the labels from multiple packages (=ty=) before producing the final 'Dependency' (=res=). -- --- Prefer 'withLabeling' if it is enough to examine each =ty= and its set of labels in isolation. +-- Prefer 'withLabeling' if it is enough to examine each =ty= and its set of labels in isolation. runLabeledGrapher :: (Ord ty, Algebra sig m) => LabeledGrapherC ty lbl m a -> m (G.Graphing ty, Labels ty lbl) runLabeledGrapher act = do (graph, (labels, _)) <- runGrapher . runState Map.empty $ act diff --git a/src/Effect/ReadFS.hs b/src/Effect/ReadFS.hs index 72db9a097f..860951b899 100644 --- a/src/Effect/ReadFS.hs +++ b/src/Effect/ReadFS.hs @@ -383,15 +383,15 @@ runReadFSIO = interpret $ \case ReadRedactedContentsBS' file -> do mapRight Redacted <$> BS.readFile (toString file) - `catchingIO` FileReadError (toString file) + `catchingIO` FileReadError (toString file) ReadRedactedContentsBSLimit' file limit -> do mapRight Redacted <$> readContentsBSLimitIO file limit - `catchingIO` FileReadError (toString file) + `catchingIO` FileReadError (toString file) ReadRedactedContentsText' file -> do mapRight Redacted <$> (decodeUtf8 <$> BS.readFile (toString file)) - `catchingIO` FileReadError (toString file) + `catchingIO` FileReadError (toString file) ResolveFile' dir path -> do PIO.resolveFile dir (toString path) `catchingIO` ResolveError (toString dir) (toString path) diff --git a/src/Srclib/Types.hs b/src/Srclib/Types.hs index bee486e66f..cb4743a9a4 100644 --- a/src/Srclib/Types.hs +++ b/src/Srclib/Types.hs @@ -48,13 +48,13 @@ instance ToText LicenseScanType where instance ToJSON LicenseScanType where toJSON = toJSON . toText --- |This type is meant to represent the paths in a project where a particular set of dependencies were discovered. --- For example, in a project directory that has a @go.mod@ the OriginPath might be 'foo/bar/go.mod'. 
--- In a project with VSI dependencies the OriginPath would be the directory the dep was found in, such as 'vendored/zlib/' +-- | This type is meant to represent the paths in a project where a particular set of dependencies were discovered. +-- For example, in a project directory that has a @go.mod@ the OriginPath might be 'foo/bar/go.mod'. +-- In a project with VSI dependencies the OriginPath would be the directory the dep was found in, such as 'vendored/zlib/' -- --- OriginPaths were previously `SomeBase File`, however with support for VSI OriginPaths can now be a directory in addition to a file path. --- The reason we cannot use `SomePath` for this is that outputting an `OriginPath` to JSON in a form like @/foo/bar/path_end@ doesn't say whether the path is a file or directory which is required for parsing to a `SomePath` in `FromJSON`. --- This type and its exported smart constructors describe that OriginPath is a path, but has no information about whether the path is a file or directory. +-- OriginPaths were previously `SomeBase File`, however with support for VSI OriginPaths can now be a directory in addition to a file path. +-- The reason we cannot use `SomePath` for this is that outputting an `OriginPath` to JSON in a form like @/foo/bar/path_end@ doesn't say whether the path is a file or directory which is required for parsing to a `SomePath` in `FromJSON`. +-- This type and its exported smart constructors describe that OriginPath is a path, but has no information about whether the path is a file or directory. newtype OriginPath = OriginPath FilePath deriving newtype (Eq, Ord, Show, ToJSON, FromJSON, ToText) diff --git a/src/Strategy/BerkeleyDB/Internal.hs b/src/Strategy/BerkeleyDB/Internal.hs index a3aac2517a..c76d92d223 100644 --- a/src/Strategy/BerkeleyDB/Internal.hs +++ b/src/Strategy/BerkeleyDB/Internal.hs @@ -49,8 +49,8 @@ readBerkeleyDB file = withBerkeleyBinary $ \bin -> do -- Handle the JSON response. 
(bdbJsonOutput :: [Text]) <- context "read raw blobs" . execJson' cwd (bdbCommand bin) . decodeUtf8 $ B64.encode fileContent - bdbByteOutput <- context "decode base64" . traverse fromEitherShow $ B64.decode <$> fmap encodeUtf8 bdbJsonOutput - entries <- context "parse blobs" . traverse fromEitherShow $ readPackageInfo <$> fmap BSL.fromStrict bdbByteOutput + bdbByteOutput <- context "decode base64" (traverse (fromEitherShow . B64.decode . encodeUtf8) bdbJsonOutput) + entries <- context "parse blobs" (traverse (fromEitherShow . readPackageInfo . BSL.fromStrict) bdbByteOutput) context "parse package info" $ traverse parsePkgInfo entries bdbCommand :: BinaryPaths -> Command diff --git a/src/Strategy/Bundler.hs b/src/Strategy/Bundler.hs index b9033b5222..8edd64bdf8 100644 --- a/src/Strategy/Bundler.hs +++ b/src/Strategy/Bundler.hs @@ -100,10 +100,8 @@ findLicenses gemspecPath = do pure [LicenseResult gemSpecFp (License UnknownType <$> licenses)] where isLicenseKey Assignment{label = label} = - "license" - `isSuffixOf` label - || "licenses" - `isSuffixOf` label + "license" `isSuffixOf` label + || "licenses" `isSuffixOf` label gemSpecFp = toFilePath gemspecPath mkProject :: BundlerProject -> DiscoveredProject BundlerProject diff --git a/src/Strategy/Cargo.hs b/src/Strategy/Cargo.hs index 7bbb483257..89abb48283 100644 --- a/src/Strategy/Cargo.hs +++ b/src/Strategy/Cargo.hs @@ -229,25 +229,25 @@ cargoPackageCodec = <$> dioptional (Toml.text "license") .= license <*> dioptional (Toml.string "license-file") .= cargoLicenseFile --- |Representation of a Cargo.toml file. See --- [here](https://doc.rust-lang.org/cargo/reference/manifest.html) --- for a description of this format. +-- | Representation of a Cargo.toml file. See +-- [here](https://doc.rust-lang.org/cargo/reference/manifest.html) +-- for a description of this format. 
newtype CargoToml = CargoToml {cargoPackage :: CargoPackage} deriving (Eq, Show) cargoTomlCodec :: TomlCodec CargoToml cargoTomlCodec = diwrap (Toml.table cargoPackageCodec "package") --- ^^ The above is a bit obscure. It's generating a TomlCodec CargoPackage and --- then using 'diwrap'/Coercible to make a TomlCodec CargoToml. I can't use --- 'CargoToml <$>' because TomlCodec aliases (Codec a a) and only (Codec a) --- has a Functor instance, so I'd end up with a (Codec CargoPackage CargoToml). +-- ^ ^ The above is a bit obscure. It's generating a TomlCodec CargoPackage and +-- then using 'diwrap'/Coercible to make a TomlCodec CargoToml. I can't use +-- 'CargoToml <$>' because TomlCodec aliases (Codec a a) and only (Codec a) +-- has a Functor instance, so I'd end up with a (Codec CargoPackage CargoToml). instance LicenseAnalyzeProject CargoProject where licenseAnalyzeProject = analyzeLicenses . cargoToml --- |Analyze a Cargo.toml for license information. The format is documented --- (here)[https://doc.rust-lang.org/cargo/reference/manifest.html#the-license-and-license-file-fields] +-- | Analyze a Cargo.toml for license information. The format is documented +-- (here)[https://doc.rust-lang.org/cargo/reference/manifest.html#the-license-and-license-file-fields] analyzeLicenses :: (Has ReadFS sig m, Has Diagnostics sig m) => Path Abs File -> m [LicenseResult] analyzeLicenses tomlPath = do pkg <- cargoPackage <$> readContentsToml cargoTomlCodec tomlPath diff --git a/src/Strategy/Cocoapods.hs b/src/Strategy/Cocoapods.hs index 616d279f54..ce3eb9e8fa 100644 --- a/src/Strategy/Cocoapods.hs +++ b/src/Strategy/Cocoapods.hs @@ -83,9 +83,9 @@ instance AnalyzeProject CocoapodsProject where instance LicenseAnalyzeProject CocoapodsProject where licenseAnalyzeProject = traverse readLicense . cocoapodsSpecFiles --- |For now, if the 'license' assignment statement is a dictionary this --- code only extracts the value in the `:type` key. 
It also only looks at --- the first `license` assignment it finds in the spec file. +-- | For now, if the 'license' assignment statement is a dictionary this +-- code only extracts the value in the `:type` key. It also only looks at +-- the first `license` assignment it finds in the spec file. readLicense :: (Has ReadFS sig m, Has Diagnostics sig m) => Path Abs File -> m LicenseResult readLicense specFile = do assignments <- readContentsParser (readAssignments podspecAssignmentValuesP) specFile diff --git a/src/Strategy/Elixir/MixTree.hs b/src/Strategy/Elixir/MixTree.hs index 1a6483fdb3..badc3d9c30 100644 --- a/src/Strategy/Elixir/MixTree.hs +++ b/src/Strategy/Elixir/MixTree.hs @@ -219,7 +219,7 @@ parseDepSCM = try parseDepHex <|> parseDepSCMGit <|> parseDepSCMOther -- | True if a version is not resolved in `MixDepResolved`, otherwise False. -- This can happen, if dependencies are not retrieved or compiled. missingResolvedVersions :: Map PackageName MixDepResolved -> Bool -missingResolvedVersions mdr = any isNothing (depResolvedVersion <$> map snd (Map.toList mdr)) +missingResolvedVersions mdr = any (isNothing . depResolvedVersion . snd) (Map.toList mdr) -- Parses `mix deps` output mixDepsCmdOutputParser :: Parser (Map PackageName MixDepResolved) diff --git a/src/Strategy/Go/GoListPackages.hs b/src/Strategy/Go/GoListPackages.hs index d4a0e1eeff..84e28572b0 100644 --- a/src/Strategy/Go/GoListPackages.hs +++ b/src/Strategy/Go/GoListPackages.hs @@ -58,7 +58,7 @@ import Types (GraphBreadth (Complete)) -- * Types --- |Path used in a Go project to import a package. +-- | Path used in a Go project to import a package. newtype ImportPath = ImportPath Text deriving (Eq, Ord, Show, ToText, Generic, Hashable) @@ -100,7 +100,7 @@ instance FromJSON GoPackage where <*> obj .:? "Error" <*> (obj .:? 
"TestImports" .!= []) --- |Get module info from a 'GoPackage', respecting replacements +-- | Get module info from a 'GoPackage', respecting replacements getFinalModuleInfo :: GoPackage -> Maybe GoModule getFinalModuleInfo GoPackage{moduleInfo} = (moduleInfo >>= replacement) <|> moduleInfo @@ -287,7 +287,7 @@ toVerConstraint v = case v of type Parser = Parsec Void Text --- |Convenience function to parse a go module version that is potentially surrounded by spaces. +-- | Convenience function to parse a go module version that is potentially surrounded by spaces. toGoModVersion :: Text -> Maybe PackageVersion toGoModVersion modVersion = case parse (parsePackageVersion lexeme) "go module version" modVersion of Left err -> fail $ errorBundlePretty err diff --git a/src/Strategy/Maven/Pom/PomFile.hs b/src/Strategy/Maven/Pom/PomFile.hs index 4702fd9231..df52638790 100644 --- a/src/Strategy/Maven/Pom/PomFile.hs +++ b/src/Strategy/Maven/Pom/PomFile.hs @@ -189,15 +189,15 @@ instance FromXML RawPom where <*> optional (child "version" el) <*> optional (child "name" el) <*> optional (child "properties" el) - `defaultsTo` Map.empty + `defaultsTo` Map.empty <*> optional (child "modules" el >>= children "module") - `defaultsTo` [] + `defaultsTo` [] <*> optional (child "dependencyManagement" el >>= children "dependency") - `defaultsTo` [] + `defaultsTo` [] <*> optional (child "dependencies" el >>= children "dependency") - `defaultsTo` [] + `defaultsTo` [] <*> optional (child "licenses" el >>= children "license") - `defaultsTo` [] + `defaultsTo` [] instance FromXML RawParent where -- TODO: move this documentation diff --git a/src/Strategy/NDB/Internal.hs b/src/Strategy/NDB/Internal.hs index f91e67fc6c..28b7d6d310 100644 --- a/src/Strategy/NDB/Internal.hs +++ b/src/Strategy/NDB/Internal.hs @@ -39,7 +39,7 @@ readNDB :: (Has Diagnostics sig m, Has ReadFS sig m) => Path Abs File -> m [NdbE readNDB file = do contents <- readContentsBS file blobs <- fromEitherParser $ BSL.fromStrict <$$> 
runParser parseNDB (show file) contents - entries <- traverse fromEitherShow $ readPackageInfo <$> blobs + entries <- traverse (fromEitherShow . readPackageInfo) blobs pure $ mapMaybe parsePkgInfo entries where -- FOSSA _requires_ that architecture is provided: https://github.com/fossas/FOSSA/blob/e61713dec1ef80dc6b6114f79622c14df5278235/modules/fetchers/README.md#locators-for-linux-packages diff --git a/src/Strategy/Node/Npm/PackageLock.hs b/src/Strategy/Node/Npm/PackageLock.hs index 72b08472ed..a3dfaf4af1 100644 --- a/src/Strategy/Node/Npm/PackageLock.hs +++ b/src/Strategy/Node/Npm/PackageLock.hs @@ -123,7 +123,7 @@ analyze file flatdeps workspacePackages = context "Analyzing Npm Lockfile" $ do Set.map pkgName $ (unTag @Production $ directDeps flatdeps) <> (unTag @Development $ devDeps flatdeps) --- |Node in the package-lock.json dep graph +-- | Node in the package-lock.json dep graph data NpmDepVertex = NpmDepVertex { lockName :: Text , lockVersion :: Text @@ -135,14 +135,14 @@ type NpmGrapher = LabeledGrapher NpmDepVertex NpmDepVertexLabel data NpmDepVertexLabel = NpmDepVertexEnv DepEnvironment | NpmDepVertexLocation Text deriving (Eq, Ord, Show) --- |The @packages@ object contains keys which are file paths to a package npm --- downloaded to @node_modules@. This function will adjust map keys to be names --- like in the @dependencies@ key by stripping out path components besides the final one.. +-- | The @packages@ object contains keys which are file paths to a package npm +-- downloaded to @node_modules@. This function will adjust map keys to be names +-- like in the @dependencies@ key by stripping out path components besides the final one.. -- --- When npm installs a dep inside of another dep (for version conflicts) we get the string --- @node_modules/a/node_modules/b@. We actually don't want to do sub-package matching here, --- so we only drop the first @node_modules/@, and if it has a sub-package, then we just --- won't ever query for that key. 
+-- When npm installs a dep inside of another dep (for version conflicts) we get the string +-- @node_modules/a/node_modules/b@. We actually don't want to do sub-package matching here, +-- so we only drop the first @node_modules/@, and if it has a sub-package, then we just +-- won't ever query for that key. packagePathsToNames :: Map Text a -> Map Text a packagePathsToNames = Map.mapKeys (TE.dropPrefix "node_modules/") diff --git a/src/Strategy/NuGet/Nuspec.hs b/src/Strategy/NuGet/Nuspec.hs index ca1bc3c242..6c6e342b77 100644 --- a/src/Strategy/NuGet/Nuspec.hs +++ b/src/Strategy/NuGet/Nuspec.hs @@ -152,7 +152,7 @@ instance FromXML Nuspec where metadata <- child "metadata" el Nuspec <$> optional (child "dependencies" metadata >>= children "group") - `defaultsTo` [] + `defaultsTo` [] <*> children "license" metadata <*> optional (child "licenseUrl" metadata) @@ -160,7 +160,7 @@ instance FromXML NuspecLicense where parseElement el = NuspecLicense <$> optional (attr "type" el) - `defaultsTo` "" + `defaultsTo` "" <*> content el instance FromXML Group where diff --git a/src/Strategy/Ruby/Parse.hs b/src/Strategy/Ruby/Parse.hs index 264addfa08..45296b71ae 100644 --- a/src/Strategy/Ruby/Parse.hs +++ b/src/Strategy/Ruby/Parse.hs @@ -27,7 +27,7 @@ import Text.Megaparsec.Char (char, space1, string) type Parser = Parsec Void Text --- |Given a single start delimiter, return start/end delimiters +-- | Given a single start delimiter, return start/end delimiters selectDelim :: Char -> (Char, Char) selectDelim = \case '{' -> ('{', '}') @@ -53,33 +53,33 @@ betweenDelim (d1, d2) = -- escaped ending delimiter before 'between' sees it and stops parsing. delimEscape = string $ "\\" <> toText d2 --- |This is a parser for a ruby string literal. The strings it parses could look --- these: +-- | This is a parser for a ruby string literal. 
The strings it parses could look +-- these: -- --- > "foo" --- > 'foo' +-- > "foo" +-- > 'foo' -- --- The ending ' or " can be escaped within the string by '\' +-- The ending ' or " can be escaped within the string by '\' -- --- > %q{foo} --- > %Q{foo} --- > %#foo# --- > %^foo^ +-- > %q{foo} +-- > %Q{foo} +-- > %#foo# +-- > %^foo^ -- --- The character after the 'Q' or '%' in the above examples is the delimiter --- for the string. If that character is '{', '(', '<', or '[' then its --- matching right-hand side is the closing delimiter. As with quotes, the --- ending delimiter can be escaped using '\' +-- The character after the 'Q' or '%' in the above examples is the delimiter +-- for the string. If that character is '{', '(', '<', or '[' then its +-- matching right-hand side is the closing delimiter. As with quotes, the +-- ending delimiter can be escaped using '\' -- --- Beyond the above, this parser also will consume and ignore '.freeze' or --- '.freeze()' that appears at the end of the string. It is a ruby idiom that --- turns a string into an immutable version of itself. It appears in some --- gemspec files and is safe to ignore. +-- Beyond the above, this parser also will consume and ignore '.freeze' or +-- '.freeze()' that appears at the end of the string. It is a ruby idiom that +-- turns a string into an immutable version of itself. It appears in some +-- gemspec files and is safe to ignore. -- --- An edge-case I ignore here is string interpolation. Ruby allows strings --- to contain text like `#{}`. The `` is evaluated and inserted --- into the string in place of the interpolation text. This parser ignores --- these and treats them like regular text. +-- An edge-case I ignore here is string interpolation. Ruby allows strings +-- to contain text like `#{}`. The `` is evaluated and inserted +-- into the string in place of the interpolation text. This parser ignores +-- these and treats them like regular text. 
rubyString :: Parser Text rubyString = stringText <* optional freezeMethod where @@ -99,13 +99,13 @@ data Assignment a = Assignment } deriving (Eq, Show) --- |Parser for a single assignment statement of the form: +-- | Parser for a single assignment statement of the form: -- --- > righthand.side = some-value +-- > righthand.side = some-value -- --- whitespace around the '=' is ignored. ' = ' portion of the assignment --- must appear on one line. The right-hand side is taken care of by the argument --- parser. +-- whitespace around the '=' is ignored. ' = ' portion of the assignment +-- must appear on one line. The right-hand side is taken care of by the argument +-- parser. parseRubyAssignment :: -- | Parser for the right-hand side of an assignment Parser a -> @@ -116,8 +116,8 @@ parseRubyAssignment rhs = Assignment <$> (labelP <* lexeme (char '=')) <*> value labelP = takeWhile1P Nothing (\c -> c /= '=' && not (isSpace c)) valueP = rhs --- |Consume 0 or more spaces or comments. A comment in ruby starts with --- '#' and extends to the end of the line. +-- | Consume 0 or more spaces or comments. A comment in ruby starts with +-- '#' and extends to the end of the line. rubySpc :: Parser () rubySpc = many (space1 <|> commentP) $> () where @@ -139,15 +139,15 @@ parseRubyArray p = char '[' *> sepBy (lexeme p) (char ',') <* char ']' newtype Symbol = Symbol {unSymbol :: Text} deriving (Show, Eq) --- |Parses a ruby symbol. Ex: --- > :this_is_a_symbol --- > :"this is also a symbol" --- > :'single quote symbol' +-- | Parses a ruby symbol. Ex: +-- > :this_is_a_symbol +-- > :"this is also a symbol" +-- > :'single quote symbol' -- --- The top-most example stops when a space or '=>' appears. There are likely --- other strings that should stop the parsing of a keyword in this case, but --- this parser is designed specifically for usages where the keyword is used --- as the key in a Ruby dictionary. +-- The top-most example stops when a space or '=>' appears. 
There are likely +-- other strings that should stop the parsing of a keyword in this case, but +-- this parser is designed specifically for usages where the keyword is used +-- as the key in a Ruby dictionary. parseRubySymbol :: Parser Symbol parseRubySymbol = Symbol @@ -169,11 +169,11 @@ parseRubySymbol = doubleQuoteSymbol = betweenDelim ('"', '"') singleQuoteSymbol = betweenDelim ('\'', '\'') --- |Parse a dictionary of the form: +-- | Parse a dictionary of the form: -- --- > { :key => val, :key2 => val2 } +-- > { :key => val, :key2 => val2 } -- --- The keys in the text should be symbols that 'parseRubySymbol' can parse. +-- The keys in the text should be symbols that 'parseRubySymbol' can parse. parseRubyDict :: Parser a -> Parser [(Symbol, a)] parseRubyDict rhs = between (char '{') (char '}') (sepBy keyValParse $ char ',') where @@ -193,15 +193,15 @@ podspecAssignmentValuesP :: Parser PodSpecAssignmentValue podspecAssignmentValuesP = (PodspecStr <$> rubyString) <|> (PodspecDict <$> parseRubyDict rubyString) --- |Ruby has a special syntax for making an array of strings that looks like --- these examples: +-- | Ruby has a special syntax for making an array of strings that looks like +-- these examples: -- --- > %w(foo bar) --- > %W[foo bar baz] +-- > %w(foo bar) +-- > %W[foo bar baz] -- --- This is interpreted as an array of strings. The delimiter after the 'w' can --- be arbitrary as with '%q'. The 'W' variant also allows interpolation, but as --- with 'rubyString' these are treated as ordinary text. +-- This is interpreted as an array of strings. The delimiter after the 'w' can +-- be arbitrary as with '%q'. The 'W' variant also allows interpolation, but as +-- with 'rubyString' these are treated as ordinary text. parseRubyWordsArray :: Parser [Text] parseRubyWordsArray = do (d1, d2) <- parsePrefix @@ -221,9 +221,9 @@ parseRubyWordsArray = do *> lookAhead anySingle ) --- |Try to parse any value that could potentially be a license. 
--- This parser only works for licenses that are a string literal or an --- array of string literals. +-- | Try to parse any value that could potentially be a license. +-- This parser only works for licenses that are a string literal or an +-- array of string literals. gemspecLicenseValuesP :: Parser [Text] gemspecLicenseValuesP = rubyArrayP <|> (singleton <$> rubyString) where diff --git a/src/Strategy/Sqlite.hs b/src/Strategy/Sqlite.hs index 9d1e4a345c..a5fd309881 100644 --- a/src/Strategy/Sqlite.hs +++ b/src/Strategy/Sqlite.hs @@ -135,7 +135,7 @@ readSqliteDBPackages sqlDbFile = writeTempFileAndFetchPkgRows :: Has (Lift IO) sig m => - -- |Bytestring for a sqlite package database + -- | Bytestring for a sqlite package database BS.ByteString -> m [(Int64, BS.ByteString)] writeTempFileAndFetchPkgRows sqliteBlob = diff --git a/test/App/Fossa/Container/AnalyzeNativeSpec.hs b/test/App/Fossa/Container/AnalyzeNativeSpec.hs index d7a1b5c435..8ebc81512f 100644 --- a/test/App/Fossa/Container/AnalyzeNativeSpec.hs +++ b/test/App/Fossa/Container/AnalyzeNativeSpec.hs @@ -111,9 +111,7 @@ spec = do buildImportsOf :: ContainerScan -> [Locator] buildImportsOf scan = concatMap buildImports $ - mapMaybe - sourceUnitBuild - (concatMap srcUnits $ imageLayers . imageData $ scan) + concatMap (mapMaybe sourceUnitBuild . srcUnits) (imageLayers . 
imageData $ scan) exampleImgWithoutTag :: Text exampleImgWithoutTag = "redis" diff --git a/test/Cargo/CargoTomlSpec.hs b/test/Cargo/CargoTomlSpec.hs index ffa173c583..566fc76774 100644 --- a/test/Cargo/CargoTomlSpec.hs +++ b/test/Cargo/CargoTomlSpec.hs @@ -51,8 +51,8 @@ cargoProject baseDir = , cargoToml = baseDir $(mkRelFile "Cargo.toml") } --- |The license-related fields for a Cargo.toml file are documented --- [here](https://doc.rust-lang.org/cargo/reference/manifest.html#the-license-and-license-file-fields) +-- | The license-related fields for a Cargo.toml file are documented +-- [here](https://doc.rust-lang.org/cargo/reference/manifest.html#the-license-and-license-file-fields) licenseSpecs :: Spec licenseSpecs = do currentDir <- runIO getCurrentDir diff --git a/test/Data/RpmDbHeaderBlobSpec.hs b/test/Data/RpmDbHeaderBlobSpec.hs index af7a4055b6..048f22f058 100644 --- a/test/Data/RpmDbHeaderBlobSpec.hs +++ b/test/Data/RpmDbHeaderBlobSpec.hs @@ -2,7 +2,6 @@ module Data.RpmDbHeaderBlobSpec (spec) where import Data.Bifunctor (first) import Data.ByteString.Lazy qualified as BLS -import Data.Either (fromRight) import Data.Int (Int32) import Data.List (isSuffixOf) import Data.List.NonEmpty qualified as NonEmpty @@ -270,7 +269,7 @@ headerBlobSpec bs = describe "header blob parsing" $ do it "Parses entries" $ do -- this database is large, so we'll only check the first 2 entries - let entries = fromRight [] $ (NonEmpty.take 2 . entryMetadatas) <$> eBlob + let entries = either (const []) (NonEmpty.take 2 . entryMetadatas) eBlob entries `shouldMatchList` [ EntryMetadata @@ -304,12 +303,9 @@ headerBlobErrSpec = let checkErr (size, offset) suffix res = case res of Left (size', offset', errStr) -> - size' - == size - && offset' - == offset - && suffix - `isSuffixOf` errStr + size' == size + && offset' == offset + && suffix `isSuffixOf` errStr _ -> False it "Should report failure when parsing nonexistent index count" $