diff --git a/rocky/rocky.go b/rocky/rocky.go
index 7612bbef..596f025e 100644
--- a/rocky/rocky.go
+++ b/rocky/rocky.go
@@ -5,7 +5,6 @@ import (
 	"bytes"
 	"compress/gzip"
 	"encoding/xml"
-	"errors"
 	"fmt"
 	"io"
 	"log"
@@ -13,7 +12,10 @@ import (
 	"os"
 	"path"
 	"path/filepath"
+	"regexp"
+	"sort"
 	"strings"
+	"time"
 
 	"github.com/aquasecurity/vuln-list-update/utils"
 	"github.com/cheggaaa/pb/v3"
@@ -30,8 +32,8 @@ const (
 )
 
 var (
-	defaultURL      = "https://download.rockylinux.org/pub/rocky/%s/%s/%s/os/"
-	defaultReleases = []string{"8"}
+	defaultURL      = "https://download.rockylinux.org/pub/rocky/%s/%s/%s/os/repodata"
+	defaultReleases = map[string][]string{"8": {"8.3", "8.4", "8.5"}}
 	defaultRepos    = []string{"BaseOS", "AppStream", "extras"}
 	defaultArches   = []string{"x86_64", "aarch64"}
 )
@@ -54,11 +56,11 @@ type Location struct {
 
 // UpdateInfo has a list
 type UpdateInfo struct {
-	RLSAList []RLSA `xml:"update"`
+	Advisories []Advisory `xml:"update"`
 }
 
-// RLSA has detailed data of Rocky Linux Security Advisory
-type RLSA struct {
+// Advisory has detailed data of Rocky Linux Advisory
+type Advisory struct {
 	ID          string      `xml:"id" json:"id,omitempty"`
 	Title       string      `xml:"title" json:"title,omitempty"`
 	Issued      Date        `xml:"issued" json:"issued,omitempty"`
@@ -120,14 +122,14 @@ type options struct {
 	concurrency int
 	wait        int
 	retry       int
-	releases    []string
+	releases    map[string][]string
 	repos       []string
 	arches      []string
 }
 
 type option func(*options)
 
-func With(url string, dir string, concurrency, wait, retry int, releases, repos, arches []string) option {
+func With(url string, dir string, concurrency, wait, retry int, releases map[string][]string, repos, arches []string) option {
 	return func(opts *options) {
 		opts.url = url
 		opts.dir = dir
@@ -165,12 +167,12 @@ func NewConfig(opts ...option) Config {
 }
 
 func (c Config) Update() error {
-	for _, release := range c.releases {
+	for majorVer, releases := range c.releases {
 		for _, repo := range c.repos {
 			for _, arch := range c.arches {
-				log.Printf("Fetching Rocky Linux %s %s %s data...", release, repo, arch)
-				if err := c.update(release, repo, arch); err != nil {
-					return xerrors.Errorf("failed to update security advisories of Rocky Linux %s %s %s: %w", release, repo, arch, err)
+				log.Printf("Fetching Rocky Linux %s %s %s data...", majorVer, repo, arch)
+				if err := c.update(majorVer, releases, repo, arch); err != nil {
+					return xerrors.Errorf("failed to update security advisories of Rocky Linux %s %s %s: %w", majorVer, repo, arch, err)
 				}
 			}
 		}
@@ -178,70 +180,86 @@ func (c Config) Update() error {
 	}
 	return nil
 }
 
-func (c Config) update(release, repo, arch string) error {
-	dirPath := filepath.Join(c.dir, release, repo, arch)
-	log.Printf("Remove Rocky Linux %s %s %s directory %s", release, repo, arch, dirPath)
+func (c Config) update(majorVer string, releases []string, repo, arch string) error {
+	dirPath := filepath.Join(c.dir, majorVer, repo, arch)
+	log.Printf("Remove Rocky Linux %s %s %s directory %s", majorVer, repo, arch, dirPath)
 	if err := os.RemoveAll(dirPath); err != nil {
-		return xerrors.Errorf("failed to remove Rocky Linux %s %s %s directory: %w", release, repo, arch, err)
+		return xerrors.Errorf("failed to remove Rocky Linux %s %s %s directory: %w", majorVer, repo, arch, err)
 	}
 	if err := os.MkdirAll(dirPath, os.ModePerm); err != nil {
 		return xerrors.Errorf("failed to mkdir: %w", err)
 	}
 
-	u, err := url.Parse(fmt.Sprintf(c.url, release, repo, arch))
-	if err != nil {
-		return xerrors.Errorf("failed to parse root url: %w", err)
-	}
-	rootPath := u.Path
-	u.Path = path.Join(rootPath, "repodata/repomd.xml")
-	updateInfoPath, modulesPath, err := c.fetchUpdateInfoPath(u.String())
-	if err != nil {
-		if errors.Is(err, ErrorNoUpdateInfoField) && repo == "extras" {
-			log.Printf("skip extras repository because updateinfo field is not in repomd.xml: %s", err)
-			return nil
+	advisories := map[string]Advisory{}
+	for _, release := range releases {
+		u, err := url.Parse(fmt.Sprintf(c.url, release, repo, arch))
+		if err != nil {
+			return xerrors.Errorf("failed to parse root url: %w", err)
 		}
-		return xerrors.Errorf("failed to fetch updateInfo path from repomd.xml: %w", err)
-	}
-
-	u.Path = path.Join(rootPath, updateInfoPath)
-	uinfo, err := c.fetchUpdateInfo(u.String())
-	if err != nil {
-		return xerrors.Errorf("failed to fetch updateInfo: %w", err)
-	}
+		rootPath := u.Path
 
-	modules := map[string]ModuleInfo{}
-	if modulesPath != "" {
-		u.Path = path.Join(rootPath, modulesPath)
-		modules, err = c.fetchModulesFromYaml(u.String())
+		log.Printf("Fetching Rocky Linux %s %s %s Advisory filename from repodata html...", release, repo, arch)
+		advFiles, err := c.fetchAdvisoryFiles(u.String())
 		if err != nil {
-			return xerrors.Errorf("failed to fetch modules info: %w", err)
+			return xerrors.Errorf("failed to fetch advisory files: %w", err)
 		}
-	}
 
-	if err := extractModulesToUpdateInfo(uinfo, modules); err != nil {
-		return xerrors.Errorf("failed to extract modules to updateinfo: %w", err)
-	}
+		for _, advFile := range advFiles {
+			log.Printf("Fetching advisory. updateinfo.xml.gz: %.10s, modules.yaml.xz: %.10s", advFile.updateinfo, advFile.modules)
 
-	secErrata := map[string][]RLSA{}
-	for _, rlsa := range uinfo.RLSAList {
-		if !strings.HasPrefix(rlsa.ID, "RLSA-") {
-			continue
-		}
+			u.Path = path.Join(rootPath, advFile.updateinfo)
+			uinfo, err := c.fetchSecurityAdvisory(u.String())
+			if err != nil {
+				return xerrors.Errorf("failed to fetch updateInfo: %w", err)
+			}
+
+			modules := map[string]ModuleInfo{}
+			if advFile.modules != "" {
+				u.Path = path.Join(rootPath, advFile.modules)
+				modules, err = c.fetchModulesFromYaml(u.String())
+				if err != nil {
+					return xerrors.Errorf("failed to fetch modules info: %w", err)
+				}
+			}
+
+			if err := extractModulesToUpdateInfo(uinfo, modules); err != nil {
+				return xerrors.Errorf("failed to extract modules to updateinfo: %w", err)
+			}
+
+			for _, adv1 := range uinfo.Advisories {
+				adv2, ok := advisories[adv1.ID]
+				if ok {
+					if adv1.Updated.Date != adv2.Updated.Date {
+						advisories[adv1.ID] = adv1
+					} else if adv1.Issued.Date != adv2.Issued.Date {
+						advisories[adv1.ID] = adv1
+					} else if len(adv2.PkgLists) == 0 {
+						advisories[adv1.ID] = adv1
+					}
+				} else {
+					advisories[adv1.ID] = adv1
+				}
+			}
+		}
+	}
+
+	advPerYear := map[string][]Advisory{}
+	for _, rlsa := range advisories {
 		y := strings.Split(strings.TrimPrefix(rlsa.ID, "RLSA-"), ":")[0]
-		secErrata[y] = append(secErrata[y], rlsa)
+		advPerYear[y] = append(advPerYear[y], rlsa)
 	}
 
-	for year, errata := range secErrata {
-		log.Printf("Write Errata for Rocky Linux %s %s %s %s", release, repo, arch, year)
+	for year, advs := range advPerYear {
+		log.Printf("Write Errata for Rocky Linux %s %s %s %s", majorVer, repo, arch, year)
 		if err := os.MkdirAll(filepath.Join(dirPath, year), os.ModePerm); err != nil {
 			return xerrors.Errorf("failed to mkdir: %w", err)
 		}
 
-		bar := pb.StartNew(len(errata))
-		for _, erratum := range errata {
-			jsonPath := filepath.Join(dirPath, year, fmt.Sprintf("%s.json", erratum.ID))
-			if err := utils.Write(jsonPath, erratum); err != nil {
+		bar := pb.StartNew(len(advs))
+		for _, adv := range advs {
+			jsonPath := filepath.Join(dirPath, year, fmt.Sprintf("%s.json", adv.ID))
+			if err := utils.Write(jsonPath, adv); err != nil {
 				return xerrors.Errorf("failed to write Rocky Linux CVE details: %w", err)
 			}
 			bar.Increment()
@@ -252,34 +270,71 @@ func (c Config) update(release, repo, arch string) error {
 	return nil
 }
 
-var ErrorNoUpdateInfoField = xerrors.New("no updateinfo field in the repomd")
+type advisoryFile struct {
+	updateinfo string
+	modules    string
+}
+
+var repodataPattern = regexp.MustCompile(`^.*href="([^"]+(?:-updateinfo\.xml\.gz|-modules\.yaml\.xz))".*\s(\d{2}-(?:Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-\d{4}\s\d{2}:\d{2}).*$`)
 
-func (c Config) fetchUpdateInfoPath(repomdURL string) (updateInfoPath, modulesPath string, err error) {
-	res, err := utils.FetchURL(repomdURL, "", c.retry)
+func (c Config) fetchAdvisoryFiles(repodataURL string) ([]advisoryFile, error) {
+	res, err := utils.FetchURL(repodataURL, "", c.retry)
 	if err != nil {
-		return "", "", xerrors.Errorf("failed to fetch %s: %w", repomdURL, err)
+		return nil, xerrors.Errorf("failed to fetch %s: %w", repodataURL, err)
 	}
-	var repoMd RepoMd
-	if err := xml.NewDecoder(bytes.NewBuffer(res)).Decode(&repoMd); err != nil {
-		return "", "", xerrors.Errorf("failed to decode repomd.xml: %w", err)
+	type fileinfo struct {
+		name string
+		date time.Time
 	}
-	for _, repo := range repoMd.RepoList {
-		if repo.Type == "updateinfo" {
-			updateInfoPath = repo.Location.Href
+	uinfos := []fileinfo{}
+	modules := []fileinfo{}
+	scanner := bufio.NewScanner(bytes.NewBuffer(res))
+	for scanner.Scan() {
+		match := repodataPattern.FindStringSubmatch(scanner.Text())
+		if len(match) != 3 {
+			continue
+		}
+		t, err := time.Parse("02-Jan-2006 15:04", match[2])
+		if err != nil {
+			return nil, xerrors.Errorf("failed to parse repodata html time: %w", err)
 		}
-		if repo.Type == "modules" {
-			modulesPath = repo.Location.Href
+		finfo := fileinfo{
+			name: match[1],
+			date: t,
+		}
+		if strings.HasSuffix(finfo.name, "-updateinfo.xml.gz") {
+			uinfos = append(uinfos, finfo)
+		} else {
+			modules = append(modules, finfo)
 		}
 	}
-	if updateInfoPath == "" {
-		return "", "", ErrorNoUpdateInfoField
+	sort.Slice(uinfos, func(i, j int) bool { return uinfos[i].date.Before(uinfos[j].date) })
+	sort.Slice(modules, func(i, j int) bool { return modules[i].date.Before(modules[j].date) })
+
+	advFiles := []advisoryFile{}
+	latestModuleIdx := 0
+	for _, uinfo := range uinfos {
+		advFile := advisoryFile{
+			updateinfo: uinfo.name,
+		}
+		// find the most recent modules as of the updateinfo date
+		for i, module := range modules[latestModuleIdx:] {
+			if module.date.After(uinfo.date) {
+				if i > 0 {
+					latestModuleIdx = latestModuleIdx + i - 1
+				}
+				break
+			}
+			advFile.modules = module.name
+		}
+		advFiles = append(advFiles, advFile)
 	}
-	return updateInfoPath, modulesPath, nil
+	return advFiles, nil
 }
 
-func (c Config) fetchUpdateInfo(url string) (*UpdateInfo, error) {
+func (c Config) fetchSecurityAdvisory(url string) (*UpdateInfo, error) {
 	res, err := utils.FetchURL(url, "", c.retry)
 	if err != nil {
 		return nil, xerrors.Errorf("failed to fetch updateInfo: %w", err)
 	}
@@ -294,15 +349,22 @@ func (c Config) fetchUpdateInfo(url string) (*UpdateInfo, error) {
 	if err := xml.NewDecoder(r).Decode(&updateInfo); err != nil {
 		return nil, err
 	}
-	for i, alas := range updateInfo.RLSAList {
+
+	advs := []Advisory{}
+	for _, adv := range updateInfo.Advisories {
+		if !strings.HasPrefix(adv.ID, "RLSA-") {
+			continue
+		}
 		var cveIDs []string
-		for _, ref := range alas.References {
+		for _, ref := range adv.References {
 			if ref.Type == "cve" {
 				cveIDs = append(cveIDs, ref.ID)
 			}
 		}
-		updateInfo.RLSAList[i].CveIDs = cveIDs
+		adv.CveIDs = cveIDs
+		advs = append(advs, adv)
 	}
+	updateInfo.Advisories = advs
 	return &updateInfo, nil
 }
 
@@ -378,34 +440,24 @@ func extractModulesToUpdateInfo(uinfo *UpdateInfo, modules map[string]ModuleInfo
 	pkgToModuleStr := convertToPkgToModuleStr(modules)
 	missingModuleIDs := []string{}
-	for i := range uinfo.RLSAList {
-		moduleStrToPkgs := convertToModuleStrToPkgs(uinfo.RLSAList[i].Packages, pkgToModuleStr)
+	for i := range uinfo.Advisories {
+		moduleStrToPkgs := convertToModuleStrToPkgs(uinfo.Advisories[i].Packages, pkgToModuleStr)
 		if len(moduleStrToPkgs) == 0 {
-			missingModuleIDs = append(missingModuleIDs, uinfo.RLSAList[i].ID)
+			missingModuleIDs = append(missingModuleIDs, uinfo.Advisories[i].ID)
 			continue
 		}
 
 		pkgLists := []PkgList{}
 		for modularStr, pkgs := range moduleStrToPkgs {
-			var module Module
-			if modularStr != "" {
-				minfo := modules[modularStr]
-				module = Module{
-					Stream:  minfo.Data.Stream,
-					Name:    minfo.Data.Name,
-					Version: minfo.Data.Version,
-					Arch:    minfo.Data.Arch,
-					Context: minfo.Data.Context,
-				}
+			pkglist, err := buildPkgList(modules, modularStr, pkgs)
+			if err != nil {
+				return xerrors.Errorf("failed to build pkglist: %w", err)
 			}
-			pkgLists = append(pkgLists, PkgList{
-				Packages: pkgs,
-				Module:   module,
-			})
+			pkgLists = append(pkgLists, pkglist)
 		}
-		uinfo.RLSAList[i].PkgLists = pkgLists
-		uinfo.RLSAList[i].Packages = nil
+		uinfo.Advisories[i].PkgLists = pkgLists
+		uinfo.Advisories[i].Packages = nil
 	}
 
 	if len(missingModuleIDs) > 0 {
@@ -442,3 +494,69 @@ func convertToModuleStrToPkgs(pkgs []Package, pkgToModuleStr map[string]string)
 	}
 	return moduleStrToPkgs
 }
+
+func buildPkgList(modules map[string]ModuleInfo, modularStr string, pkgs []Package) (PkgList, error) {
+	var module Module
+	if modularStr != "" {
+		minfo := modules[modularStr]
+		module = Module{
+			Stream:  minfo.Data.Stream,
+			Name:    minfo.Data.Name,
+			Version: minfo.Data.Version,
+			Arch:    minfo.Data.Arch,
+			Context: minfo.Data.Context,
+		}
+
+		pkgs = []Package{}
+		for _, pkg := range minfo.Data.Artifacts.Rpms {
+			name, ver, rel, epoch, arch, err := splitFileName(pkg)
+			if err != nil {
+				return PkgList{}, xerrors.Errorf("failed to split rpm filename: %w", err)
+			}
+			pkgs = append(pkgs, Package{
+				Name:     name,
+				Epoch:    epoch,
+				Version:  ver,
+				Release:  rel,
+				Arch:     arch,
+				Filename: fmt.Sprintf("%s-%s-%s.%s.rpm", name, ver, rel, arch),
+			})
+		}
+	}
+
+	return PkgList{Packages: pkgs, Module: module}, nil
+}
+
+// splitFileName returns a name, version, release, epoch, arch
+func splitFileName(filename string) (name, ver, rel, epoch, arch string, err error) {
+	filename = strings.TrimSuffix(filename, ".rpm")
+
+	archIndex := strings.LastIndex(filename, ".")
+	if archIndex == -1 {
+		return "", "", "", "", "", xerrors.Errorf("failed to parse arch from filename: %s", filename)
+	}
+	arch = filename[archIndex+1:]
+
+	relIndex := strings.LastIndex(filename[:archIndex], "-")
+	if relIndex == -1 {
+		return "", "", "", "", "", xerrors.Errorf("failed to parse release from filename: %s", filename)
+	}
+	rel = filename[relIndex+1 : archIndex]
+
+	verIndex := strings.LastIndex(filename[:relIndex], "-")
+	if verIndex == -1 {
+		return "", "", "", "", "", xerrors.Errorf("failed to parse version from filename: %s", filename)
+	}
+	ver = filename[verIndex+1 : relIndex]
+
+	epochIndex := strings.Index(ver, ":")
+	if epochIndex == -1 {
+		epoch = "0"
+	} else {
+		epoch = ver[:epochIndex]
+		ver = ver[epochIndex+1:]
+	}
+
+	name = filename[:verIndex]
+	return name, ver, rel, epoch, arch, nil
+}