Index fixes and performance improvements (fixes #223, #224)

- Fixed creation of duplicate index entries when importing from CurseForge (fixes #224)
- Automatically remove duplicates in index
- Fixed `packwiz serve` with a custom `--pack-root` argument (fixes #223)
- Fixed `packwiz serve` with a custom index.toml location
- Cleaned up internal serving code, added comments and better errors
- Refactored path handling code (see the path round-trip sketch below)
- Improved refreshing/exporting performance
- Factored out duplicated exporting logic
- Replaced GetAllMods calls with cleaner LoadAllMods calls and made the former private
- Improved variable names in update command
- Improved handling of aliased files
- Changed CheckUpdate to take references to metadata
- Removed the ability to use an absolute path to the index file (that probably didn't work anyway)
- Behaviour change: order of entries in exported files may be random
comp500 2023-05-29 23:15:55 +01:00
parent d25817273b
commit 4fb1f1b59d
14 changed files with 401 additions and 352 deletions
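
The path refactor replaces scattered filepath.Rel/filepath.Join calls with two Index helpers, ResolveIndexPath and RelIndexPath (both visible in the core/index.go diff below). A minimal sketch of the round-trip they provide, with the struct reduced to a single field and example paths that are purely illustrative:

package main

import (
    "fmt"
    "path/filepath"
)

// index is a stand-in for core.Index, reduced to the one field the helpers need.
type index struct {
    packRoot string // directory containing index.toml (the "pack root")
}

// resolveIndexPath turns a slash-separated index path into an on-disk path.
func (in index) resolveIndexPath(p string) string {
    return filepath.Join(in.packRoot, filepath.FromSlash(p))
}

// relIndexPath turns an on-disk path back into a slash-separated index path.
func (in index) relIndexPath(p string) (string, error) {
    rel, err := filepath.Rel(in.packRoot, p)
    if err != nil {
        return "", err
    }
    return filepath.ToSlash(rel), nil
}

func main() {
    in := index{packRoot: "/packs/example"}
    onDisk := in.resolveIndexPath("mods/sodium.pw.toml")
    backAgain, _ := in.relIndexPath(onDisk)
    // On a Unix-like system this prints:
    // /packs/example/mods/sodium.pw.toml mods/sodium.pw.toml
    fmt.Println(onDisk, backAgain)
}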


@@ -48,8 +48,8 @@ var serveCmd = &cobra.Command{
  fmt.Println(err)
  os.Exit(1)
  }
- indexPath := filepath.Join(filepath.Dir(viper.GetString("pack-file")), filepath.FromSlash(pack.Index.File))
- indexDir := filepath.Dir(indexPath)
+ packServeDir := filepath.Dir(viper.GetString("pack-file"))
+ packFileName := filepath.Base(viper.GetString("pack-file"))
  t, err := template.New("index-page").Parse(indexPage)
  if err != nil {
@@ -75,79 +75,50 @@ var serveCmd = &cobra.Command{
  return
  }
+ // Relative to pack.toml
  urlPath := strings.TrimPrefix(path.Clean("/"+strings.TrimPrefix(req.URL.Path, "/")), "/")
- indexRelPath, err := filepath.Rel(indexDir, filepath.FromSlash(urlPath))
+ // Convert to absolute
+ destPath := filepath.Join(packServeDir, filepath.FromSlash(urlPath))
  // Relativisation needs to be done using filepath, as path doesn't have Rel!
+ // (now using index util function)
+ // Relative to index.toml ("pack root")
+ indexRelPath, err := index.RelIndexPath(destPath)
  if err != nil {
- fmt.Println(err)
+ fmt.Println("Failed to parse path", err)
  return
  }
- indexRelPathSlash := path.Clean(filepath.ToSlash(indexRelPath))
- var destPath string
- found := false
- if urlPath == filepath.ToSlash(indexPath) {
- found = true
- destPath = indexPath
+ if urlPath == path.Clean(pack.Index.File) {
  // Must be done here, to ensure all paths gain the lock at some point
  refreshMutex.RLock()
- } else if urlPath == filepath.ToSlash(viper.GetString("pack-file")) {
- found = true
+ } else if urlPath == packFileName { // Only need to compare name - already relative to pack.toml
  if viper.GetBool("serve.refresh") {
  // Get write lock, to do a refresh
  refreshMutex.Lock()
  // Reload pack and index (might have changed on disk)
- pack, err = core.LoadPack()
+ err = doServeRefresh(&pack, &index)
  if err != nil {
- fmt.Println(err)
+ fmt.Println("Failed to refresh pack", err)
  return
  }
- index, err = pack.LoadIndex()
- if err != nil {
- fmt.Println(err)
- return
- }
- err = index.Refresh()
- if err != nil {
- fmt.Println(err)
- return
- }
- err = index.Write()
- if err != nil {
- fmt.Println(err)
- return
- }
- err = pack.UpdateIndexHash()
- if err != nil {
- fmt.Println(err)
- return
- }
- err = pack.Write()
- if err != nil {
- fmt.Println(err)
- return
- }
- fmt.Println("Index refreshed!")
  // Downgrade to a read lock
  refreshMutex.Unlock()
  }
  refreshMutex.RLock()
- destPath = viper.GetString("pack-file")
  } else {
  refreshMutex.RLock()
  // Only allow indexed files
- for _, v := range index.Files {
- if indexRelPathSlash == v.File {
- found = true
- break
- }
- }
- if found {
- destPath = filepath.FromSlash(urlPath)
+ if _, found := index.Files[indexRelPath]; !found {
+ fmt.Printf("File not found: %s\n", destPath)
+ refreshMutex.RUnlock()
+ w.WriteHeader(404)
+ _, _ = w.Write([]byte("File not found"))
+ return
  }
  }
  defer refreshMutex.RUnlock()
- if found {
  f, err := os.Open(destPath)
  if err != nil {
  fmt.Printf("Error reading file \"%s\": %s\n", destPath, err)
@@ -166,12 +137,6 @@ var serveCmd = &cobra.Command{
  _, _ = w.Write([]byte("Failed to read file"))
  return
  }
- } else {
- fmt.Printf("File not found: %s\n", destPath)
- w.WriteHeader(404)
- _, _ = w.Write([]byte("File not found"))
- return
- }
  })
  }
@@ -184,6 +149,37 @@ var serveCmd = &cobra.Command{
  },
  }
+ func doServeRefresh(pack *core.Pack, index *core.Index) error {
+ var err error
+ *pack, err = core.LoadPack()
+ if err != nil {
+ return err
+ }
+ *index, err = pack.LoadIndex()
+ if err != nil {
+ return err
+ }
+ err = index.Refresh()
+ if err != nil {
+ return err
+ }
+ err = index.Write()
+ if err != nil {
+ return err
+ }
+ err = pack.UpdateIndexHash()
+ if err != nil {
+ return err
+ }
+ err = pack.Write()
+ if err != nil {
+ return err
+ }
+ fmt.Println("Index refreshed!")
+ return nil
+ }
  func init() {
  rootCmd.AddCommand(serveCmd)
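
The handler keeps the same locking discipline as before — write lock only while refreshing, read lock while serving — but the refresh body now lives in doServeRefresh. A standalone sketch of that sync.RWMutex pattern, with placeholder refresh/serve functions rather than the real packwiz code:

package main

import (
    "fmt"
    "sync"
)

var refreshMutex sync.RWMutex

// refresh stands in for doServeRefresh: reload pack state under the write lock.
func refresh() error {
    fmt.Println("Index refreshed!")
    return nil
}

// handle stands in for the HTTP handler body: refresh (if enabled) under the
// write lock, then take a read lock while the requested file is served.
func handle(doRefresh bool) {
    if doRefresh {
        refreshMutex.Lock()
        if err := refresh(); err != nil {
            refreshMutex.Unlock()
            fmt.Println("Failed to refresh pack", err)
            return
        }
        // Downgrade to a read lock (release write, reacquire read below)
        refreshMutex.Unlock()
    }
    refreshMutex.RLock()
    defer refreshMutex.RUnlock()
    fmt.Println("serving file under read lock")
}

func main() {
    handle(true)
    handle(false)
}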


@@ -33,27 +33,26 @@ var UpdateCmd = &cobra.Command{
  var singleUpdatedName string
  if viper.GetBool("update.all") {
- updaterMap := make(map[string][]core.Mod)
+ filesWithUpdater := make(map[string][]*core.Mod)
  fmt.Println("Reading metadata files...")
- for _, v := range index.GetAllMods() {
- modData, err := core.LoadMod(v)
- if err != nil {
- fmt.Printf("Error reading metadata file: %s\n", err.Error())
- continue
- }
+ mods, err := index.LoadAllMods()
+ if err != nil {
+ fmt.Printf("Failed to update all files: %v\n", err)
+ os.Exit(1)
+ }
+ for _, modData := range mods {
  updaterFound := false
  for k := range modData.Update {
- slice, ok := updaterMap[k]
+ slice, ok := filesWithUpdater[k]
  if !ok {
  _, ok = core.Updaters[k]
  if !ok {
  continue
  }
- slice = []core.Mod{}
+ slice = []*core.Mod{}
  }
  updaterFound = true
- updaterMap[k] = append(slice, modData)
+ filesWithUpdater[k] = append(slice, modData)
  }
  if !updaterFound {
  fmt.Printf("A supported update system for \"%s\" cannot be found.\n", modData.Name)
@@ -62,9 +61,9 @@ var UpdateCmd = &cobra.Command{
  fmt.Println("Checking for updates...")
  updatesFound := false
- updaterPointerMap := make(map[string][]*core.Mod)
+ updatableFiles := make(map[string][]*core.Mod)
  updaterCachedStateMap := make(map[string][]interface{})
- for k, v := range updaterMap {
+ for k, v := range filesWithUpdater {
  checks, err := core.Updaters[k].CheckUpdate(v, pack)
  if err != nil {
  // TODO: do we return err code 1?
@@ -83,7 +82,7 @@ var UpdateCmd = &cobra.Command{
  updatesFound = true
  }
  fmt.Printf("%s: %s\n", v[i].Name, check.UpdateString)
- updaterPointerMap[k] = append(updaterPointerMap[k], &v[i])
+ updatableFiles[k] = append(updatableFiles[k], v[i])
  updaterCachedStateMap[k] = append(updaterCachedStateMap[k], check.CachedState)
  }
  }
@@ -99,7 +98,7 @@ var UpdateCmd = &cobra.Command{
  return
  }
- for k, v := range updaterPointerMap {
+ for k, v := range updatableFiles {
  err := core.Updaters[k].DoUpdate(v, updaterCachedStateMap[k])
  if err != nil {
  // TODO: do we return err code 1?
@@ -143,7 +142,7 @@ var UpdateCmd = &cobra.Command{
  }
  updaterFound = true
- check, err := updater.CheckUpdate([]core.Mod{modData}, pack)
+ check, err := updater.CheckUpdate([]*core.Mod{&modData}, pack)
  if err != nil {
  fmt.Println(err)
  os.Exit(1)


@@ -6,6 +6,7 @@ import (
  "github.com/packwiz/packwiz/core"
  "io"
  "os"
+ "path"
  "path/filepath"
  )
@@ -29,7 +30,7 @@ func ListManualDownloads(session core.DownloadSession) {
  }
  }
- func AddToZip(dl core.CompletedDownload, exp *zip.Writer, dir string, indexPath string) bool {
+ func AddToZip(dl core.CompletedDownload, exp *zip.Writer, dir string, index *core.Index) bool {
  if dl.Error != nil {
  fmt.Printf("Download of %s (%s) failed: %v\n", dl.Mod.Name, dl.Mod.FileName, dl.Error)
  return false
@@ -38,24 +39,24 @@ func AddToZip(dl core.CompletedDownload, exp *zip.Writer, dir string, indexPath
  fmt.Printf("Warning for %s (%s): %v\n", dl.Mod.Name, dl.Mod.FileName, warning)
  }
- path, err := filepath.Rel(filepath.Dir(indexPath), dl.Mod.GetDestFilePath())
+ p, err := index.RelIndexPath(dl.Mod.GetDestFilePath())
  if err != nil {
  fmt.Printf("Error resolving external file: %v\n", err)
  return false
  }
- modFile, err := exp.Create(filepath.ToSlash(filepath.Join(dir, path)))
+ modFile, err := exp.Create(path.Join(dir, p))
  if err != nil {
- fmt.Printf("Error creating metadata file %s: %v\n", path, err)
+ fmt.Printf("Error creating metadata file %s: %v\n", p, err)
  return false
  }
  _, err = io.Copy(modFile, dl.File)
  if err != nil {
- fmt.Printf("Error copying file %s: %v\n", path, err)
+ fmt.Printf("Error copying file %s: %v\n", p, err)
  return false
  }
  err = dl.File.Close()
  if err != nil {
- fmt.Printf("Error closing file %s: %v\n", path, err)
+ fmt.Printf("Error closing file %s: %v\n", p, err)
  return false
  }
@@ -63,6 +64,37 @@ func AddToZip(dl core.CompletedDownload, exp *zip.Writer, dir string, indexPath
  return true
  }
+ // AddNonMetafileOverrides saves all non-metadata files into an overrides folder in the zip
+ func AddNonMetafileOverrides(index *core.Index, exp *zip.Writer) {
+ for p, v := range index.Files {
+ if !v.IsMetaFile() {
+ file, err := exp.Create(path.Join("overrides", p))
+ if err != nil {
+ fmt.Printf("Error creating file: %s\n", err.Error())
+ // TODO: exit(1)?
+ continue
+ }
+ // Attempt to read the file from disk, without checking hashes (assumed to have no errors)
+ src, err := os.Open(index.ResolveIndexPath(p))
+ if err != nil {
+ _ = src.Close()
+ fmt.Printf("Error reading file: %s\n", err.Error())
+ // TODO: exit(1)?
+ continue
+ }
+ _, err = io.Copy(file, src)
+ if err != nil {
+ _ = src.Close()
+ fmt.Printf("Error copying file: %s\n", err.Error())
+ // TODO: exit(1)?
+ continue
+ }
+ _ = src.Close()
+ }
+ }
+ }
  func PrintDisclaimer(isCf bool) {
  fmt.Println("Disclaimer: you are responsible for ensuring you comply with ALL the licenses, or obtain appropriate permissions, for the files \"added to zip\" below")
  if isCf {
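
AddNonMetafileOverrides factors the override-copying loop out of both exporters; at its core it is just zip.Writer.Create plus io.Copy. A self-contained sketch of copying one on-disk file into an overrides/ entry (file names are illustrative, and the real function iterates index.Files and skips metafiles):

package main

import (
    "archive/zip"
    "fmt"
    "io"
    "os"
    "path"
)

// addOverride copies srcPath on disk into the zip under overrides/<indexPath>.
// Illustrative only: the real function walks index.Files instead of taking paths.
func addOverride(exp *zip.Writer, indexPath, srcPath string) error {
    dst, err := exp.Create(path.Join("overrides", indexPath))
    if err != nil {
        return err
    }
    src, err := os.Open(srcPath)
    if err != nil {
        return err
    }
    defer src.Close()
    _, err = io.Copy(dst, src)
    return err
}

func main() {
    out, err := os.Create("export.zip")
    if err != nil {
        fmt.Println(err)
        return
    }
    exp := zip.NewWriter(out)
    // Example path; any non-metadata file tracked by the index would be copied like this.
    if err := addOverride(exp, "config/example.toml", "config/example.toml"); err != nil {
        fmt.Println("Error copying file:", err)
    }
    _ = exp.Close()
    _ = out.Close()
}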


@@ -1,15 +1,12 @@
  package core
  import (
- "errors"
  "fmt"
- "golang.org/x/exp/slices"
  "io"
  "io/fs"
  "os"
  "path"
  "path/filepath"
- "sort"
  "strings"
  "time"
@@ -22,116 +19,59 @@ import (
  // Index is a representation of the index.toml file for referencing all the files in a pack.
  type Index struct {
- HashFormat string `toml:"hash-format"`
- Files []IndexFile `toml:"files"`
+ HashFormat string
+ Files IndexFiles
  indexFile string
+ packRoot string
  }
- // IndexFile is a file in the index
- type IndexFile struct {
- // Files are stored in forward-slash format relative to the index file
- File string `toml:"file"`
- Hash string `toml:"hash,omitempty"`
- HashFormat string `toml:"hash-format,omitempty"`
- Alias string `toml:"alias,omitempty"`
- MetaFile bool `toml:"metafile,omitempty"` // True when it is a .toml metadata file
- Preserve bool `toml:"preserve,omitempty"` // Don't overwrite the file when updating
- fileExistsTemp bool
+ // indexTomlRepresentation is the TOML representation of Index (Files must be converted)
+ type indexTomlRepresentation struct {
+ HashFormat string `toml:"hash-format"`
+ Files indexFilesTomlRepresentation `toml:"files"`
  }
  // LoadIndex attempts to load the index file from a path
  func LoadIndex(indexFile string) (Index, error) {
- var index Index
- if _, err := toml.DecodeFile(indexFile, &index); err != nil {
+ // Decode as indexTomlRepresentation then convert to Index
+ var rep indexTomlRepresentation
+ if _, err := toml.DecodeFile(indexFile, &rep); err != nil {
  return Index{}, err
  }
- index.indexFile = indexFile
- if len(index.HashFormat) == 0 {
- index.HashFormat = "sha256"
+ if len(rep.HashFormat) == 0 {
+ rep.HashFormat = "sha256"
+ }
+ index := Index{
+ HashFormat: rep.HashFormat,
+ Files: rep.Files.toMemoryRep(),
+ indexFile: indexFile,
+ packRoot: filepath.Dir(indexFile),
  }
  return index, nil
  }
- // RemoveFile removes a file from the index.
+ // RemoveFile removes a file from the index, given a file path
  func (in *Index) RemoveFile(path string) error {
- relPath, err := filepath.Rel(filepath.Dir(in.indexFile), path)
+ relPath, err := in.RelIndexPath(path)
  if err != nil {
  return err
  }
- i := 0
- for _, file := range in.Files {
- if filepath.Clean(filepath.FromSlash(file.File)) != relPath {
- // Keep file, as it doesn't match
- in.Files[i] = file
- i++
- }
- }
- in.Files = in.Files[:i]
+ delete(in.Files, relPath)
  return nil
  }
- // resortIndex sorts Files by file name
- func (in *Index) resortIndex() {
- sort.SliceStable(in.Files, func(i, j int) bool {
- // TODO: Compare by alias if names are equal?
- // TODO: Remove duplicated entries? (compound key on file/alias?)
- return in.Files[i].File < in.Files[j].File
- })
- }
- func (in *Index) markFound(i int, format, hash string) {
- // Update hash
- in.Files[i].Hash = hash
- if in.HashFormat == format {
- in.Files[i].HashFormat = ""
- } else {
- in.Files[i].HashFormat = format
- }
- // Mark this file as found
- in.Files[i].fileExistsTemp = true
- }
- func (in *Index) updateFileHashGiven(path, format, hash string, mod bool) error {
+ func (in *Index) updateFileHashGiven(path, format, hash string, markAsMetaFile bool) error {
+ // Remove format if equal to index hash format
+ if in.HashFormat == format {
+ format = ""
+ }
  // Find in index
- relPath, err := filepath.Rel(filepath.Dir(in.indexFile), path)
+ relPath, err := in.RelIndexPath(path)
  if err != nil {
  return err
  }
- slashPath := filepath.ToSlash(relPath)
- // Binary search for slashPath in the files list
- i, found := slices.BinarySearchFunc(in.Files, IndexFile{File: slashPath}, func(a IndexFile, b IndexFile) int {
- return strings.Compare(a.File, b.File)
- })
- if found {
- in.markFound(i, format, hash)
- // There may be other entries with the same file path but different alias!
- // Search back and forth to find them:
- j := i
- for j > 0 && in.Files[j-1].File == slashPath {
- j = j - 1
- in.markFound(j, format, hash)
- }
- j = i
- for j < len(in.Files)-1 && in.Files[j+1].File == slashPath {
- j = j + 1
- in.markFound(j, format, hash)
- }
- } else {
- newFile := IndexFile{
- File: slashPath,
- Hash: hash,
- fileExistsTemp: true,
- }
- // Override hash format for this file, if the whole index isn't sha256
- if in.HashFormat != format {
- newFile.HashFormat = format
- }
- newFile.MetaFile = mod
- in.Files = append(in.Files, newFile)
- }
+ in.Files.updateFileEntry(relPath, format, hash, markAsMetaFile)
  return nil
  }
@@ -165,17 +105,27 @@ func (in *Index) updateFile(path string) error {
  hashString = h.HashToString(h.Sum(nil))
  }
- mod := false
- // If the file has an extension of pw.toml, set mod to true
+ markAsMetaFile := false
+ // If the file has an extension of pw.toml, set markAsMetaFile to true
  if strings.HasSuffix(filepath.Base(path), MetaExtension) {
- mod = true
+ markAsMetaFile = true
  }
- return in.updateFileHashGiven(path, "sha256", hashString, mod)
+ return in.updateFileHashGiven(path, "sha256", hashString, markAsMetaFile)
  }
- func (in Index) GetPackRoot() string {
- return filepath.Dir(in.indexFile)
+ // ResolveIndexPath turns a path from the index into a file path on disk
+ func (in Index) ResolveIndexPath(p string) string {
+ return filepath.Join(in.packRoot, filepath.FromSlash(p))
+ }
+ // RelIndexPath turns a file path on disk into a path from the index
+ func (in Index) RelIndexPath(p string) (string, error) {
+ rel, err := filepath.Rel(in.packRoot, p)
+ if err != nil {
+ return "", err
+ }
+ return filepath.ToSlash(rel), nil
  }
  var ignoreDefaults = []string{
@@ -223,19 +173,18 @@ func (in *Index) Refresh() error {
  pathPF, _ := filepath.Abs(viper.GetString("pack-file"))
  pathIndex, _ := filepath.Abs(in.indexFile)
- packRoot := in.GetPackRoot()
- pathIgnore, _ := filepath.Abs(filepath.Join(packRoot, ".packwizignore"))
+ pathIgnore, _ := filepath.Abs(filepath.Join(in.packRoot, ".packwizignore"))
  ignore, ignoreExists := readGitignore(pathIgnore)
  var fileList []string
- err := filepath.WalkDir(packRoot, func(path string, info os.DirEntry, err error) error {
+ err := filepath.WalkDir(in.packRoot, func(path string, info os.DirEntry, err error) error {
  if err != nil {
  // TODO: Handle errors on individual files properly
  return err
  }
  // Never ignore pack root itself (gitignore doesn't allow ignoring the root)
- if path == packRoot {
+ if path == in.packRoot {
  return nil
  }
@@ -285,13 +234,6 @@ func (in *Index) Refresh() error {
  ),
  )
- // Normalise file paths: updateFile needs to compare path equality
- for i := range in.Files {
- in.Files[i].File = path.Clean(in.Files[i].File)
- }
- // Resort index (required by updateFile)
- in.resortIndex()
  for _, v := range fileList {
  start := time.Now()
@@ -307,34 +249,23 @@ func (in *Index) Refresh() error {
  progressContainer.Wait()
  // Check all the files exist, remove them if they don't
- i := 0
- for _, file := range in.Files {
- if file.fileExistsTemp {
- // Keep file if it exists (already checked in updateFile)
- in.Files[i] = file
- i++
- }
- }
- in.Files = in.Files[:i]
- in.resortIndex()
- return nil
- }
- // RefreshFile calculates the hash for a given path and updates it in the index (also sorts the index)
- func (in *Index) RefreshFile(path string) error {
- // Resort index first (required by updateFile)
- in.resortIndex()
- err := in.updateFile(path)
- if err != nil {
- return err
- }
- in.resortIndex()
+ for p, file := range in.Files {
+ if !file.markedFound() {
+ delete(in.Files, p)
+ }
+ }
  return nil
  }
  // Write saves the index file
  func (in Index) Write() error {
+ // Convert to indexTomlRepresentation
+ rep := indexTomlRepresentation{
+ HashFormat: in.HashFormat,
+ Files: in.Files.toTomlRep(),
+ }
  // TODO: calculate and provide hash while writing?
  f, err := os.Create(in.indexFile)
  if err != nil {
@@ -344,7 +275,7 @@ func (in Index) Write() error {
  enc := toml.NewEncoder(f)
  // Disable indentation
  enc.Indent = ""
- err = enc.Encode(in)
+ err = enc.Encode(rep)
  if err != nil {
  _ = f.Close()
  return err
@@ -352,42 +283,34 @@ func (in Index) Write() error {
  return f.Close()
  }
- // RefreshFileWithHash updates a file in the index, given a file hash and whether it is a mod or not
- func (in *Index) RefreshFileWithHash(path, format, hash string, mod bool) error {
+ // RefreshFileWithHash updates a file in the index, given a file hash and whether it should be marked as metafile or not
+ func (in *Index) RefreshFileWithHash(path, format, hash string, markAsMetaFile bool) error {
  if viper.GetBool("no-internal-hashes") {
  hash = ""
  }
- // Resort index first (required by updateFile)
- in.resortIndex()
- err := in.updateFileHashGiven(path, format, hash, mod)
- if err != nil {
- return err
- }
- in.resortIndex()
- return nil
+ return in.updateFileHashGiven(path, format, hash, markAsMetaFile)
  }
- // FindMod finds a mod in the index and returns it's path and whether it has been found
+ // FindMod finds a mod in the index and returns its path and whether it has been found
  func (in Index) FindMod(modName string) (string, bool) {
- for _, v := range in.Files {
- if v.MetaFile {
- _, file := filepath.Split(v.File)
- fileTrimmed := strings.TrimSuffix(strings.TrimSuffix(file, MetaExtension), MetaExtensionOld)
+ for p, v := range in.Files {
+ if v.IsMetaFile() {
+ _, fileName := path.Split(p)
+ fileTrimmed := strings.TrimSuffix(strings.TrimSuffix(fileName, MetaExtension), MetaExtensionOld)
  if fileTrimmed == modName {
- return filepath.Join(filepath.Dir(in.indexFile), filepath.FromSlash(v.File)), true
+ return in.ResolveIndexPath(p), true
  }
  }
  }
  return "", false
  }
- // GetAllMods finds paths to every metadata file (Mod) in the index
- func (in Index) GetAllMods() []string {
+ // getAllMods finds paths to every metadata file (Mod) in the index
+ func (in Index) getAllMods() []string {
  var list []string
- baseDir := filepath.Dir(in.indexFile)
- for _, v := range in.Files {
- if v.MetaFile {
- list = append(list, filepath.Join(baseDir, filepath.FromSlash(v.File)))
+ for p, v := range in.Files {
+ if v.IsMetaFile() {
+ list = append(list, in.ResolveIndexPath(p))
  }
  }
  return list
@@ -395,7 +318,7 @@ func (in Index) GetAllMods() []string {
  // LoadAllMods reads all metadata files into Mod structs
  func (in Index) LoadAllMods() ([]*Mod, error) {
- modPaths := in.GetAllMods()
+ modPaths := in.getAllMods()
  mods := make([]*Mod, len(modPaths))
  for i, v := range modPaths {
  modData, err := LoadMod(v)
@@ -406,40 +329,3 @@ func (in Index) LoadAllMods() ([]*Mod, error) {
  }
  return mods, nil
  }
- // GetFilePath attempts to get the path of the destination index file as it is stored on disk
- func (in Index) GetFilePath(f IndexFile) string {
- return filepath.Join(filepath.Dir(in.indexFile), filepath.FromSlash(f.File))
- }
- // SaveFile attempts to read the file from disk
- func (in Index) SaveFile(f IndexFile, dest io.Writer) error {
- hashFormat := f.HashFormat
- if hashFormat == "" {
- hashFormat = in.HashFormat
- }
- src, err := os.Open(in.GetFilePath(f))
- defer func(src *os.File) {
- _ = src.Close()
- }(src)
- if err != nil {
- return err
- }
- h, err := GetHashImpl(hashFormat)
- if err != nil {
- return err
- }
- w := io.MultiWriter(h, dest)
- _, err = io.Copy(w, src)
- if err != nil {
- return err
- }
- calculatedHash := h.HashToString(h.Sum(nil))
- if !strings.EqualFold(calculatedHash, f.Hash) && !viper.GetBool("no-internal-hashes") {
- return errors.New("hash of saved file is invalid")
- }
- return nil
- }
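
Keying Files by the cleaned index path is what makes duplicate entries (the #224 bug) impossible to create and turns the serve-time lookup into a single map access. A simplified illustration of the difference, with types cut down to the bare minimum rather than the real IndexFiles:

package main

import "fmt"

type entry struct{ hash string }

func main() {
    // Old shape: a slice of entries could accumulate two records for the same path.
    oldIndex := []string{"mods/sodium.pw.toml", "mods/sodium.pw.toml"}
    fmt.Println("slice entries:", len(oldIndex)) // 2 - duplicate index entries

    // New shape: a map keyed by the cleaned index path can only hold one record,
    // so re-adding an already-indexed file overwrites instead of duplicating.
    files := map[string]entry{}
    files["mods/sodium.pw.toml"] = entry{hash: "abc"}
    files["mods/sodium.pw.toml"] = entry{hash: "def"}
    fmt.Println("map entries:", len(files)) // 1

    // Serving a request is now a single lookup instead of a scan over all files.
    if _, found := files["mods/sodium.pw.toml"]; found {
        fmt.Println("indexed file found")
    }
}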

core/indexfiles.go (new file, 191 lines)

@ -0,0 +1,191 @@
package core
import (
"golang.org/x/exp/slices"
"path"
)
// IndexFiles are stored as a map of path -> (indexFile or alias -> indexFile)
// The latter is used for multiple copies with the same path but different alias
type IndexFiles map[string]IndexPathHolder
type IndexPathHolder interface {
updateHash(hash string, format string)
markFound()
markMetaFile()
markedFound() bool
IsMetaFile() bool
}
// indexFile is a file in the index
type indexFile struct {
// Files are stored in forward-slash format relative to the index file
File string `toml:"file"`
Hash string `toml:"hash,omitempty"`
HashFormat string `toml:"hash-format,omitempty"`
Alias string `toml:"alias,omitempty"`
MetaFile bool `toml:"metafile,omitempty"` // True when it is a .toml metadata file
Preserve bool `toml:"preserve,omitempty"` // Don't overwrite the file when updating
fileFound bool
}
func (i *indexFile) updateHash(hash string, format string) {
i.Hash = hash
i.HashFormat = format
}
func (i *indexFile) markFound() {
i.fileFound = true
}
func (i *indexFile) markMetaFile() {
i.MetaFile = true
}
func (i *indexFile) markedFound() bool {
return i.fileFound
}
func (i *indexFile) IsMetaFile() bool {
return i.MetaFile
}
type indexFileMultipleAlias map[string]indexFile
func (i *indexFileMultipleAlias) updateHash(hash string, format string) {
for k, v := range *i {
v.updateHash(hash, format)
(*i)[k] = v // Can't mutate map value in place
}
}
// (indexFileMultipleAlias == map[string]indexFile)
func (i *indexFileMultipleAlias) markFound() {
for k, v := range *i {
v.markFound()
(*i)[k] = v // Can't mutate map value in place
}
}
func (i *indexFileMultipleAlias) markMetaFile() {
for k, v := range *i {
v.markMetaFile()
(*i)[k] = v // Can't mutate map value in place
}
}
func (i *indexFileMultipleAlias) markedFound() bool {
for _, v := range *i {
return v.markedFound()
}
panic("No entries in indexFileMultipleAlias")
}
func (i *indexFileMultipleAlias) IsMetaFile() bool {
for _, v := range *i {
return v.MetaFile
}
panic("No entries in indexFileMultipleAlias")
}
// updateFileEntry updates the hash of a file and marks as found; adding it if it doesn't exist
// This also sets metafile if markAsMetaFile is set
// This updates all existing aliased variants of a file, but doesn't create new ones
func (f *IndexFiles) updateFileEntry(path string, format string, hash string, markAsMetaFile bool) {
// Ensure map is non-nil
if *f == nil {
*f = make(IndexFiles)
}
// Fetch existing entry
file, found := (*f)[path]
if found {
// Exists: update hash/format/metafile
file.markFound()
file.updateHash(hash, format)
if markAsMetaFile {
file.markMetaFile()
}
// (don't do anything if markAsMetaFile is false - don't reset metafile status of existing metafiles)
} else {
// Doesn't exist: create new file data
newFile := indexFile{
File: path,
Hash: hash,
HashFormat: format,
MetaFile: markAsMetaFile,
fileFound: true,
}
(*f)[path] = &newFile
}
}
type indexFilesTomlRepresentation []indexFile
// toMemoryRep converts the TOML representation of IndexFiles to that used in memory
// These silly converter functions are necessary because the TOML libraries don't support custom non-primitive serializers
func (rep indexFilesTomlRepresentation) toMemoryRep() IndexFiles {
out := make(IndexFiles)
// Add entries to map
for _, v := range rep {
v := v // Narrow scope of loop variable
v.File = path.Clean(v.File)
v.Alias = path.Clean(v.Alias)
// path.Clean converts "" into "." - undo this for Alias as we use omitempty
if v.Alias == "." {
v.Alias = ""
}
if existing, ok := out[v.File]; ok {
if existingFile, ok := existing.(*indexFile); ok {
// Is this the same as the existing file?
if v.Alias == existingFile.Alias {
// Yes: overwrite
out[v.File] = &v
} else {
// No: convert to new map
m := make(indexFileMultipleAlias)
m[existingFile.Alias] = *existingFile
m[v.Alias] = v
out[v.File] = &m
}
} else if existingMap, ok := existing.(*indexFileMultipleAlias); ok {
// Add to alias map
(*existingMap)[v.Alias] = v
} else {
panic("Unknown type in IndexFiles")
}
} else {
out[v.File] = &v
}
}
return out
}
// toTomlRep converts the in-memory representation of IndexFiles to that used in TOML
// These silly converter functions are necessary because the TOML libraries don't support custom non-primitive serializers
func (f *IndexFiles) toTomlRep() indexFilesTomlRepresentation {
// Turn internal representation into TOML representation
rep := make(indexFilesTomlRepresentation, 0, len(*f))
for _, v := range *f {
if file, ok := v.(*indexFile); ok {
rep = append(rep, *file)
} else if file, ok := v.(*indexFileMultipleAlias); ok {
for _, alias := range *file {
rep = append(rep, alias)
}
} else {
panic("Unknown type in IndexFiles")
}
}
slices.SortFunc(rep, func(a indexFile, b indexFile) bool {
if a.File == b.File {
return a.Alias < b.Alias
} else {
return a.File < b.File
}
})
return rep
}
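
A path in IndexFiles usually maps to one entry, but when two index entries share a file with different aliases the value switches to an alias-keyed group behind the IndexPathHolder interface. A reduced sketch of that shape (not the real types, just the pattern):

package main

import "fmt"

// holder mirrors the idea of IndexPathHolder: callers only need a few queries.
type holder interface{ isMeta() bool }

// single is one index entry for a path.
type single struct{ meta bool }

func (s *single) isMeta() bool { return s.meta }

// group holds several entries that share a file path but differ by alias.
type group map[string]single

func (g *group) isMeta() bool {
    for _, v := range *g {
        return v.meta // all aliased copies point at the same file, so one answer suffices
    }
    panic("no entries in group")
}

func main() {
    files := map[string]holder{
        "config/options.txt": &single{meta: false},
        // Two index entries for the same file, distinguished by alias:
        "mods/example.pw.toml": &group{"client": {meta: true}, "server": {meta: true}},
    }
    for p, h := range files {
        fmt.Println(p, "metafile:", h.isMeta())
    }
}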


@@ -12,7 +12,7 @@ type Updater interface {
  ParseUpdate(map[string]interface{}) (interface{}, error)
  // CheckUpdate checks whether there is an update for each of the mods in the given slice,
  // called for all of the mods that this updater handles
- CheckUpdate([]Mod, Pack) ([]UpdateCheck, error)
+ CheckUpdate([]*Mod, Pack) ([]UpdateCheck, error)
  // DoUpdate carries out the update previously queried in CheckUpdate, on each Mod's metadata,
  // given pointers to Mods and the value of CachedState for each mod
  DoUpdate([]*Mod, []interface{}) error
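
Taking []*Mod instead of []Mod means CheckUpdate and DoUpdate operate on the same Mod values the caller holds, so updated metadata doesn't need to be copied back into a separate pointer map. A minimal, generic Go illustration of the difference (Mod reduced to one field; not packwiz's update logic):

package main

import "fmt"

type Mod struct{ FileName string }

// byValue receives copies of the Mod values; the caller's variables are untouched.
func byValue(mods []Mod) {
    for i := range mods {
        mods[i].FileName = "updated.jar"
    }
}

// byPointer receives pointers, so it mutates the caller's Mod values in place.
func byPointer(mods []*Mod) {
    for _, m := range mods {
        m.FileName = "updated.jar"
    }
}

func main() {
    a := Mod{FileName: "old.jar"}
    byValue([]Mod{a})       // the slice holds a copy of a
    fmt.Println(a.FileName) // old.jar

    b := &Mod{FileName: "old.jar"}
    byPointer([]*Mod{b})
    fmt.Println(b.FileName) // updated.jar
}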


@@ -110,9 +110,6 @@ func (pack *Pack) UpdateIndexHash() error {
  fileNative := filepath.FromSlash(pack.Index.File)
  indexFile := filepath.Join(filepath.Dir(viper.GetString("pack-file")), fileNative)
- if filepath.IsAbs(pack.Index.File) {
- indexFile = pack.Index.File
- }
  f, err := os.Open(indexFile)
  if err != nil {


@@ -375,7 +375,7 @@ type cachedStateStore struct {
  fileInfo *modFileInfo
  }
- func (u cfUpdater) CheckUpdate(mods []core.Mod, pack core.Pack) ([]core.UpdateCheck, error) {
+ func (u cfUpdater) CheckUpdate(mods []*core.Mod, pack core.Pack) ([]core.UpdateCheck, error) {
  results := make([]core.UpdateCheck, len(mods))
  modIDs := make([]uint32, len(mods))
  modInfos := make([]modInfo, len(mods))


@@ -10,7 +10,6 @@ import (
  "github.com/spf13/cobra"
  "github.com/spf13/viper"
  "os"
- "path/filepath"
  "strconv"
  )
@@ -59,9 +58,6 @@ var exportCmd = &cobra.Command{
  os.Exit(1)
  }
- // TODO: should index just expose indexPath itself, through a function?
- indexPath := filepath.Join(filepath.Dir(viper.GetString("pack-file")), filepath.FromSlash(pack.Index.File))
  fmt.Println("Reading external files...")
  mods, err := index.LoadAllMods()
  if err != nil {
@@ -139,7 +135,7 @@ var exportCmd = &cobra.Command{
  cmdshared.ListManualDownloads(session)
  for dl := range session.StartDownloads() {
- _ = cmdshared.AddToZip(dl, exp, "overrides", indexPath)
+ _ = cmdshared.AddToZip(dl, exp, "overrides", &index)
  }
  err = session.SaveIndex()
@@ -173,31 +169,7 @@ var exportCmd = &cobra.Command{
  os.Exit(1)
  }
- i = 0
- for _, v := range index.Files {
- if !v.MetaFile {
- // Save all non-metadata files into the zip
- path, err := filepath.Rel(filepath.Dir(indexPath), index.GetFilePath(v))
- if err != nil {
- fmt.Printf("Error resolving file: %s\n", err.Error())
- // TODO: exit(1)?
- continue
- }
- file, err := exp.Create(filepath.ToSlash(filepath.Join("overrides", path)))
- if err != nil {
- fmt.Printf("Error creating file: %s\n", err.Error())
- // TODO: exit(1)?
- continue
- }
- err = index.SaveFile(v, file)
- if err != nil {
- fmt.Printf("Error copying file: %s\n", err.Error())
- // TODO: exit(1)?
- continue
- }
- i++
- }
- }
+ cmdshared.AddNonMetafileOverrides(&index, exp)
  err = exp.Close()
  if err != nil {


@@ -285,9 +285,8 @@ var importCmd = &cobra.Command{
  }
  successes = 0
- packRoot := index.GetPackRoot()
  for _, v := range filesList {
- filePath := filepath.Join(packRoot, filepath.FromSlash(v.Name()))
+ filePath := index.ResolveIndexPath(v.Name())
  filePathAbs, err := filepath.Abs(filePath)
  if err == nil {
  found := false


@@ -145,9 +145,11 @@ var installCmd = &cobra.Command{
  for len(depIDPendingQueue) > 0 && cycles < maxCycles {
  if installedIDList == nil {
  // Get modids of all mods
- for _, modPath := range index.GetAllMods() {
- mod, err := core.LoadMod(modPath)
- if err == nil {
+ mods, err := index.LoadAllMods()
+ if err != nil {
+ fmt.Printf("Failed to determine existing projects: %v\n", err)
+ } else {
+ for _, mod := range mods {
  data, ok := mod.GetParsedUpdateData("curseforge")
  if ok {
  updateData, ok := data.(cfUpdateData)


@@ -9,7 +9,6 @@ import (
  "golang.org/x/exp/slices"
  "net/url"
  "os"
- "path/filepath"
  "strconv"
  "github.com/packwiz/packwiz/core"
@@ -55,9 +54,6 @@ var exportCmd = &cobra.Command{
  return
  }
- // TODO: should index just expose indexPath itself, through a function?
- indexPath := filepath.Join(filepath.Dir(viper.GetString("pack-file")), filepath.FromSlash(pack.Index.File))
  fmt.Println("Reading external files...")
  mods, err := index.LoadAllMods()
  if err != nil {
@@ -113,15 +109,13 @@ var exportCmd = &cobra.Command{
  fmt.Printf("Warning for %s (%s): %v\n", dl.Mod.Name, dl.Mod.FileName, warning)
  }
- pathForward, err := filepath.Rel(filepath.Dir(indexPath), dl.Mod.GetDestFilePath())
+ path, err := index.RelIndexPath(dl.Mod.GetDestFilePath())
  if err != nil {
  fmt.Printf("Error resolving external file: %s\n", err.Error())
  // TODO: exit(1)?
  continue
  }
- path := filepath.ToSlash(pathForward)
  hashes := make(map[string]string)
  hashes["sha1"] = dl.Hashes["sha1"]
  hashes["sha512"] = dl.Hashes["sha512"]
@@ -170,11 +164,11 @@ var exportCmd = &cobra.Command{
  fmt.Printf("%s (%s) added to manifest\n", dl.Mod.Name, dl.Mod.FileName)
  } else {
  if dl.Mod.Side == core.ClientSide {
- _ = cmdshared.AddToZip(dl, exp, "client-overrides", indexPath)
+ _ = cmdshared.AddToZip(dl, exp, "client-overrides", &index)
  } else if dl.Mod.Side == core.ServerSide {
- _ = cmdshared.AddToZip(dl, exp, "server-overrides", indexPath)
+ _ = cmdshared.AddToZip(dl, exp, "server-overrides", &index)
  } else {
- _ = cmdshared.AddToZip(dl, exp, "overrides", indexPath)
+ _ = cmdshared.AddToZip(dl, exp, "overrides", &index)
  }
  }
  }
@@ -233,29 +227,7 @@ var exportCmd = &cobra.Command{
  os.Exit(1)
  }
- for _, v := range index.Files {
- if !v.MetaFile {
- // Save all non-metadata files into the zip
- path, err := filepath.Rel(filepath.Dir(indexPath), index.GetFilePath(v))
- if err != nil {
- fmt.Printf("Error resolving file: %s\n", err.Error())
- // TODO: exit(1)?
- continue
- }
- file, err := exp.Create(filepath.ToSlash(filepath.Join("overrides", path)))
- if err != nil {
- fmt.Printf("Error creating file: %s\n", err.Error())
- // TODO: exit(1)?
- continue
- }
- err = index.SaveFile(v, file)
- if err != nil {
- fmt.Printf("Error copying file: %s\n", err.Error())
- // TODO: exit(1)?
- continue
- }
- }
- }
+ cmdshared.AddNonMetafileOverrides(&index, exp)
  err = exp.Close()
  if err != nil {


@@ -386,9 +386,12 @@ func getBestHash(v *modrinthApi.File) (string, string) {
  func getInstalledProjectIDs(index *core.Index) []string {
  var installedProjects []string
- for _, modPath := range index.GetAllMods() {
- mod, err := core.LoadMod(modPath)
- if err == nil {
+ // Get modids of all mods
+ mods, err := index.LoadAllMods()
+ if err != nil {
+ fmt.Printf("Failed to determine existing projects: %v\n", err)
+ } else {
+ for _, mod := range mods {
  data, ok := mod.GetParsedUpdateData("modrinth")
  if ok {
  updateData, ok := data.(mrUpdateData)


@@ -35,7 +35,7 @@ type cachedStateStore struct {
  Version *modrinthApi.Version
  }
- func (u mrUpdater) CheckUpdate(mods []core.Mod, pack core.Pack) ([]core.UpdateCheck, error) {
+ func (u mrUpdater) CheckUpdate(mods []*core.Mod, pack core.Pack) ([]core.UpdateCheck, error) {
  results := make([]core.UpdateCheck, len(mods))
  for i, mod := range mods {