Significantly improve refreshing performance

Looping through every file on every file update *and* calling Clean + FromSlash is slow with many files
(ideally, we'd have a map to look up the file, but this is already significantly faster)
This commit is contained in:
comp500 2023-03-24 01:00:05 +00:00
parent 8d50b97356
commit 37cc68a9f7

View File

@ -84,8 +84,10 @@ func (in *Index) updateFileHashGiven(path, format, hash string, mod bool) error
if err != nil { if err != nil {
return err return err
} }
slashPath := filepath.ToSlash(relPath)
// TODO: make this not a linear scan for every file update
for k, v := range in.Files { for k, v := range in.Files {
if filepath.Clean(filepath.FromSlash(v.File)) == relPath { if v.File == slashPath {
found = true found = true
// Update hash // Update hash
in.Files[k].Hash = hash in.Files[k].Hash = hash
@ -96,15 +98,14 @@ func (in *Index) updateFileHashGiven(path, format, hash string, mod bool) error
} }
// Mark this file as found // Mark this file as found
in.Files[k].fileExistsTemp = true in.Files[k].fileExistsTemp = true
// Clean up path if it's untidy in.Files[k].File = slashPath
in.Files[k].File = filepath.ToSlash(relPath)
// Don't break out of loop, as there may be aliased versions that // Don't break out of loop, as there may be aliased versions that
// also need to be updated // also need to be updated
} }
} }
if !found { if !found {
newFile := IndexFile{ newFile := IndexFile{
File: filepath.ToSlash(relPath), File: slashPath,
Hash: hash, Hash: hash,
fileExistsTemp: true, fileExistsTemp: true,
} }