diff --git a/core/download.go b/core/download.go
new file mode 100644
index 0000000..092cf00
--- /dev/null
+++ b/core/download.go
@@ -0,0 +1,340 @@
+package core
+
+import (
+	"encoding/json"
+	"fmt"
+	"golang.org/x/exp/slices"
+	"io"
+	"io/ioutil"
+	"os"
+	"path/filepath"
+	"strings"
+	"sync"
+)
+
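+// DownloadSession stores the state of a batch of file downloads, serving files
+// from the local download cache where they are already present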
+type DownloadSession interface {
+	GetManualDownloads() []ManualDownload
+	StartDownloads(workers int) chan CompletedDownload
+}
+
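+// CompletedDownload is sent on the channel returned by StartDownloads for each
+// file, once it has been obtained from the cache or downloaded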
+type CompletedDownload struct {
+	File         *os.File
+	DestFilePath string
+	Hashes       map[string]string
+	// Error indicates if/why downloading this file failed
+	Error error
+	// Warning indicates a message to show to the user regarding this file (download was successful, but had a problem)
+	Warning error
+}
+
+type downloadSessionInternal struct {
+	cacheIndex      CacheIndex
+	cacheFolder     string
+	hashesToObtain  []string
+	manualDownloads []ManualDownload
+	downloadTasks   []downloadTask
+}
+
+type downloadTask struct {
+	metaDownloaderData MetaDownloaderData
+	destFilePath       string
+	url                string
+	hashFormat         string
+	hash               string
+}
+
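+// GetManualDownloads returns the list of files that must be downloaded manually by the user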
+func (d downloadSessionInternal) GetManualDownloads() []ManualDownload {
+	return d.manualDownloads
+}
+
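+// StartDownloads starts obtaining the files in this session on the given number of
+// worker goroutines, and returns a channel on which results are sent. A rough
+// consumption sketch (destination handling is up to the caller):
+//
+//	for dl := range session.StartDownloads(4) {
+//		if dl.Error != nil {
+//			// handle/report the failure
+//			continue
+//		}
+//		if dl.Warning != nil {
+//			// surface the warning to the user
+//		}
+//		// copy dl.File to dl.DestFilePath, then close dl.File
+//	}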
+func (d downloadSessionInternal) StartDownloads(workers int) chan CompletedDownload {
+	tasks := make(chan downloadTask)
+	downloads := make(chan CompletedDownload)
+	var indexLock sync.RWMutex
+	var wg sync.WaitGroup
+	wg.Add(workers)
+	for i := 0; i < workers; i++ {
+		go func() {
+			defer wg.Done()
+			for task := range tasks {
+				// Lookup file in index
+				indexLock.RLock()
+				// Map hash stored in mod to cache hash format
+				storedHashFmtList, hasStoredHashFmt := d.cacheIndex.Hashes[task.hashFormat]
+				cacheHashFmtList := d.cacheIndex.Hashes[cacheHashFormat]
+				if hasStoredHashFmt {
+					hashIdx := slices.Index(storedHashFmtList, task.hash)
+					if hashIdx > -1 {
+						// Found in index; try using it!
+						cacheFileHash := cacheHashFmtList[hashIdx]
+						cacheFilePath := filepath.Join(d.cacheFolder, cacheFileHash[:2], cacheFileHash[2:])
+
+						// Find hashes already stored in the index, and remove them from the
+						// list of hashes that still need to be computed for this file
+						hashes := make(map[string]string)
+						hashesToObtain := slices.Clone(d.hashesToObtain)
+						for hashFormat, hashList := range d.cacheIndex.Hashes {
+							if len(hashList) > hashIdx && hashList[hashIdx] != "" {
+								hashes[hashFormat] = hashList[hashIdx]
+								if idx := slices.Index(hashesToObtain, hashFormat); idx > -1 {
+									hashesToObtain = slices.Delete(hashesToObtain, idx, idx+1)
+								}
+							}
+						}
+
+						indexLock.RUnlock()
+
+						// Assuming the file already exists, attempt to open it
+						file, err := os.Open(cacheFilePath)
+						if err == nil {
+							// Calculate hashes
+							if len(hashesToObtain) > 0 {
+								// TODO: this code needs to add more hashes to the index
+								err = teeHashes(cacheFileHash, cacheHashFormat, hashesToObtain, hashes, io.Discard, file)
+								if err != nil {
+									downloads <- CompletedDownload{
+										Error: fmt.Errorf("failed to read hashes of file %s from cache: %w", cacheFilePath, err),
+									}
+									continue
+								}
+							}
+
+							downloads <- CompletedDownload{
+								File:         file,
+								DestFilePath: task.destFilePath,
+								Hashes:       hashes,
+							}
+							continue
+						} else if !os.IsNotExist(err) {
+							// Some other error trying to open the file!
+							downloads <- CompletedDownload{
+								Error: fmt.Errorf("failed to read file %s from cache: %w", cacheFilePath, err),
+							}
+							continue
+						}
+						// The file is missing from the cache; re-acquire the read lock
+						// (released above) so the RUnlock below stays balanced
+						indexLock.RLock()
+					}
+				}
+				indexLock.RUnlock()
+
+				// Create temp file to download to
+				tempFile, err := os.CreateTemp(filepath.Join(d.cacheFolder, "temp"), "download-tmp")
+				if err != nil {
+					downloads <- CompletedDownload{
+						Error: fmt.Errorf("failed to create temporary file for download: %w", err),
+					}
+					continue
+				}
+
+				hashes := make(map[string]string)
+				hashes[task.hashFormat] = task.hash
+
+				// TODO: do download
+				var file *os.File
+				indexLock.Lock()
+				// Re-read the cache hash list under the write lock; another worker may
+				// have modified the index since the read lock was released
+				cacheHashFmtList = d.cacheIndex.Hashes[cacheHashFormat]
+				// Update hashes in the index and open file
+				hashIdx := slices.Index(cacheHashFmtList, hashes[cacheHashFormat])
+				if hashIdx < 0 {
+					// Doesn't exist in the index; add as a new entry at the end of the list
+					hashIdx = len(cacheHashFmtList)
+
+					// The cache path is derived from the cache-format (sha256) hash of the
+					// downloaded data, which the download step above is expected to populate
+					cacheFileHash := hashes[cacheHashFormat]
+					cacheFilePath := filepath.Join(d.cacheFolder, cacheFileHash[:2], cacheFileHash[2:])
+					// Create the containing directory
+					err = os.MkdirAll(filepath.Dir(cacheFilePath), 0755)
+					if err != nil {
+						_ = tempFile.Close()
+						indexLock.Unlock()
+						downloads <- CompletedDownload{
+							Error: fmt.Errorf("failed to create directories for file %s in cache: %w", cacheFilePath, err),
+						}
+						continue
+					}
+					// Create destination file
+					file, err = os.Create(cacheFilePath)
+					if err != nil {
+						_ = tempFile.Close()
+						indexLock.Unlock()
+						downloads <- CompletedDownload{
+							Error: fmt.Errorf("failed to write file %s to cache: %w", cacheFilePath, err),
+						}
+						continue
+					}
+					// Seek back to start of temp file
+					_, err = tempFile.Seek(0, 0)
+					if err != nil {
+						_ = file.Close()
+						_ = tempFile.Close()
+						indexLock.Unlock()
+						downloads <- CompletedDownload{
+							Error: fmt.Errorf("failed to seek temp file %s in cache: %w", tempFile.Name(), err),
+						}
+						continue
+					}
+					// Copy temporary file to cache
+					_, err = io.Copy(file, tempFile)
+					if err != nil {
+						_ = file.Close()
+						_ = tempFile.Close()
+						indexLock.Unlock()
+						downloads <- CompletedDownload{
+							Error: fmt.Errorf("failed to seek temp file %s in cache: %w", tempFile.Name(), err),
+						}
+						continue
+					}
+				} else {
+					// Exists in the index and should exist on disk; open for reading
+					cacheFileHash := cacheHashFmtList[hashIdx]
+					cacheFilePath := filepath.Join(d.cacheFolder, cacheFileHash[:2], cacheFileHash[2:])
+					file, err = os.Open(cacheFilePath)
+					if err != nil {
+						_ = tempFile.Close()
+						indexLock.Unlock()
+						downloads <- CompletedDownload{
+							Error: fmt.Errorf("failed to write file %s to cache: %w", cacheFilePath, err),
+						}
+						continue
+					}
+				}
+				// Close temporary file, as we are done with it
+				err = tempFile.Close()
+				if err != nil {
+					_ = file.Close()
+					indexLock.Unlock()
+					downloads <- CompletedDownload{
+						Error: fmt.Errorf("failed to close temporary file for download: %w", err),
+					}
+					continue
+				}
+				var warning error
+				for hashFormat, hashList := range d.cacheIndex.Hashes {
+					if hashIdx >= len(hashList) {
+						// Add empty values to make hashList fit hashIdx
+						hashList = append(hashList, make([]string, (hashIdx-len(hashList))+1)...)
+						d.cacheIndex.Hashes[hashFormat] = hashList
+					}
+					// Add the hash if one isn't already stored
+					if hashList[hashIdx] == "" {
+						hashList[hashIdx] = hashes[hashFormat]
+					} else if hash, ok := hashes[hashFormat]; ok && hashList[hashIdx] != hash {
+						// The stored hash disagrees with the newly obtained one; warn and overwrite it
+						warning = fmt.Errorf("inconsistent %s hash for %s overwritten - cached value %s replaced with %s", hashFormat,
+							file.Name(), hashList[hashIdx], hash)
+						hashList[hashIdx] = hash
+					}
+					}
+				}
+				indexLock.Unlock()
+
+				downloads <- CompletedDownload{
+					File:         file,
+					DestFilePath: task.destFilePath,
+					Hashes:       hashes,
+					Warning:      warning,
+				}
+			}
+		}()
+	}
+	go func() {
+		for _, v := range d.downloadTasks {
+			tasks <- v
+		}
+		// All tasks have been queued; close the channel so the workers exit
+		close(tasks)
+	}()
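+	go func() {
+		// Close the downloads channel once every worker has finished, so that
+		// consumers ranging over it terminate
+		wg.Wait()
+		close(downloads)
+	}()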
+	return downloads
+}
+
+func teeHashes(validateHash string, validateHashFormat string, hashesToObtain []string, hashes map[string]string,
+	dst io.Writer, src io.Reader) error {
+	// TODO: support additional hash formats (e.g. murmur2) via packwiz's own hash utilities
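+	// A minimal sketch of the hashing tee, assuming only hash formats backed by the
+	// standard library (md5/sha1/sha256/sha512) are requested; other formats would
+	// need dedicated hashers. It copies src to dst, verifies the data against
+	// validateHash, and stores the newly computed hashes (hex-encoded) in hashes.
+	getHasher := func(format string) (hash.Hash, error) {
+		switch strings.ToLower(format) {
+		case "md5":
+			return md5.New(), nil
+		case "sha1":
+			return sha1.New(), nil
+		case "sha256":
+			return sha256.New(), nil
+		case "sha512":
+			return sha512.New(), nil
+		default:
+			return nil, fmt.Errorf("unsupported hash format %s", format)
+		}
+	}
+
+	// Create one hasher per requested format, plus one for validation
+	hashers := make(map[string]hash.Hash, len(hashesToObtain))
+	for _, format := range hashesToObtain {
+		h, err := getHasher(format)
+		if err != nil {
+			return err
+		}
+		hashers[format] = h
+	}
+	validateHasher, err := getHasher(validateHashFormat)
+	if err != nil {
+		return err
+	}
+
+	// Tee the data through every hasher while copying it to dst
+	writers := make([]io.Writer, 0, len(hashers)+2)
+	writers = append(writers, dst, validateHasher)
+	for _, h := range hashers {
+		writers = append(writers, h)
+	}
+	if _, err := io.Copy(io.MultiWriter(writers...), src); err != nil {
+		return fmt.Errorf("failed to read data for hashing: %w", err)
+	}
+
+	// Ensure the data matches the expected hash
+	calculatedHash := hex.EncodeToString(validateHasher.Sum(nil))
+	if !strings.EqualFold(calculatedHash, validateHash) {
+		return fmt.Errorf("%s hash mismatch: expected %s, calculated %s", validateHashFormat, validateHash, calculatedHash)
+	}
+
+	// Store the newly calculated hashes
+	for format, h := range hashers {
+		hashes[format] = hex.EncodeToString(h.Sum(nil))
+	}
+	return nil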
+}
+
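+// cacheHashFormat is the hash format used to identify files stored in the download cache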
+const cacheHashFormat = "sha256"
+
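+// CacheIndex stores the known hashes of files in the download cache.
+// Hashes maps a hash format name to a list of hashes, where the same index across
+// all lists refers to the same cached file (the lists are kept in parallel).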
+type CacheIndex struct {
+	Version uint32
+	Hashes  map[string][]string
+}
+
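+// CreateDownloadSession creates a DownloadSession for the given mods, retrieving
+// metadata for metadata-based download modes and recording which hash formats
+// should be obtained for each downloaded file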
+func CreateDownloadSession(mods []*Mod, hashesToObtain []string) (DownloadSession, error) {
+	// Load cache index
+	cacheIndex := CacheIndex{Version: 1, Hashes: make(map[string][]string)}
+	cachePath, err := GetPackwizCache()
+	if err != nil {
+		return nil, fmt.Errorf("failed to load cache: %w", err)
+	}
+	err = os.MkdirAll(cachePath, 0755)
+	if err != nil {
+		return nil, fmt.Errorf("failed to create cache directory: %w", err)
+	}
+	err = os.MkdirAll(filepath.Join(cachePath, "temp"), 0755)
+	if err != nil {
+		return nil, fmt.Errorf("failed to create cache temp directory: %w", err)
+	}
+	cacheIndexData, err := os.ReadFile(filepath.Join(cachePath, "index.json"))
+	if err != nil {
+		if !os.IsNotExist(err) {
+			return nil, fmt.Errorf("failed to read cache index file: %w", err)
+		}
+	} else {
+		err = json.Unmarshal(cacheIndexData, &cacheIndex)
+		if err != nil {
+			return nil, fmt.Errorf("failed to read cache index file: %w", err)
+		}
+		if cacheIndex.Version > 1 {
+			return nil, fmt.Errorf("cache index is too new (version %v)", cacheIndex.Version)
+		}
+	}
+
+	// Ensure some parts of the index are initialised
+	_, hasCacheHashFmt := cacheIndex.Hashes[cacheHashFormat]
+	if !hasCacheHashFmt {
+		cacheIndex.Hashes[cacheHashFormat] = make([]string, 0)
+	}
+
+	// TODO: move in/ files?
+
+	// Create session
+	downloadSession := downloadSessionInternal{
+		cacheIndex:     cacheIndex,
+		cacheFolder:    cachePath,
+		hashesToObtain: hashesToObtain,
+	}
+
+	pendingMetadata := make(map[string][]*Mod)
+
+	// Get necessary metadata for all files
+	for _, mod := range mods {
+		if mod.Download.Mode == "url" {
+			downloadSession.downloadTasks = append(downloadSession.downloadTasks, downloadTask{
+				destFilePath: mod.GetDestFilePath(),
+				url:          mod.Download.URL,
+				hashFormat:   mod.Download.HashFormat,
+				hash:         mod.Download.Hash,
+			})
+		} else if strings.HasPrefix(mod.Download.Mode, "metadata:") {
+			dlID := strings.TrimPrefix(mod.Download.Mode, "metadata:")
+			pendingMetadata[dlID] = append(pendingMetadata[dlID], mod)
+		} else {
+			return nil, fmt.Errorf("unknown download mode %s for mod %s", mod.Download.Mode, mod.Name)
+		}
+	}
+
+	for dlID, mods := range pendingMetadata {
+		downloader, ok := MetaDownloaders[dlID]
+		if !ok {
+			return nil, fmt.Errorf("unknown download mode %s for mod %s", mods[0].Download.Mode, mods[0].Name)
+		}
+		meta, err := downloader.GetFilesMetadata(mods)
+		if err != nil {
+			return nil, fmt.Errorf("failed to retrieve %s files: %w", dlID, err)
+		}
+		for i, v := range mods {
+			isManual, manualDownload := meta[i].GetManualDownload()
+			if isManual {
+				downloadSession.manualDownloads = append(downloadSession.manualDownloads, manualDownload)
+			} else {
+				downloadSession.downloadTasks = append(downloadSession.downloadTasks, downloadTask{
+					destFilePath:       v.GetDestFilePath(),
+					metaDownloaderData: meta[i],
+					hashFormat:         v.Download.HashFormat,
+					hash:               v.Download.Hash,
+				})
+			}
+		}
+	}
+
+	// TODO: index housekeeping? i.e. remove deleted files, remove old files (LRU?)
+
+	return downloadSession, nil
+}
diff --git a/core/interfaces.go b/core/interfaces.go
index 9edc6bd..9189934 100644
--- a/core/interfaces.go
+++ b/core/interfaces.go
@@ -1,5 +1,7 @@
 package core
 
+import "io"
+
 // Updaters stores all the updaters that packwiz can use. Add your own update systems to this map, keyed by the configuration name.
 var Updaters = make(map[string]Updater)
 
@@ -30,3 +32,25 @@ type UpdateCheck struct {
 	// If an error is returned for a mod, or from CheckUpdate, DoUpdate is not called on that mod / at all
 	Error error
 }
+
+// MetaDownloaders stores all the metadata-based installers that packwiz can use. Add your own downloaders to this map, keyed by the source name.
+var MetaDownloaders = make(map[string]MetaDownloader)
+
+// MetaDownloader specifies a downloader for a Mod that uses a "metadata:source" download mode.
+// GetFilesMetadata returns a MetaDownloaderData for each given Mod, in the same order.
+// The calling code handles caching and hash validation, so implementations don't need to.
+type MetaDownloader interface {
+	GetFilesMetadata([]*Mod) ([]MetaDownloaderData, error)
+}
+
+// MetaDownloaderData specifies the per-Mod metadata retrieved for downloading
+type MetaDownloaderData interface {
+	GetManualDownload() (bool, ManualDownload)
+	DownloadFile(io.Writer) error
+}
+
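+// ManualDownload describes a file that the user must download manually,
+// e.g. when automatic distribution of the file is not permitted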
+type ManualDownload struct {
+	Name     string
+	FileName string
+	DestPath string
+	URL      string
+}
diff --git a/core/mod.go b/core/mod.go
index 8291bf7..63c9348 100644
--- a/core/mod.go
+++ b/core/mod.go
@@ -27,11 +27,18 @@ type Mod struct {
 	Option *ModOption `toml:"option,omitempty"`
 }
 
+const (
+	modeURL string = "url"
+	modeCF  string = "metadata:curseforge"
+)
+
 // ModDownload specifies how to download the mod file
 type ModDownload struct {
-	URL        string `toml:"url"`
+	URL        string `toml:"url,omitempty"`
 	HashFormat string `toml:"hash-format"`
 	Hash       string `toml:"hash"`
+	// Mode determines how the file is downloaded; an empty value or modeURL means the URL field is used directly
+	Mode string `toml:"mode,omitempty"`
 }
 
 // ModOption specifies optional metadata for this mod file
@@ -130,6 +137,7 @@ func (m Mod) GetDestFilePath() string {
 
 // DownloadFile attempts to resolve and download the file
 func (m Mod) DownloadFile(dest io.Writer) error {
+	// TODO: check mode
 	resp, err := http.Get(m.Download.URL)
 	// TODO: content type, user-agent?
 	if err != nil {
@@ -176,6 +184,7 @@ func (m Mod) GetHashes(hashes []string) (map[string]string, error) {
 
 	// Retrieve the remaining hashes
 	if len(hashes) > 0 {
+		// TODO: check mode
 		resp, err := http.Get(m.Download.URL)
 		// TODO: content type, user-agent?
 		if err != nil {
diff --git a/core/storeutil.go b/core/storeutil.go
index ff51a65..ed31727 100644
--- a/core/storeutil.go
+++ b/core/storeutil.go
@@ -44,3 +44,11 @@ func GetPackwizInstallBinFile() (string, error) {
 	}
 	return filepath.Join(binPath, exeName), nil
 }
+
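+// GetPackwizCache returns the path to the packwiz download cache folder within the local store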
+func GetPackwizCache() (string, error) {
+	localStore, err := GetPackwizLocalStore()
+	if err != nil {
+		return "", err
+	}
+	return filepath.Join(localStore, "cache"), nil
+}
diff --git a/curseforge/curseforge.go b/curseforge/curseforge.go
index 7972fef..86cb5a1 100644
--- a/curseforge/curseforge.go
+++ b/curseforge/curseforge.go
@@ -4,6 +4,7 @@ import (
 	"errors"
 	"github.com/spf13/viper"
 	"golang.org/x/exp/slices"
+	"io"
 	"path/filepath"
 	"regexp"
 	"strconv"
@@ -24,6 +25,7 @@ var curseforgeCmd = &cobra.Command{
 func init() {
 	cmd.Add(curseforgeCmd)
 	core.Updaters["curseforge"] = cfUpdater{}
+	core.MetaDownloaders["curseforge"] = cfDownloader{}
 }
 
 var snapshotVersionRegex = regexp.MustCompile("(?:Snapshot )?(\\d+)w0?(0|[1-9]\\d*)([a-z])")
@@ -442,3 +444,24 @@ func parseExportData(from map[string]interface{}) (cfExportData, error) {
 	err := mapstructure.Decode(from, &exportData)
 	return exportData, err
 }
+
+type cfDownloader struct{}
+
+func (c cfDownloader) GetFilesMetadata(mods []*core.Mod) ([]core.MetaDownloaderData, error) {
+	// TODO implement me
+	panic("implement me")
+}
+
+type cfDownloadMetadata struct {
+	url                string
+	allowsDistribution bool
+}
+
+func (m *cfDownloadMetadata) GetManualDownload() (bool, core.ManualDownload) {
+	// TODO: fill in the ManualDownload details (name, file name, URL) for files that cannot be distributed
+	return !m.allowsDistribution, core.ManualDownload{}
+}
+
+func (m *cfDownloadMetadata) DownloadFile(writer io.Writer) error {
+	// TODO implement me
+	panic("implement me")
+}