Refactor downloadCached() interface
(cherry picked from commit df3f5a78d5ab0a1f2dc9d288b271b38a9b8b33b5)

parent 7b9c68766d
commit f8b30338ac

6 changed files with 63 additions and 49 deletions
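
This commit replaces downloadCached()'s positional parameters (URI, unpack flag, name, expected hash, effectiveUrl out-parameter, ttl) with a single CachedDownloadRequest argument, and the effective URI is now reported on CachedDownloadResult instead. A minimal sketch of the new call shape, mirroring the lookupFileArg hunk below; the helper name is invented for illustration and the include location is assumed:

    #include "download.hh"   // assumed header for Downloader and CachedDownloadRequest

    using namespace nix;

    // Old shape, removed by this commit:
    //   getDownloader()->downloadCached(store, uri, /* unpack */ true).path
    Path fetchToStore(ref<Store> store, const std::string & uri)
    {
        CachedDownloadRequest request(uri);  // the URI is the only constructor argument
        request.unpack = true;               // the remaining fields are optional
        return getDownloader()->downloadCached(store, request).path;
    }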
				
			

@@ -45,9 +45,11 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
 
 Path lookupFileArg(EvalState & state, string s)
 {
-    if (isUri(s))
-        return getDownloader()->downloadCached(state.store, s, true).path;
-    else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
+    if (isUri(s)) {
+        CachedDownloadRequest request(s);
+        request.unpack = true;
+        return getDownloader()->downloadCached(state.store, request).path;
+    } else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
         Path p = s.substr(1, s.size() - 2);
         return state.findFile(p);
     } else

@@ -677,7 +677,9 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
 
     if (isUri(elem.second)) {
         try {
-            res = { true, getDownloader()->downloadCached(store, elem.second, true).path };
+            CachedDownloadRequest request(elem.second);
+            request.unpack = true;
+            res = { true, getDownloader()->downloadCached(store, request).path };
         } catch (DownloadError & e) {
             printError(format("warning: Nix search path entry '%1%' cannot be downloaded, ignoring") % elem.second);
             res = { false, "" };

@@ -2050,9 +2050,9 @@ static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args
 void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
     const string & who, bool unpack, const std::string & defaultName)
 {
-    string url;
-    Hash expectedHash;
-    string name = defaultName;
+    CachedDownloadRequest request("");
+    request.unpack = unpack;
+    request.name = defaultName;
 
     state.forceValue(*args[0]);
 

@@ -2063,27 +2063,27 @@ void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
         for (auto & attr : *args[0]->attrs) {
             string n(attr.name);
             if (n == "url")
-                url = state.forceStringNoCtx(*attr.value, *attr.pos);
+                request.uri = state.forceStringNoCtx(*attr.value, *attr.pos);
             else if (n == "sha256")
-                expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
+                request.expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
             else if (n == "name")
-                name = state.forceStringNoCtx(*attr.value, *attr.pos);
+                request.name = state.forceStringNoCtx(*attr.value, *attr.pos);
             else
                 throw EvalError(format("unsupported argument '%1%' to '%2%', at %3%") % attr.name % who % attr.pos);
         }
 
-        if (url.empty())
+        if (request.uri.empty())
             throw EvalError(format("'url' argument required, at %1%") % pos);
 
     } else
-        url = state.forceStringNoCtx(*args[0], pos);
+        request.uri = state.forceStringNoCtx(*args[0], pos);
 
-    state.checkURI(url);
+    state.checkURI(request.uri);
 
-    if (evalSettings.pureEval && !expectedHash)
+    if (evalSettings.pureEval && !request.expectedHash)
         throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who);
 
-    Path res = getDownloader()->downloadCached(state.store, url, unpack, name, expectedHash).path;
+    Path res = getDownloader()->downloadCached(state.store, request).path;
 
     if (state.allowedPaths)
         state.allowedPaths->insert(res);

@@ -293,10 +293,10 @@ struct CurlDownloader : public Downloader
             long httpStatus = 0;
             curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
 
-            char * effectiveUrlCStr;
-            curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUrlCStr);
-            if (effectiveUrlCStr)
-                result.effectiveUrl = effectiveUrlCStr;
+            char * effectiveUriCStr;
+            curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUriCStr);
+            if (effectiveUriCStr)
+                result.effectiveUri = effectiveUriCStr;
 
             debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes",
                 request.verb(), request.uri, code, httpStatus, result.bodySize);

@@ -737,18 +737,20 @@ void Downloader::download(DownloadRequest && request, Sink & sink)
     }
 }
 
-CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string & url_, bool unpack, string name, const Hash & expectedHash, string * effectiveUrl, int ttl)
+CachedDownloadResult Downloader::downloadCached(
+    ref<Store> store, const CachedDownloadRequest & request)
 {
-    auto url = resolveUri(url_);
+    auto url = resolveUri(request.uri);
 
+    auto name = request.name;
     if (name == "") {
         auto p = url.rfind('/');
         if (p != string::npos) name = string(url, p + 1);
     }
 
     Path expectedStorePath;
-    if (expectedHash) {
-        expectedStorePath = store->makeFixedOutputPath(unpack, expectedHash, name);
+    if (request.expectedHash) {
+        expectedStorePath = store->makeFixedOutputPath(request.unpack, request.expectedHash, name);
         if (store->isValidPath(expectedStorePath)) {
             CachedDownloadResult result;
             result.storePath = expectedStorePath;

@@ -782,10 +784,9 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
             auto ss = tokenizeString<vector<string>>(readFile(dataFile), "\n");
             if (ss.size() >= 3 && ss[0] == url) {
                 time_t lastChecked;
-                if (string2Int(ss[2], lastChecked) && lastChecked + ttl >= time(0)) {
+                if (string2Int(ss[2], lastChecked) && lastChecked + request.ttl >= time(0)) {
                     skip = true;
-                    if (effectiveUrl)
-                        *effectiveUrl = url_;
+                    result.effectiveUri = request.uri;
                     result.etag = ss[1];
                 } else if (!ss[1].empty()) {
                     debug(format("verifying previous ETag '%1%'") % ss[1]);

@@ -799,18 +800,17 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
     if (!skip) {
 
         try {
-            DownloadRequest request(url);
-            request.expectedETag = expectedETag;
-            auto res = download(request);
-            if (effectiveUrl)
-                *effectiveUrl = res.effectiveUrl;
+            DownloadRequest request2(url);
+            request2.expectedETag = expectedETag;
+            auto res = download(request2);
+            result.effectiveUri = res.effectiveUri;
             result.etag = res.etag;
 
             if (!res.cached) {
                 ValidPathInfo info;
                 StringSink sink;
                 dumpString(*res.data, sink);
-                Hash hash = hashString(expectedHash ? expectedHash.type : htSHA256, *res.data);
+                Hash hash = hashString(request.expectedHash ? request.expectedHash.type : htSHA256, *res.data);
                 info.path = store->makeFixedOutputPath(false, hash, name);
                 info.narHash = hashString(htSHA256, *sink.s);
                 info.narSize = sink.s->size();

@@ -830,7 +830,7 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
         }
     }
 
-    if (unpack) {
+    if (request.unpack) {
         Path unpackedLink = cacheDir + "/" + baseNameOf(storePath) + "-unpacked";
         PathLocks lock2({unpackedLink}, fmt("waiting for lock on '%1%'...", unpackedLink));
         Path unpackedStorePath;

@@ -853,11 +853,11 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
     }
 
     if (expectedStorePath != "" && storePath != expectedStorePath) {
-        Hash gotHash = unpack
-            ? hashPath(expectedHash.type, store->toRealPath(storePath)).first
-            : hashFile(expectedHash.type, store->toRealPath(storePath));
+        Hash gotHash = request.unpack
+            ? hashPath(request.expectedHash.type, store->toRealPath(storePath)).first
+            : hashFile(request.expectedHash.type, store->toRealPath(storePath));
         throw nix::Error("hash mismatch in file downloaded from '%s':\n  wanted: %s\n  got:    %s",
-            url, expectedHash.to_string(), gotHash.to_string());
+            url, request.expectedHash.to_string(), gotHash.to_string());
     }
 
     result.storePath = storePath;

@@ -57,11 +57,23 @@ struct DownloadResult
 {
     bool cached = false;
     std::string etag;
-    std::string effectiveUrl;
+    std::string effectiveUri;
     std::shared_ptr<std::string> data;
     uint64_t bodySize = 0;
 };
 
+struct CachedDownloadRequest
+{
+    std::string uri;
+    bool unpack = false;
+    std::string name;
+    Hash expectedHash;
+    unsigned int ttl = settings.tarballTtl;
+
+    CachedDownloadRequest(const std::string & uri)
+        : uri(uri) { }
+};
+
 struct CachedDownloadResult
 {
     // Note: 'storePath' may be different from 'path' when using a

@@ -69,6 +81,7 @@ struct CachedDownloadResult
     Path storePath;
     Path path;
     std::optional<std::string> etag;
+    std::string effectiveUri;
 };
 
 class Store;

@@ -96,10 +109,7 @@ struct Downloader
        and is more recent than ‘tarball-ttl’ seconds. Otherwise,
        use the recorded ETag to verify if the server has a more
        recent version, and if so, download it to the Nix store. */
-    CachedDownloadResult downloadCached(
-        ref<Store> store, const string & uri, bool unpack, string name = "",
-        const Hash & expectedHash = Hash(), string * effectiveUri = nullptr,
-        int ttl = settings.tarballTtl);
+    CachedDownloadResult downloadCached(ref<Store> store, const CachedDownloadRequest & request);
 
     enum Error { NotFound, Forbidden, Misc, Transient, Interrupted };
 };
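
The comment kept above still describes the caching behaviour (reuse a download younger than 'tarball-ttl' seconds, otherwise revalidate against the recorded ETag); with the new declaration those knobs live on the request object. The sketch below is illustrative only: the helper name is invented, the header names are assumed, and it uses only fields introduced in this diff:

    #include "download.hh"    // Downloader, CachedDownloadRequest, CachedDownloadResult
    #include "store-api.hh"   // assumed header for ref<Store>

    using namespace nix;

    // Fetch a tarball through the download cache, exercising the new request fields.
    Path fetchChannelLike(ref<Store> store, const std::string & uri)
    {
        CachedDownloadRequest request(uri);
        request.unpack = true;    // unpack into the store, as the fetch() primop does
        request.name = "source";  // overrides the name otherwise derived from the URL
        request.ttl = 0;          // a ttl of 0 forces revalidation (cf. the lastChecked check)
        // request.expectedHash is left unset here; when set, a valid cached store path
        // is reused directly and a mismatch raises the "hash mismatch" error above.

        CachedDownloadResult result = getDownloader()->downloadCached(store, request);
        // result.effectiveUri replaces the old 'string * effectiveUrl' out-parameter,
        // so redirects can be observed as nix-channel does below; result.etag and
        // result.storePath are available to callers as well.
        return result.path;
    }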

@@ -86,10 +86,12 @@ static void update(const StringSet & channelNames)
         // We want to download the url to a file to see if it's a tarball while also checking if we
         // got redirected in the process, so that we can grab the various parts of a nix channel
         // definition from a consistent location if the redirect changes mid-download.
-        std::string effectiveUrl;
+        CachedDownloadRequest request(url);
+        request.ttl = 0;
         auto dl = getDownloader();
-        auto filename = dl->downloadCached(store, url, false, "", Hash(), &effectiveUrl, 0).path;
-        url = chomp(std::move(effectiveUrl));
+        auto result = dl->downloadCached(store, request);
+        auto filename = result.path;
+        url = chomp(result.effectiveUri);
 
         // If the URL contains a version number, append it to the name
         // attribute (so that "nix-env -q" on the channels profile

@@ -121,12 +123,10 @@ static void update(const StringSet & channelNames)
             }
 
             // Download the channel tarball.
-            auto fullURL = url + "/nixexprs.tar.xz";
             try {
-                filename = dl->downloadCached(store, fullURL, false).path;
+                filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.xz")).path;
             } catch (DownloadError & e) {
-                fullURL = url + "/nixexprs.tar.bz2";
-                filename = dl->downloadCached(store, fullURL, false).path;
+                filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.bz2")).path;
             }
             chomp(filename);
         }