Templatise tokenizeString()

parent 00092b2d35
commit 76e88871b2

10 changed files with 30 additions and 39 deletions
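In outline, the commit turns tokenizeString() from a function that always produced a Strings list into a template whose caller picks the result container, so call sites that need random access can ask for a vector<string> instead. A minimal, self-contained sketch of the pattern (simplified; the real definition is in the util.cc hunk further down):

    #include <iostream>
    #include <list>
    #include <string>
    #include <vector>

    // Sketch of the templated tokenizer: split `s` on any character in
    // `separators` and collect the pieces into whatever container type C
    // the caller asks for (anything with push_back works).
    template<class C> C tokenizeString(const std::string & s,
        const std::string & separators = " \t\n\r")
    {
        C result;
        std::string::size_type pos = s.find_first_not_of(separators, 0);
        while (pos != std::string::npos) {
            std::string::size_type end = s.find_first_of(separators, pos + 1);
            if (end == std::string::npos) end = s.size();
            result.push_back(std::string(s, pos, end - pos));
            pos = s.find_first_not_of(separators, end);
        }
        return result;
    }

    int main()
    {
        // The caller picks the container: a list when iterating in order is
        // enough, a vector when indexed access (tokens[0], tokens.at(4)) is needed.
        std::list<std::string>   l = tokenizeString<std::list<std::string> >("a:b:c", ":");
        std::vector<std::string> v = tokenizeString<std::vector<std::string> >("x = y z");
        std::cout << l.size() << " " << v[1] << std::endl;  // prints "3 ="
        return 0;
    }
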
@@ -10,7 +10,7 @@ namespace nix {
 void findAlongAttrPath(EvalState & state, const string & attrPath,
     Bindings & autoArgs, Expr * e, Value & v)
 {
-    Strings tokens = tokenizeString(attrPath, ".");
+    Strings tokens = tokenizeString<Strings>(attrPath, ".");
 
     Error attrError =
         Error(format("attribute selection path `%1%' does not match expression") % attrPath);

@@ -179,7 +179,7 @@ EvalState::EvalState()
 
     /* Initialise the Nix expression search path. */
     searchPathInsertionPoint = searchPath.end();
-    Strings paths = tokenizeString(getEnv("NIX_PATH", ""), ":");
+    Strings paths = tokenizeString<Strings>(getEnv("NIX_PATH", ""), ":");
     foreach (Strings::iterator, i, paths) addToSearchPath(*i);
     addToSearchPath("nix=" + settings.nixDataDir + "/nix/corepkgs");
     searchPathInsertionPoint = searchPath.begin();

@@ -363,7 +363,7 @@ static void prim_derivationStrict(EvalState & state, Value * * args, Value & v)
                     else throw EvalError(format("invalid value `%1%' for `outputHashMode' attribute") % s);
                 }
                 else if (key == "outputs") {
-                    Strings tmp = tokenizeString(s);
+                    Strings tmp = tokenizeString<Strings>(s);
                     outputs.clear();
                     foreach (Strings::iterator, j, tmp) {
                         if (outputs.find(*j) != outputs.end())

@@ -1435,7 +1435,7 @@ HookReply DerivationGoal::tryBuildHook()
     /* Tell the hook about system features (beyond the system type)
        required from the build machine.  (The hook could parse the
        drv file itself, but this is easier.) */
-    Strings features = tokenizeString(drv.env["requiredSystemFeatures"]);
+    Strings features = tokenizeString<Strings>(drv.env["requiredSystemFeatures"]);
     foreach (Strings::iterator, i, features) checkStoreName(*i); /* !!! abuse */
 
     /* Send the request to the hook. */

@@ -1594,7 +1594,7 @@ void DerivationGoal::startBuilder()
        fixed-output derivations is by definition pure (since we
        already know the cryptographic hash of the output). */
     if (fixedOutput) {
-        Strings varNames = tokenizeString(drv.env["impureEnvVars"]);
+        Strings varNames = tokenizeString<Strings>(drv.env["impureEnvVars"]);
         foreach (Strings::iterator, i, varNames) env[*i] = getEnv(*i);
     }
 

@@ -1606,7 +1606,7 @@ void DerivationGoal::startBuilder()
        by `nix-store --register-validity'.  However, the deriver
        fields are left empty. */
     string s = drv.env["exportReferencesGraph"];
-    Strings ss = tokenizeString(s);
+    Strings ss = tokenizeString<Strings>(s);
     if (ss.size() % 2 != 0)
         throw BuildError(format("odd number of tokens in `exportReferencesGraph': `%1%'") % s);
     for (Strings::iterator i = ss.begin(); i != ss.end(); ) {

@@ -1911,14 +1911,11 @@ void DerivationGoal::initChild()
                outside of the namespace.  Making a subtree private is
                local to the namespace, though, so setting MS_PRIVATE
                does not affect the outside world. */
-            Strings mounts = tokenizeString(readFile("/proc/self/mountinfo", true), "\n");
+            Strings mounts = tokenizeString<Strings>(readFile("/proc/self/mountinfo", true), "\n");
             foreach (Strings::iterator, i, mounts) {
-                Strings fields = tokenizeString(*i, " ");
-                assert(fields.size() >= 5);
-                Strings::iterator j = fields.begin();
-                std::advance(j, 4);
-                if (mount(0, j->c_str(), 0, MS_PRIVATE, 0) == -1)
-                    throw SysError(format("unable to make filesystem `%1%' private") % *j);
+                vector<string> fields = tokenizeString<vector<string> >(*i, " ");
+                if (mount(0, fields.at(4).c_str(), 0, MS_PRIVATE, 0) == -1)
+                    throw SysError(format("unable to make filesystem `%1%' private") % fields.at(4));
             }
 
             /* Bind-mount all the directories from the "host"

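The rewritten loop above splits each /proc/self/mountinfo line on spaces and remounts the mount point, which is field 4 counting from 0 (see proc(5)); the bounds-checked fields.at(4) takes over the job of the dropped assert. A small illustration with an invented sample line:

    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    int main()
    {
        // Invented example of a /proc/self/mountinfo line; the real file has
        // one such line per mount.  Field 4 (counting from 0) is the mount point.
        std::string line = "36 35 98:0 /sub /mnt rw,noatime master:1 - ext3 /dev/sda1 rw";

        std::vector<std::string> fields;
        std::istringstream in(line);
        for (std::string f; in >> f; ) fields.push_back(f);

        // at(4) throws std::out_of_range on a malformed (too short) line,
        // which is the failure mode the build.cc change relies on instead
        // of the removed assert.
        std::cout << fields.at(4) << std::endl;  // prints "/mnt"
        return 0;
    }
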
@@ -2053,7 +2050,7 @@ void DerivationGoal::initChild()
 PathSet parseReferenceSpecifiers(const Derivation & drv, string attr)
 {
     PathSet result;
-    Paths paths = tokenizeString(attr);
+    Paths paths = tokenizeString<Paths>(attr);
     foreach (Strings::iterator, i, paths) {
         if (isStorePath(*i))
             result.insert(*i);

@@ -373,7 +373,7 @@ static void addAdditionalRoots(StoreAPI & store, PathSet & roots)
 
     string result = runProgram(rootFinder);
 
-    Strings paths = tokenizeString(result, "\n");
+    Strings paths = tokenizeString<Strings>(result, "\n");
 
     foreach (Strings::iterator, i, paths) {
         if (isInStore(*i)) {

@@ -65,15 +65,7 @@ void Settings::processEnvironment()
         substituters.push_back(nixLibexecDir + "/nix/substituters/download-using-manifests.pl");
         substituters.push_back(nixLibexecDir + "/nix/substituters/download-from-binary-cache.pl");
     } else
-        substituters = tokenizeString(subs, ":");
-}
-
-
-string & at(Strings & ss, unsigned int n)
-{
-    Strings::iterator i = ss.begin();
-    advance(i, n);
-    return *i;
+        substituters = tokenizeString<Strings>(subs, ":");
 }
 
 

@@ -95,15 +87,15 @@ void Settings::loadConfFile()
         if (hash != string::npos)
             line = string(line, 0, hash);
 
-        Strings tokens = tokenizeString(line);
+        vector<string> tokens = tokenizeString<vector<string> >(line);
         if (tokens.empty()) continue;
 
-        if (tokens.size() < 2 || at(tokens, 1) != "=")
+        if (tokens.size() < 2 || tokens[1] != "=")
             throw Error(format("illegal configuration line `%1%' in `%2%'") % line % settingsFile);
 
-        string name = at(tokens, 0);
+        string name = tokens[0];
 
-        Strings::iterator i = tokens.begin();
+        vector<string>::iterator i = tokens.begin();
         advance(i, 2);
         settings[name] = concatStringsSep(" ", Strings(i, tokens.end())); // FIXME: slow
     };

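A note on the two globals.cc hunks above: Strings is a std::list<string> typedef, so it has no operator[], which is why the old code carried the small at() helper that advanced an iterator. Tokenizing the configuration line into a vector<string> makes tokens[0] and tokens[1] plain constant-time indexing and lets the helper go. A sketch of the difference (the helper is the one removed above; the rest is illustrative only):

    #include <iterator>
    #include <list>
    #include <string>
    #include <vector>

    // The helper that the commit deletes: std::list has no random access,
    // so indexing meant walking an iterator n steps (O(n) per lookup).
    static std::string & at(std::list<std::string> & ss, unsigned int n)
    {
        std::list<std::string>::iterator i = ss.begin();
        std::advance(i, n);
        return *i;
    }

    int main()
    {
        std::list<std::string> asList;
        asList.push_back("name"); asList.push_back("="); asList.push_back("value");

        std::vector<std::string> asVector(asList.begin(), asList.end());

        // Old style: at(asList, 1); new style: asVector[1].  Same result,
        // but the vector gives constant-time, bounds-checkable access.
        return (at(asList, 1) == asVector[1]) ? 0 : 1;
    }
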
@@ -170,7 +162,7 @@ void Settings::get(PathSet & res, const string & name)
     SettingsMap::iterator i = settings.find(name);
     if (i == settings.end()) return;
     res.clear();
-    Strings ss = tokenizeString(i->second);
+    Strings ss = tokenizeString<Strings>(i->second);
     res.insert(ss.begin(), ss.end());
 }
 

@@ -1435,7 +1435,7 @@ Path LocalStore::importPath(bool requireSignature, Source & source)
         /* Lock the output path.  But don't lock if we're being called
           from a build hook (whose parent process already acquired a
           lock on this path). */
-        Strings locksHeld = tokenizeString(getEnv("NIX_HELD_LOCKS"));
+        Strings locksHeld = tokenizeString<Strings>(getEnv("NIX_HELD_LOCKS"));
         if (find(locksHeld.begin(), locksHeld.end(), dstPath) == locksHeld.end())
             outputLock.lockPaths(singleton<PathSet, Path>(dstPath));
 

@@ -1645,7 +1645,7 @@ ValidPathInfo LocalStore::queryPathInfoOld(const Path & path)
     string info = readFile(infoFile);
 
     /* Parse it. */
-    Strings lines = tokenizeString(info, "\n");
+    Strings lines = tokenizeString<Strings>(info, "\n");
 
     foreach (Strings::iterator, i, lines) {
         string::size_type p = i->find(':');

@@ -1654,7 +1654,7 @@ ValidPathInfo LocalStore::queryPathInfoOld(const Path & path)
         string name(*i, 0, p);
         string value(*i, p + 2);
         if (name == "References") {
-            Strings refs = tokenizeString(value, " ");
+            Strings refs = tokenizeString<Strings>(value, " ");
             res.references = PathSet(refs.begin(), refs.end());
         } else if (name == "Deriver") {
             res.deriver = value;

@@ -984,9 +984,9 @@ void _interrupted()
 //////////////////////////////////////////////////////////////////////
 
 
-Strings tokenizeString(const string & s, const string & separators)
+template<class C> C tokenizeString(const string & s, const string & separators)
 {
-    Strings result;
+    C result;
     string::size_type pos = s.find_first_not_of(separators, 0);
     while (pos != string::npos) {
         string::size_type end = s.find_first_of(separators, pos + 1);

@@ -998,6 +998,9 @@ Strings tokenizeString(const string & s, const string & separators)
     return result;
 }
 
+template Strings tokenizeString(const string & s, const string & separators);
+template vector<string> tokenizeString(const string & s, const string & separators);
+
 
 string concatStringsSep(const string & sep, const Strings & ss)
 {

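The two `template ...;` lines added above are explicit instantiations. The template's body stays in util.cc while util.hh (next hunk) only declares it, so util.o has to contain generated code for the container types the rest of the tree links against; the instantiations force exactly that. A compile-only sketch of the pattern, collapsed into one listing and using a hypothetical makeSingleton() instead of the real function:

    #include <list>
    #include <string>
    #include <vector>

    // What would live in the header: a declaration only, so callers never
    // see the template body.
    template<class C> C makeSingleton(const std::string & s);

    // What would live in the single .cc file: the definition...
    template<class C> C makeSingleton(const std::string & s)
    {
        C result;
        result.push_back(s);
        return result;
    }

    // ...followed by explicit instantiations.  These make this translation
    // unit emit code for exactly these two container types, so code that
    // only saw the declaration still links; any other C would fail at link time.
    template std::list<std::string> makeSingleton(const std::string &);
    template std::vector<std::string> makeSingleton(const std::string &);
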
@@ -286,7 +286,7 @@ MakeError(Interrupted, BaseError)
 
 
 /* String tokenizer. */
-Strings tokenizeString(const string & s, const string & separators = " \t\n\r");
+template<class C> C tokenizeString(const string & s, const string & separators = " \t\n\r");
 
 
 /* Concatenate the given strings with a separator between the

@@ -193,16 +193,15 @@ static void run(int argc, char * * argv)
     if (st.st_mode & (S_IWGRP | S_IWOTH))
         throw Error(format("`%1%' should not be group or world-writable") % configFile);
 
-    Strings tokens = tokenizeString(readFile(fdConfig));
+    vector<string> tokens = tokenizeString<vector<string> >(readFile(fdConfig));
 
     fdConfig.close();
 
     if (tokens.size() != 2)
         throw Error(format("parse error in `%1%'") % configFile);
 
-    Strings::iterator i = tokens.begin();
-    string nixUser = *i++;
-    string buildUsersGroup = *i++;
+    string nixUser = tokens[0];
+    string buildUsersGroup = tokens[1];
 
 
     /* Check that the caller (real uid) is the one allowed to call