Fix use of temp object in tokenizer
author    scantor <scantor@de75baf8-a10c-0410-a50a-987c0e22f00f>
          Wed, 21 Dec 2011 19:33:51 +0000 (19:33 +0000)
committer scantor <scantor@de75baf8-a10c-0410-a50a-987c0e22f00f>
          Wed, 21 Dec 2011 19:33:51 +0000 (19:33 +0000)
git-svn-id: https://svn.shibboleth.net/cpp-xmltooling/branches/REL_1@943 de75baf8-a10c-0410-a50a-987c0e22f00f

xmltooling/util/ParserPool.cpp

index a195dd7..ca155ca 100644
@@ -239,7 +239,8 @@ bool ParserPool::loadSchema(const XMLCh* nsURI, const XMLCh* pathname)
 
 bool ParserPool::loadCatalogs(const char* pathnames)
 {
-    boost::tokenizer< char_separator<char> > catpaths(string(pathnames), char_separator<char>(PATH_SEPARATOR_STR));
+    string temp(pathnames);
+    boost::tokenizer< char_separator<char> > catpaths(temp, char_separator<char>(PATH_SEPARATOR_STR));
     for_each(
         catpaths.begin(), catpaths.end(),
         // Call loadCatalog with an inner call to s->c_str() on each entry.
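The underlying issue: boost::tokenizer does not copy the container it is given; it stores
begin/end iterators into it. In the old code the std::string built from pathnames was an
unnamed temporary, destroyed at the end of that full expression, so the later iteration in
for_each walked dangling iterators (undefined behavior). Naming the string, as the patch
does, keeps it alive for as long as the tokenizer is used. A minimal, self-contained sketch
of the same pattern (not the xmltooling source; ":" stands in for the platform-dependent
PATH_SEPARATOR_STR):

    #include <iostream>
    #include <string>
    #include <boost/tokenizer.hpp>

    using boost::char_separator;
    using boost::tokenizer;

    int main()
    {
        const char* pathnames = "/tmp/a.xml:/tmp/b.xml";

        // BROKEN: the temporary string dies at the semicolon, leaving the
        // tokenizer holding dangling iterators; iterating it later is UB.
        //   tokenizer< char_separator<char> > bad(
        //       std::string(pathnames), char_separator<char>(":"));

        // FIXED (the shape of the patch): name the string so it outlives
        // the tokenizer and the loop below.
        std::string temp(pathnames);
        tokenizer< char_separator<char> > cats(temp, char_separator<char>(":"));

        for (tokenizer< char_separator<char> >::iterator i = cats.begin();
             i != cats.end(); ++i)
            std::cout << *i << '\n';

        return 0;
    }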