[Libreoffice-commits] core.git: helpcompiler/source
Julien Nabet
serval2412 at yahoo.fr
Sun Mar 18 08:14:21 UTC 2018
helpcompiler/source/BasCodeTagger.cxx | 11 +++----
helpcompiler/source/HelpCompiler.cxx  |  5 +--
helpcompiler/source/HelpIndexer.cxx   |  5 ++-
helpcompiler/source/HelpLinker.cxx    | 51 ++++++++++++----------------------
4 files changed, 29 insertions(+), 43 deletions(-)
New commits:
commit 93f8b703a0f0287354723855afe8e65297ecf716
Author: Julien Nabet <serval2412 at yahoo.fr>
Date: Sat Mar 17 22:06:55 2018 +0100
Use for-range loops in helpcompiler
Change-Id: I787a5b43cb09ac308082cac0e66540f975d79ead
Reviewed-on: https://gerrit.libreoffice.org/51473
Tested-by: Jenkins <ci at libreoffice.org>
Reviewed-by: Julien Nabet <serval2412 at yahoo.fr>
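For reference, the pattern applied throughout this commit is the C++11 range-based for loop replacing explicit iterator loops. The following is a minimal, self-contained sketch of that transformation; the container and variable names (ids, helptexts, joinedIter) are illustrative placeholders, not the helpcompiler types (HashSet, Stringtable, etc.) touched by the diff below.

    // Illustrative sketch only: mirrors the iterator-loop -> range-for
    // refactoring in this commit, using standard containers in place of
    // the helpcompiler typedefs.
    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    int main()
    {
        std::vector<std::string> ids = { "hid1", "hid2", "hid3" };

        // Before: explicit iterator loop with a cached end iterator,
        // the style removed by this commit.
        std::string joinedIter;
        for (std::vector<std::string>::const_iterator it = ids.begin();
             it != ids.end(); ++it)
            joinedIter += *it + ";";

        // After: equivalent range-based for loop, the style introduced here.
        std::string joinedRange;
        for (auto const& id : ids)
            joinedRange += id + ";";

        std::cout << (joinedIter == joinedRange) << '\n'; // prints 1

        // Map-like containers yield key/value pairs, accessed via
        // .first and .second, as in the Stringtable loops below.
        std::map<std::string, std::string> helptexts = { { "hid", "text" } };
        for (auto const& entry : helptexts)
            std::cout << entry.first << " -> " << entry.second << '\n';
    }

Binding the loop variable as auto const& avoids copying each element, which keeps the refactoring behaviour-neutral for the string-heavy containers involved.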
diff --git a/helpcompiler/source/BasCodeTagger.cxx b/helpcompiler/source/BasCodeTagger.cxx
index f59c1d268e0d..8af743e1b5f7 100644
--- a/helpcompiler/source/BasCodeTagger.cxx
+++ b/helpcompiler/source/BasCodeTagger.cxx
@@ -82,7 +82,7 @@ void BasicCodeTagger::getBasicCodeContainerNodes()
}
while ( !m_pXmlTreeWalker->end() )
{
- m_pXmlTreeWalker->nextNode();
+ m_pXmlTreeWalker->nextNode();
if ( !( xmlStrcmp( m_pXmlTreeWalker->currentNode()->name, reinterpret_cast<const xmlChar*>("bascode") ) ) )
{ //Found <bascode>
m_BasicCodeContainerTags.push_back( m_pXmlTreeWalker->currentNode() ); //it goes to the end of the list
@@ -139,14 +139,13 @@ void BasicCodeTagger::tagParagraph( xmlNodePtr paragraph )
RTL_TEXTENCODING_UTF8 );
std::vector<HighlightPortion> portions;
m_Highlighter.getHighlightPortions( strLine, portions );
- for (std::vector<HighlightPortion>::iterator i(portions.begin());
- i != portions.end(); ++i)
+ for (auto const& portion : portions)
{
- OString sToken(OUStringToOString(strLine.copy(i->nBegin, i->nEnd-i->nBegin), RTL_TEXTENCODING_UTF8));
+ OString sToken(OUStringToOString(strLine.copy(portion.nBegin, portion.nEnd-portion.nBegin), RTL_TEXTENCODING_UTF8));
xmlNodePtr text = xmlNewText(reinterpret_cast<const xmlChar*>(sToken.getStr()));
- if ( i->tokenType != TokenType::Whitespace )
+ if ( portion.tokenType != TokenType::Whitespace )
{
- xmlChar* typeStr = getTypeString( i->tokenType );
+ xmlChar* typeStr = getTypeString( portion.tokenType );
curNode = xmlNewTextChild( paragraph, nullptr, reinterpret_cast<xmlChar const *>("item"), nullptr );
xmlNewProp( curNode, reinterpret_cast<xmlChar const *>("type"), typeStr );
xmlAddChild( curNode, text );
diff --git a/helpcompiler/source/HelpCompiler.cxx b/helpcompiler/source/HelpCompiler.cxx
index 92856d04cf93..cc8c319e58a5 100644
--- a/helpcompiler/source/HelpCompiler.cxx
+++ b/helpcompiler/source/HelpCompiler.cxx
@@ -418,10 +418,9 @@ void myparser::traverse( xmlNodePtr parentNode )
//TODO: make these asserts and flush out all our broken help ids
SAL_WARN_IF(hidstr.empty(), "helpcompiler", "hid='' for text:" << text);
SAL_WARN_IF(!hidstr.empty() && extendedHelpText.empty(), "helpcompiler", "hid='.' with no hid bookmark branches in file: " << fileName + " for text: " << text);
- HashSet::const_iterator aEnd = extendedHelpText.end();
- for (HashSet::const_iterator iter = extendedHelpText.begin(); iter != aEnd; ++iter)
+ for (auto const& elem : extendedHelpText)
{
- std::string name = *iter;
+ std::string name = elem;
(*helptexts)[name] = text;
}
}
diff --git a/helpcompiler/source/HelpIndexer.cxx b/helpcompiler/source/HelpIndexer.cxx
index 51966f4c71dc..870db09e0faa 100644
--- a/helpcompiler/source/HelpIndexer.cxx
+++ b/helpcompiler/source/HelpIndexer.cxx
@@ -60,8 +60,9 @@ bool HelpIndexer::indexDocuments()
// Index the identified help files
Document doc;
- for (std::set<OUString>::iterator i = d_files.begin(); i != d_files.end(); ++i) {
- helpDocument(*i, &doc);
+ for (auto const& elem : d_files)
+ {
+ helpDocument(elem, &doc);
writer.addDocument(&doc);
doc.clear();
}
diff --git a/helpcompiler/source/HelpLinker.cxx b/helpcompiler/source/HelpLinker.cxx
index 9589da095405..315ce57035ca 100644
--- a/helpcompiler/source/HelpLinker.cxx
+++ b/helpcompiler/source/HelpLinker.cxx
@@ -140,9 +140,8 @@ struct Data
std::string getString() const
{
std::string ret;
- cIter aEnd = _idList.end();
- for (cIter aIter = _idList.begin(); aIter != aEnd; ++aIter)
- ret += *aIter + ";";
+ for (auto const& elem : _idList)
+ ret += elem + ";";
return ret;
}
};
@@ -190,9 +189,8 @@ public:
if( pFile == nullptr )
return;
- DataHashtable::const_iterator aEnd = _hash.end();
- for (DataHashtable::const_iterator aIter = _hash.begin(); aIter != aEnd; ++aIter)
- writeKeyValue_DBHelp( pFile, aIter->first, aIter->second.getString() );
+ for (auto const& elem : _hash)
+ writeKeyValue_DBHelp( pFile, elem.first, elem.second.getString() );
fclose( pFile );
}
@@ -310,13 +308,12 @@ void HelpLinker::link()
initIndexerPreProcessor();
// here we start our loop over the hzip files.
- HashSet::iterator end = helpFiles.end();
- for (HashSet::iterator iter = helpFiles.begin(); iter != end; ++iter)
+ for (auto const& helpFile : helpFiles)
{
// process one file
// streamTable contains the streams in the hzip file
StreamTable streamTable;
- const std::string &xhpFileName = *iter;
+ const std::string &xhpFileName = helpFile;
if (!bExtensionMode && xhpFileName.rfind(".xhp") != xhpFileName.length()-4)
{
@@ -390,11 +387,9 @@ void HelpLinker::link()
if (hidlist && !hidlist->empty())
{
// now iterate over all elements of the hidlist
- HashSet::const_iterator aEnd = hidlist->end();
- for (HashSet::const_iterator hidListIter = hidlist->begin();
- hidListIter != aEnd; ++hidListIter)
+ for (auto & elem : *hidlist)
{
- std::string thishid = *hidListIter;
+ std::string thishid = elem;
std::string anchorB;
size_t index = thishid.rfind('#');
@@ -412,21 +407,17 @@ void HelpLinker::link()
if (anchorToLL && !anchorToLL->empty())
{
std::string fakedHid = URLEncoder::encode(documentPath);
- Hashtable::const_iterator aEnd = anchorToLL->end();
- for (Hashtable::const_iterator enumer = anchorToLL->begin();
- enumer != aEnd; ++enumer)
+ for (auto const& elemAnchor : *anchorToLL)
{
- const std::string &anchor = enumer->first;
+ const std::string &anchor = elemAnchor.first;
addBookmark(pFileDbBase_DBHelp, documentPath, fileB,
anchor, jarfileB, titleB);
std::string totalId = fakedHid + "#" + anchor;
// std::cerr << hzipFileName << std::endl;
- const LinkedList& ll = enumer->second;
- LinkedList::const_iterator aOtherEnd = ll.end();
- for (LinkedList::const_iterator llIter = ll.begin();
- llIter != aOtherEnd; ++llIter)
+ const LinkedList& ll = elemAnchor.second;
+ for (auto const& elem : ll)
{
- helpKeyword.insert(*llIter, totalId);
+ helpKeyword.insert(elem, totalId);
}
}
@@ -436,12 +427,10 @@ void HelpLinker::link()
const Stringtable *helpTextHash = streamTable.appl_helptexts;
if (helpTextHash && !helpTextHash->empty())
{
- Stringtable::const_iterator aEnd = helpTextHash->end();
- for (Stringtable::const_iterator helpTextIter = helpTextHash->begin();
- helpTextIter != aEnd; ++helpTextIter)
+ for (auto const& elem : *helpTextHash)
{
- std::string helpTextId = helpTextIter->first;
- const std::string& helpTextText = helpTextIter->second;
+ std::string helpTextId = elem.first;
+ const std::string& helpTextText = elem.second;
helpTextId = URLEncoder::encode(helpTextId);
@@ -486,12 +475,10 @@ void HelpLinker::link()
if( !bExtensionMode )
{
// New index
- Stringtable::iterator aEnd = additionalFiles.end();
- for (Stringtable::iterator enumer = additionalFiles.begin(); enumer != aEnd;
- ++enumer)
+ for (auto const& additionalFile : additionalFiles)
{
- const std::string &additionalFileName = enumer->second;
- const std::string &additionalFileKey = enumer->first;
+ const std::string &additionalFileName = additionalFile.second;
+ const std::string &additionalFileKey = additionalFile.first;
fs::path fsAdditionalFileName( additionalFileName, fs::native );
HCDBG({