[Libreoffice-commits] online.git: common/Authorization.cpp common/Protocol.hpp
Pranam Lashkari (via logerrit)
logerrit at kemper.freedesktop.org
Mon May 4 13:11:44 UTC 2020
common/Authorization.cpp | 6 ++++--
common/Protocol.hpp | 29 +++++++++++++++++++++--------
2 files changed, 25 insertions(+), 10 deletions(-)
New commits:
commit 8aa9b37b8601a01751369112742d012cc6800ecd
Author: Pranam Lashkari <lpranam at collabora.com>
AuthorDate: Sat Nov 23 22:23:11 2019 +0530
Commit: Michael Meeks <michael.meeks at collabora.com>
CommitDate: Mon May 4 15:11:26 2020 +0200
Removed the tokenize method that used a std::regex.
Added a new tokenize method taking a const char* delimiter instead.
Change-Id: Id1c4e89e5418d66aaf348ff4d8c3855f80fb4656
Reviewed-on: https://gerrit.libreoffice.org/c/online/+/83574
Tested-by: Jenkins CollaboraOffice <jenkinscollaboraoffice at gmail.com>
Reviewed-by: Michael Meeks <michael.meeks at collabora.com>
diff --git a/common/Authorization.cpp b/common/Authorization.cpp
index 138f98889..cb605fd41 100644
--- a/common/Authorization.cpp
+++ b/common/Authorization.cpp
@@ -53,9 +53,11 @@ void Authorization::authorizeRequest(Poco::Net::HTTPRequest& request) const
// Authorization: Basic ....
// X-Something-Custom: Huh
// Regular expression evaluates and finds "\n\r" and tokenizes accordingly
- std::vector<std::string> tokens(LOOLProtocol::tokenize(_data, std::regex(R"(\n\r)"), /*skipEmpty =*/ true));
- for (const auto& token : tokens)
+ StringVector tokens(LOOLProtocol::tokenize(_data, "\n\r"));
+ for (auto it = tokens.begin(); it != tokens.end(); ++it)
{
+ std::string token = tokens.getParam(*it);
+
size_t separator = token.find_first_of(':');
if (separator != std::string::npos)
{
diff --git a/common/Protocol.hpp b/common/Protocol.hpp
index 72a9aaa34..1327c5fc5 100644
--- a/common/Protocol.hpp
+++ b/common/Protocol.hpp
@@ -138,16 +138,29 @@ namespace LOOLProtocol
return tokenize(s.data(), s.size(), delimiter);
}
- /// Tokenize according to the regex, potentially skip empty tokens.
inline
- std::vector<std::string> tokenize(const std::string& s, const std::regex& pattern, bool skipEmpty = false)
+ StringVector tokenize(const std::string& s, const char* delimiter)
{
- std::vector<std::string> tokens;
- if (skipEmpty)
- std::copy_if(std::sregex_token_iterator(s.begin(), s.end(), pattern, -1), std::sregex_token_iterator(), std::back_inserter(tokens), [](std::string in) { return !in.empty(); });
- else
- std::copy(std::sregex_token_iterator(s.begin(), s.end(), pattern, -1), std::sregex_token_iterator(), std::back_inserter(tokens));
- return tokens;
+ std::vector<StringToken> tokens;
+ if (s.size() == 0)
+ {
+ return StringVector(std::string(), {});
+ }
+
+ size_t start = 0;
+ size_t end = s.find(delimiter, start);
+
+ tokens.emplace_back(start, end - start);
+ start = end + std::strlen(delimiter);
+
+ while(end != std::string::npos)
+ {
+ end = s.find(delimiter, start);
+ tokens.emplace_back(start, end - start);
+ start = end + std::strlen(delimiter);
+ }
+
+ return StringVector(s, tokens);
}
inline
More information about the Libreoffice-commits
mailing list