I am continuing to learn the Boost Spirit library and have a compile issue with an example that I couldn't get to build. The source of the example can be found here: source place. You can also look at this code and the compile result on Coliru.
#include <boost/config/warning_disable.hpp>
#include <boost/spirit/include/lex_lexertl.hpp>
//#define BOOST_SPIRIT_USE_PHOENIX_V3
#include <boost/spirit/include/phoenix_operator.hpp>
#include <boost/spirit/include/phoenix_statement.hpp>
#include <boost/spirit/include/phoenix_algorithm.hpp>
#include <boost/spirit/include/phoenix_core.hpp>
#include <string>
#include <iostream>
namespace lex = boost::spirit::lex;
// Phoenix functor returning the distance between two iterators; used in a
// token semantic action to add the matched token's length to the char count.
struct distance_func
{
    // Phoenix v2 result-of protocol: the result type is the iterator's
    // difference_type (boost::iterator_difference<It>::type).
    template <typename Iterator1, typename Iterator2>
    struct result : boost::iterator_difference<Iterator1> {};

    // FIX: take the iterators by const reference. Newer Spirit.Lex passes
    // the _start/_end placeholder values as rvalues, and an rvalue cannot
    // bind to the non-const `Iterator1&`/`Iterator2&` parameters the
    // original code had -- that mismatch produced the wall of compile
    // errors. (Pass-by-value `Iterator1 begin, Iterator2 end` works too.)
    template <typename Iterator1, typename Iterator2>
    typename result<Iterator1, Iterator2>::type
    operator()(Iterator1 const& begin, Iterator2 const& end) const
    {
        return std::distance(begin, end);
    }
};

// Wrap the functor so it can be used lazily inside Phoenix semantic actions.
boost::phoenix::function<distance_func> const distance = distance_func();
//[wcl_token_definition
// Lexer definition for the word-count example: counts characters (c),
// words (w) and lines (l) as side effects of token semantic actions.
template <typename Lexer>
struct word_count_tokens : lex::lexer<Lexer>
{
word_count_tokens()
: c(0), w(0), l(0)
// token patterns: a "word" is any run of non-whitespace characters,
// "eol" is a single newline, "any" catches every remaining character
, word("[^ \t\n]+") // define tokens
, eol("\n")
, any(".")
{
using boost::spirit::lex::_start;
using boost::spirit::lex::_end;
using boost::phoenix::ref;
// associate tokens with the lexer
// _start/_end delimit the matched token text, so
// distance(_start, _end) is the length of the matched word.
// ref(...) lets the Phoenix actions mutate the member counters.
this->self
= word [++ref(w), ref(c) += distance(_start, _end)]
| eol [++ref(c), ++ref(l)]
| any [++ref(c)]
;
}
// running totals: characters, words, lines
std::size_t c, w, l;
// token values are discarded (see lex::omit in main's token_type),
// hence the empty attribute list on token_def<>
lex::token_def<> word, eol, any;
};
//]
///////////////////////////////////////////////////////////////////////////////
//[wcl_main
// Driver: build the lexer, tokenize the input, and print the counts
// accumulated by the semantic actions in word_count_tokens.
int main(int argc, char* argv[])
{
typedef
lex::lexertl::token<char const*, lex::omit, boost::mpl::false_>
token_type;
/*< This defines the lexer type to use
>*/ typedef lex::lexertl::actor_lexer<token_type> lexer_type;
/*< Create the lexer object instance needed to invoke the lexical analysis
>*/ word_count_tokens<lexer_type> word_count_lexer;
/*< Read input from the given file, tokenize all the input, while discarding
all generated tokens
>*/ std::string str;
// NOTE(review): str is empty here, so the lexer sees no input and the
// counts stay 0; the original Boost example read the file named by
// argv[1] (argc/argv are otherwise unused) -- confirm intent.
char const* first = str.c_str();
char const* last = &first[str.size()];
/*< Create a pair of iterators returning the sequence of generated tokens
>*/ lexer_type::iterator_type iter = word_count_lexer.begin(first, last);
lexer_type::iterator_type end = word_count_lexer.end();
/*< Here we simply iterate over all tokens, making sure to break the loop
if an invalid token gets returned from the lexer
>*/ while (iter != end && token_is_valid(*iter))
++iter;
// counting happened as a side effect of the semantic actions above
if (iter == end) {
std::cout << "lines: " << word_count_lexer.l
<< ", words: " << word_count_lexer.w
<< ", characters: " << word_count_lexer.c
<< "\n";
}
else {
// lexing stopped early: show the unconsumed remainder of the input
std::string rest(first, last);
std::cout << "Lexical analysis failed\n" << "stopped at: \""
<< rest << "\"\n";
}
return 0;
}
When I try to compile it I receive a lot of errors; see the full list on Coliru.
What is wrong with this example? What needs to be changed, and why, to make it compile?
Apparently something changed in the internals of Lex, and the iterator(s) are now rvalues sometimes.
You need to adjust the distance_func
to either read
operator()(Iterator1 begin, Iterator2 end) const
or
operator()(Iterator1 const& begin, Iterator2 const& end) const
Then it works. See Live On Coliru