Use <cstdint> instead of <boost/cstdint.hpp>

The <cstdint> header is mandated by C++11.
Jean-Marc Lasgouttes 2019-06-17 16:19:31 +02:00
parent fbe0caa483
commit 5249eaaa60
5 changed files with 11 additions and 15 deletions
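
Every hunk below applies the same two-step substitution: drop the Boost header and take the fixed-width integer typedefs from the C++11 standard library instead. A minimal standalone sketch of the idiom (illustrative names and values, not code from the LyX tree):

#include <cstdint>   // C++11: declares std::uint32_t (and, in practice, ::uint32_t too)
#include <iostream>

int main()
{
	// boost::uint32_t from <boost/cstdint.hpp> becomes std::uint32_t,
	// or plain uint32_t where a using-directive brings it into scope.
	std::uint32_t checksum = 0xCAFEu;
	std::cout << std::hex << checksum << '\n';
	return 0;
}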

View File

@@ -23,10 +23,9 @@
#include "support/textutils.h"
#include "support/unicode.h"
-#include <boost/cstdint.hpp>
-#include <iterator>
#include <algorithm>
+#include <cstdint>
+#include <iterator>
#include <sstream>
using namespace std;
@@ -703,7 +702,7 @@ void Encodings::read(FileName const & encfile, FileName const & symbolsfile)
istringstream is(symbolslex.getString());
// reading symbol directly does not work if
// char_type == wchar_t.
-boost::uint32_t tmp;
+uint32_t tmp;
if(!(is >> hex >> tmp))
break;
symbol = tmp;
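
The comment in this hunk is the reason the temporary exists at all: when char_type is wchar_t, reading the symbol directly from the stream does not work, so the value is parsed into a 32-bit unsigned temporary and assigned to the symbol afterwards. A small standalone illustration of that pattern (hypothetical values, not LyX code):

#include <cstdint>
#include <sstream>

int main()
{
	std::istringstream is("00A7");              // a code point written in hexadecimal
	std::uint32_t tmp;
	if (!(is >> std::hex >> tmp))               // parse the number into the integer first
		return 1;
	wchar_t symbol = static_cast<wchar_t>(tmp); // then hand it to the character type
	return symbol == 0x00A7 ? 0 : 1;            // exit code 0 on success
}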

View File

@@ -20,7 +20,7 @@
#include <QObject>
#include <QStringList>
-#include <boost/cstdint.hpp>
+#include <cstdint>
namespace lyx {
namespace frontend {
@@ -94,7 +94,7 @@ private:
/// that can be stored in the clipboard
CacheMimeData cache_;
/// checksum for internal clipboard data (used on Mac)
-boost::uint32_t checksum;
+std::uint32_t checksum;
};
QString const lyxMimeType();

View File

@@ -83,9 +83,8 @@
#include "support/lassert.h"
-#include <boost/cstdint.hpp>
#include <cerrno>
+#include <cstdint>
#include <fstream>
#include <utility>
@@ -94,7 +93,6 @@
#endif
using namespace std;
-using boost::uint32_t;
namespace lyx {

View File

@@ -23,13 +23,13 @@ namespace lyx { typedef wchar_t char_type; }
#else
-#if defined(_MSC_VER) && (_MSC_VER >= 1600)
#include <cstdint>
+#if defined(_MSC_VER) && (_MSC_VER >= 1600)
namespace lyx { typedef uint32_t char_type; }
#include "support/numpunct_lyx_char_type.h" // implementation for our char_type needed
#else
-#include <boost/cstdint.hpp>
-namespace lyx { typedef boost::uint32_t char_type; }
+namespace lyx { typedef std::uint32_t char_type; }
#endif
#endif
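
For reference, after this hunk <cstdint> is included unconditionally on non-wchar_t builds and only the typedef itself still depends on the compiler. A condensed sketch of the resulting selection logic (simplified for illustration; the real header also pulls in support/numpunct_lyx_char_type.h on MSVC and only includes <cstdint> in the non-wchar_t branch):

#include <cstdint>

#if defined(USE_WCHAR_T)
namespace lyx { typedef wchar_t char_type; }
#elif defined(_MSC_VER) && (_MSC_VER >= 1600)
namespace lyx { typedef uint32_t char_type; }
#else
namespace lyx { typedef std::uint32_t char_type; }
#endif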

View File

@@ -19,9 +19,8 @@
#include <iconv.h>
-#include <boost/cstdint.hpp>
#include <cerrno>
+#include <cstdint>
#include <map>
#include <ostream>
//Needed in MSVC
@@ -136,7 +135,7 @@ int IconvProcessor::convert(char const * buf, size_t buflen,
for (size_t i = 0; i < buflen; ++i) {
// char may be signed, avoid output of
// something like 0xffffffc2
-boost::uint32_t const b =
+uint32_t const b =
*reinterpret_cast<unsigned char const *>(buf + i);
lyxerr << " 0x" << (unsigned int)b;
}