include/encoding: fix encode/decode for signed durations

Signed-off-by: Sage Weil <sage@redhat.com>
Author: Sage Weil <sage@redhat.com>
Date: 2019-07-08 15:21:51 -05:00
parent d17aff325f
commit ad03fb5b80


@@ -312,8 +312,8 @@ template<typename Rep, typename Period,
 void encode(const std::chrono::duration<Rep, Period>& d,
             ceph::bufferlist &bl) {
   using namespace std::chrono;
-  uint32_t s = duration_cast<seconds>(d).count();
-  uint32_t ns = (duration_cast<nanoseconds>(d) % seconds(1)).count();
+  int32_t s = duration_cast<seconds>(d).count();
+  int32_t ns = (duration_cast<nanoseconds>(d) % seconds(1)).count();
   encode(s, bl);
   encode(ns, bl);
 }
@@ -322,8 +322,8 @@ template<typename Rep, typename Period,
           typename std::enable_if_t<std::is_integral_v<Rep>>* = nullptr>
 void decode(std::chrono::duration<Rep, Period>& d,
             bufferlist::const_iterator& p) {
-  uint32_t s;
-  uint32_t ns;
+  int32_t s;
+  int32_t ns;
   decode(s, p);
   decode(ns, p);
   d = std::chrono::seconds(s) + std::chrono::nanoseconds(ns);
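
For illustration only (not part of the commit): a minimal standalone sketch of why the change matters. Storing the count of a negative duration in the old uint32_t fields wraps it to a large positive value, while the signed int32_t fields round-trip it intact. The sample values are hypothetical; only standard <chrono> facilities are used.

// Standalone sketch, not from the Ceph tree: shows how storing a negative
// duration's count in uint32_t mangles it, while int32_t preserves it.
#include <chrono>
#include <cstdint>
#include <iostream>

int main() {
  using namespace std::chrono;

  auto d = seconds(-5) + nanoseconds(-250);  // a negative (signed) duration

  // Old encoding: unsigned 32-bit fields wrap negative counts around.
  uint32_t us  = duration_cast<seconds>(d).count();                    // 4294967291
  uint32_t uns = (duration_cast<nanoseconds>(d) % seconds(1)).count(); // 4294967046

  // Fixed encoding: signed 32-bit fields keep the sign.
  int32_t ss  = duration_cast<seconds>(d).count();                     // -5
  int32_t sns = (duration_cast<nanoseconds>(d) % seconds(1)).count();  // -250

  std::cout << "unsigned: s=" << us << " ns=" << uns << "\n";
  std::cout << "signed:   s=" << ss << " ns=" << sns << "\n";

  // Reconstructing from the signed fields, as the fixed decode() does,
  // round-trips the original value.
  bool ok = (seconds(ss) + nanoseconds(sns)) == duration_cast<nanoseconds>(d);
  std::cout << "round-trip ok: " << ok << "\n";
}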