diff --git a/src/crimson/osd/backfill_state.cc b/src/crimson/osd/backfill_state.cc
index bd5df9ad1a0..75e1b2209a3 100644
--- a/src/crimson/osd/backfill_state.cc
+++ b/src/crimson/osd/backfill_state.cc
@@ -5,7 +5,11 @@
 #include <boost/type_index.hpp>
 
 #include "crimson/osd/backfill_state.h"
+#ifndef BACKFILL_UNITTEST
 #include "crimson/osd/backfill_facades.h"
+#else
+#include "test/crimson/test_backfill_facades.h"
+#endif
 
 namespace {
   seastar::logger& logger() {
diff --git a/src/test/crimson/CMakeLists.txt b/src/test/crimson/CMakeLists.txt
index 86f76dee64e..2882cc6e541 100644
--- a/src/test/crimson/CMakeLists.txt
+++ b/src/test/crimson/CMakeLists.txt
@@ -1,3 +1,13 @@
+# crimson's backfill machinery neither needs nor uses seastar
+add_executable(unittest-crimson-backfill
+  test_backfill.cc
+  ${PROJECT_SOURCE_DIR}/src/auth/Crypto.cc
+  ${PROJECT_SOURCE_DIR}/src/crimson/osd/backfill_state.cc
+  ${PROJECT_SOURCE_DIR}/src/osd/recovery_types.cc)
+add_ceph_unittest(unittest-crimson-backfill)
+target_compile_definitions(unittest-crimson-backfill PRIVATE -DBACKFILL_UNITTEST)
+target_link_libraries(unittest-crimson-backfill crimson GTest::Main)
+
 add_executable(unittest-seastar-buffer
   test_buffer.cc)
 add_ceph_test(unittest-seastar-buffer
diff --git a/src/test/crimson/test_backfill.cc b/src/test/crimson/test_backfill.cc
new file mode 100644
index 00000000000..bfd95ae3c17
--- /dev/null
+++ b/src/test/crimson/test_backfill.cc
@@ -0,0 +1,362 @@
+#include <algorithm>
+#include <cstdint>
+#include <deque>
+#include <initializer_list>
+#include <iostream>
+#include <iterator>
+#include <limits>
+#include <map>
+#include <set>
+
+#include <boost/statechart/event_base.hpp>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include "common/hobject.h"
+#include "crimson/osd/backfill_state.h"
+#include "osd/recovery_types.h"
+#include "test/crimson/test_backfill_facades.h"
+
+
+// The sole purpose is to convert from the string representation.
+// An alternative approach could use boost::range in FakeStore's
+// constructor.
+struct improved_hobject_t : hobject_t {
+  improved_hobject_t(const char parsable_name[]) {
+    this->parse(parsable_name);
+  }
+  improved_hobject_t(const hobject_t& obj)
+    : hobject_t(obj) {
+  }
+  bool operator==(const improved_hobject_t& rhs) const {
+    return static_cast<const hobject_t&>(*this) == \
+           static_cast<const hobject_t&>(rhs);
+  }
+};
+
+
+struct FakeStore {
+  using objs_t = std::map<improved_hobject_t, eversion_t>;
+
+  objs_t objs;
+
+  void push(const hobject_t& obj, eversion_t version) {
+    objs[obj] = version;
+  }
+
+  template <class Func>
+  hobject_t list(const hobject_t& start, Func&& per_entry) const {
+    auto it = objs.lower_bound(start);
+    for (auto max = std::numeric_limits<std::uint64_t>::max();
+         it != std::end(objs) && max > 0;
+         ++it, --max) {
+      per_entry(*it);
+    }
+    return it != std::end(objs) ? static_cast<hobject_t>(it->first)
+                                : hobject_t::get_max();
+  }
+
+  bool operator==(const FakeStore& rhs) const {
+    return std::size(objs) == std::size(rhs.objs) && \
+           std::equal(std::begin(objs), std::end(objs), std::begin(rhs.objs));
+  }
+  bool operator!=(const FakeStore& rhs) const {
+    return !(*this == rhs);
+  }
+};
+
+
+struct FakeReplica {
+  FakeStore store;
+  hobject_t last_backfill;
+
+  FakeReplica(FakeStore&& store)
+    : store(std::move(store)) {
+  }
+};
+
+struct FakePrimary {
+  FakeStore store;
+  eversion_t last_update;
+  eversion_t projected_last_update;
+  eversion_t log_tail;
+
+  FakePrimary(FakeStore&& store)
+    : store(std::move(store)) {
+  }
+};
+
+class BackfillFixture : public crimson::osd::BackfillState::BackfillListener {
+  friend class BackfillFixtureBuilder;
+
+  FakePrimary backfill_source;
+  std::map<pg_shard_t, FakeReplica> backfill_targets;
+
+  std::deque<
+    boost::intrusive_ptr<
+      const boost::statechart::event_base>> events_to_dispatch;
+  crimson::osd::BackfillState backfill_state;
+
+  BackfillFixture(FakePrimary&& backfill_source,
+                  std::map<pg_shard_t, FakeReplica>&& backfill_targets);
+
+  template <class EventT>
+  void schedule_event(const EventT& event) {
+    events_to_dispatch.emplace_back(event.intrusive_from_this());
+  }
+
+  // BackfillListener {
+  void request_replica_scan(
+    const pg_shard_t& target,
+    const hobject_t& begin,
+    const hobject_t& end) override;
+
+  void request_primary_scan(
+    const hobject_t& begin) override;
+
+  void enqueue_push(
+    const pg_shard_t& target,
+    const hobject_t& obj,
+    const eversion_t& v) override;
+
+  void enqueue_drop(
+    const pg_shard_t& target,
+    const hobject_t& obj,
+    const eversion_t& v) override;
+
+  void update_peers_last_backfill(
+    const hobject_t& new_last_backfill) override;
+
+  bool budget_available() const override;
+
+public:
+  MOCK_METHOD(void, backfilled, (), (override));
+  // }
+
+  void next_round(std::size_t how_many=1) {
+    ceph_assert(events_to_dispatch.size() >= how_many);
+    while (how_many-- > 0) {
+      backfill_state.process_event(std::move(events_to_dispatch.front()));
+      events_to_dispatch.pop_front();
+    }
+  }
+
+  bool all_stores_look_like(const FakeStore& reference) const {
+    const bool all_replica_match = std::all_of(
+      std::begin(backfill_targets), std::end(backfill_targets),
+      [&reference] (const auto kv) {
+        return kv.second.store == reference;
+      });
+    return backfill_source.store == reference && all_replica_match;
+  }
+
+  struct PeeringFacade;
+  struct PGFacade;
+};
+
+struct BackfillFixture::PeeringFacade
+  : public crimson::osd::BackfillState::PeeringFacade {
+  FakePrimary& backfill_source;
+  std::map<pg_shard_t, FakeReplica>& backfill_targets;
+
+  PeeringFacade(FakePrimary& backfill_source,
+                std::map<pg_shard_t, FakeReplica>& backfill_targets)
+    : backfill_source(backfill_source),
+      backfill_targets(backfill_targets) {
+  }
+
+  hobject_t earliest_backfill() const override {
+    hobject_t e = hobject_t::get_max();
+    for (const auto& kv : backfill_targets) {
+      e = std::min(kv.second.last_backfill, e);
+    }
+    return e;
+  }
+  std::set<pg_shard_t> get_backfill_targets() const override {
+    std::set<pg_shard_t> result;
+    std::transform(
+      std::begin(backfill_targets), std::end(backfill_targets),
+      std::inserter(result, std::end(result)),
+      [](auto pair) {
+        return pair.first;
+      });
+    return result;
+  }
+  const hobject_t& get_peer_last_backfill(pg_shard_t peer) const override {
+    return backfill_targets.at(peer).last_backfill;
+  }
+  const eversion_t& get_last_update() const override {
+    return backfill_source.last_update;
+  }
+  const eversion_t& get_log_tail() const override {
+    return backfill_source.log_tail;
+  }
+  template <class... Args>
+  void scan_log_after(Args&&... args) const {
+  }
+
+  bool is_backfill_target(pg_shard_t peer) const override {
+    return backfill_targets.count(peer) == 1;
+  }
+  void update_complete_backfill_object_stats(const hobject_t &hoid,
+                                             const pg_stat_t &stats) override {
+  }
+  bool is_backfilling() const override {
+    return true;
+  }
+};
+
+struct BackfillFixture::PGFacade : public crimson::osd::BackfillState::PGFacade {
+  FakePrimary& backfill_source;
+
+  PGFacade(FakePrimary& backfill_source)
+    : backfill_source(backfill_source) {
+  }
+
+  const eversion_t& get_projected_last_update() const override {
+    return backfill_source.projected_last_update;
+  }
+};
+
+BackfillFixture::BackfillFixture(
+  FakePrimary&& backfill_source,
+  std::map<pg_shard_t, FakeReplica>&& backfill_targets)
+  : backfill_source(std::move(backfill_source)),
+    backfill_targets(std::move(backfill_targets)),
+    backfill_state(*this,
+                   std::make_unique<PeeringFacade>(this->backfill_source,
+                                                   this->backfill_targets),
+                   std::make_unique<PGFacade>(this->backfill_source))
+{
+  backfill_state.process_event(crimson::osd::BackfillState::Triggered{}.intrusive_from_this());
+}
+
+void BackfillFixture::request_replica_scan(
+  const pg_shard_t& target,
+  const hobject_t& begin,
+  const hobject_t& end)
+{
+  std::cout << __func__ << std::endl;
+
+  BackfillInterval bi;
+  bi.end = backfill_targets.at(target).store.list(begin, [&bi](auto kv) {
+    std::cout << kv << std::endl;
+    bi.objects.insert(std::move(kv));
+  });
+  bi.begin = begin;
+  bi.version = backfill_source.last_update;
+
+  schedule_event(crimson::osd::BackfillState::ReplicaScanned{ target, std::move(bi) });
+}
+
+void BackfillFixture::request_primary_scan(
+  const hobject_t& begin)
+{
+  std::cout << __func__ << std::endl;
+
+  BackfillInterval bi;
+  bi.end = backfill_source.store.list(begin, [&bi](auto kv) {
+    std::cout << kv << std::endl;
+    bi.objects.insert(std::move(kv));
+  });
+  bi.begin = begin;
+  bi.version = backfill_source.last_update;
+
+  schedule_event(crimson::osd::BackfillState::PrimaryScanned{ std::move(bi) });
+}
+
+void BackfillFixture::enqueue_push(
+  const pg_shard_t& target,
+  const hobject_t& obj,
+  const eversion_t& v)
+{
+  backfill_targets.at(target).store.push(obj, v);
+  schedule_event(crimson::osd::BackfillState::ObjectPushed{ obj });
+}
+
+void BackfillFixture::enqueue_drop(
+  const pg_shard_t& target,
+  const hobject_t& obj,
+  const eversion_t& v)
+{
+  std::cout << __func__ << std::endl;
+}
+
+void BackfillFixture::update_peers_last_backfill(
+  const hobject_t& new_last_backfill)
+{
+  std::cout << __func__ << std::endl;
+}
+
+bool BackfillFixture::budget_available() const
+{
+  std::cout << __func__ << std::endl;
+  return true;
+}
+
+struct BackfillFixtureBuilder {
+  FakeStore backfill_source;
+  std::map<pg_shard_t, FakeReplica> backfill_targets;
+
+  static BackfillFixtureBuilder add_source(FakeStore::objs_t objs) {
+    BackfillFixtureBuilder bfb;
+    bfb.backfill_source = FakeStore{ std::move(objs) };
+    return bfb;
+  }
+
+  BackfillFixtureBuilder&& add_target(FakeStore::objs_t objs) && {
+    const auto new_osd_num = std::size(backfill_targets);
+    const auto [ _, inserted ] = backfill_targets.emplace(
+      new_osd_num, FakeReplica{ FakeStore{std::move(objs)} });
+    ceph_assert(inserted);
+    return std::move(*this);
+  }
+
+  BackfillFixture get_result() && {
+    return BackfillFixture{ std::move(backfill_source),
+                            std::move(backfill_targets) };
+  }
+};
+
+// The simplest case: single primary, single replica. All have the same
+// content in their object stores, so the entire backfill boils down to just
+// `request_primary_scan()` and `request_replica_scan()`.
+TEST(backfill, same_primary_same_replica)
+{
+  const auto reference_store = FakeStore{ {
+    { "1:00058bcc:::rbd_data.1018ac3e755.00000000000000d5:head", {10, 234} },
+    { "1:00ed7f8e:::rbd_data.1018ac3e755.00000000000000af:head", {10, 196} },
+    { "1:01483aea:::rbd_data.1018ac3e755.0000000000000095:head", {10, 169} },
+  }};
+  auto cluster_fixture = BackfillFixtureBuilder::add_source(
+    reference_store.objs
+  ).add_target(
+    reference_store.objs
+  ).get_result();
+
+  cluster_fixture.next_round();
+  EXPECT_CALL(cluster_fixture, backfilled);
+  cluster_fixture.next_round();
+  EXPECT_TRUE(cluster_fixture.all_stores_look_like(reference_store));
+}
+
+TEST(backfill, single_empty_replica)
+{
+  const auto reference_store = FakeStore{ {
+    { "1:00058bcc:::rbd_data.1018ac3e755.00000000000000d5:head", {10, 234} },
+    { "1:00ed7f8e:::rbd_data.1018ac3e755.00000000000000af:head", {10, 196} },
+    { "1:01483aea:::rbd_data.1018ac3e755.0000000000000095:head", {10, 169} },
+  }};
+  auto cluster_fixture = BackfillFixtureBuilder::add_source(
+    reference_store.objs
+  ).add_target(
+    { /* nothing */ }
+  ).get_result();
+
+  cluster_fixture.next_round();
+  cluster_fixture.next_round();
+  cluster_fixture.next_round(2);
+  EXPECT_CALL(cluster_fixture, backfilled);
+  cluster_fixture.next_round();
+  EXPECT_TRUE(cluster_fixture.all_stores_look_like(reference_store));
+}
diff --git a/src/test/crimson/test_backfill_facades.h b/src/test/crimson/test_backfill_facades.h
new file mode 100644
index 00000000000..d54d14ca5b4
--- /dev/null
+++ b/src/test/crimson/test_backfill_facades.h
@@ -0,0 +1,39 @@
+// -*- mode:C++; tab-width:8; c-basic-offset:2; indent-tabs-mode:t -*-
+// vim: ts=8 sw=2 smarttab
+
+#pragma once
+
+#include "crimson/osd/backfill_state.h"
+
+namespace crimson::osd {
+
+// PeeringFacade -- a facade (in the GoF-defined meaning) simplifying
+// the interface of PeeringState. The motivation is to have an inventory
+// of behaviour that must be provided by a unit test's mock.
+struct BackfillState::PeeringFacade {
+  virtual hobject_t earliest_backfill() const = 0;
+  virtual std::set<pg_shard_t> get_backfill_targets() const = 0;
+  virtual const hobject_t& get_peer_last_backfill(pg_shard_t peer) const = 0;
+  virtual const eversion_t& get_last_update() const = 0;
+  virtual const eversion_t& get_log_tail() const = 0;
+
+  template <class... Args>
+  void scan_log_after(Args&&... args) const {
+  }
+
+  virtual bool is_backfill_target(pg_shard_t peer) const = 0;
+  virtual void update_complete_backfill_object_stats(const hobject_t &hoid,
+                                                     const pg_stat_t &stats) = 0;
+  virtual bool is_backfilling() const = 0;
+  virtual ~PeeringFacade() {}
+};
+
+// PGFacade -- a facade (in the GoF-defined meaning) simplifying the huge
+// interface of crimson's PG class. The motivation is to have an inventory
+// of behaviour that must be provided by a unit test's mock.
+struct BackfillState::PGFacade {
+  virtual const eversion_t& get_projected_last_update() const = 0;
+  virtual ~PGFacade() {}
+};
+
+} // namespace crimson::osd
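
Review note (not part of the patch): since BackfillFixtureBuilder chains add_target() calls, a mixed scenario with one in-sync replica and one empty replica should compose from the same pieces. Below is a minimal sketch of such a case. It reuses only names introduced by this patch (BackfillFixtureBuilder, next_round(), all_stores_look_like(), the mocked backfilled); the test name and the exact next_round() counts are assumptions that would have to be tuned against the real event flow, just as the counts in single_empty_replica were.

TEST(backfill, two_replicas_one_empty)
{
  const auto reference_store = FakeStore{ {
    { "1:00058bcc:::rbd_data.1018ac3e755.00000000000000d5:head", {10, 234} },
    { "1:00ed7f8e:::rbd_data.1018ac3e755.00000000000000af:head", {10, 196} },
  }};
  auto cluster_fixture = BackfillFixtureBuilder::add_source(
    reference_store.objs
  ).add_target(
    reference_store.objs  // replica 0: already in sync
  ).add_target(
    { /* nothing */ }     // replica 1: needs every object pushed
  ).get_result();

  // Hypothetical event schedule; next_round() asserts when fewer events
  // are queued than requested, so these counts must match the machine.
  cluster_fixture.next_round();
  cluster_fixture.next_round(2);
  EXPECT_CALL(cluster_fixture, backfilled);
  cluster_fixture.next_round();
  EXPECT_TRUE(cluster_fixture.all_stores_look_like(reference_store));
}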