Gist created April 29, 2020 at 19:21.
Save SaveTheRbtz/d4574ed05687aa905e51ea30d96d8d23 to your computer, or open it in GitHub Desktop.
Note: this file contains bidirectional Unicode text that may be interpreted or compiled differently than it appears below. To review it safely, open the file in an editor that reveals hidden Unicode characters. (See GitHub's help page on bidirectional Unicode characters to learn more.)
diff --git a/source/common/upstream/load_balancer_impl.cc b/source/common/upstream/load_balancer_impl.cc | |
index 185652b66..09816b0cf 100644 | |
--- a/source/common/upstream/load_balancer_impl.cc | |
+++ b/source/common/upstream/load_balancer_impl.cc | |
@@ -1,7 +1,10 @@ | |
#include "common/upstream/load_balancer_impl.h" | |
+#include <algorithm> | |
#include <cstdint> | |
+#include <limits> | |
#include <memory> | |
+#include <random> | |
#include <string> | |
#include <vector> | |
@@ -738,29 +741,49 @@ HostConstSharedPtr EdfLoadBalancerBase::chooseHostOnce(LoadBalancerContext* cont | |
} | |
} | |
-HostConstSharedPtr LeastRequestLoadBalancer::unweightedHostPick(const HostVector& hosts_to_use, | |
- const HostsSource&) { | |
+HostConstSharedPtr LeastRequestLoadBalancer::unweightedHostPickP2C(const HostVector& hosts_to_use) { | |
+ ASSERT(hosts_to_use.size() > 0); | |
+ const size_t hosts_sz = hosts_to_use.size(); | |
+ if (hosts_sz == 1) { | |
+ return hosts_to_use[0]; | |
+ } | |
+ const uint64_t i = random_.random() % hosts_sz; | |
+ const uint64_t j = (i + 1 + random_.random() % (hosts_sz - 1)) % hosts_sz; | |
+ ASSERT(i != j); | |
+ const auto rqi = hosts_to_use[i]->stats().rq_active_.value(); | |
+ const auto rqj = hosts_to_use[j]->stats().rq_active_.value(); | |
+ return rqi < rqj ? hosts_to_use[i] : hosts_to_use[j]; | |
+} | |
+ | |
+HostConstSharedPtr LeastRequestLoadBalancer::unweightedHostPickPNCApprox(const HostVector& hosts_to_use) { | |
HostSharedPtr candidate_host = nullptr; | |
+ uint32_t candidate_active_rq = std::numeric_limits<uint32_t>::max(); | |
for (uint32_t choice_idx = 0; choice_idx < choice_count_; ++choice_idx) { | |
- const int rand_idx = random_.random() % hosts_to_use.size(); | |
- HostSharedPtr sampled_host = hosts_to_use[rand_idx]; | |
- | |
- if (candidate_host == nullptr) { | |
- // Make a first choice to start the comparisons. | |
- candidate_host = sampled_host; | |
- continue; | |
- } | |
- | |
- const auto candidate_active_rq = candidate_host->stats().rq_active_.value(); | |
+ const auto rand_idx = random_.random() % hosts_to_use.size(); | |
+ const HostSharedPtr sampled_host = hosts_to_use[rand_idx]; | |
const auto sampled_active_rq = sampled_host->stats().rq_active_.value(); | |
- if (sampled_active_rq < candidate_active_rq) { | |
+ | |
+ if (candidate_host == nullptr || sampled_active_rq < candidate_active_rq) { | |
candidate_host = sampled_host; | |
+ candidate_active_rq = sampled_active_rq; | |
} | |
} | |
return candidate_host; | |
} | |
+HostConstSharedPtr LeastRequestLoadBalancer::unweightedHostPick(const HostVector& hosts_to_use, | |
+ const HostsSource&) { | |
+ switch (choice_count_) { | |
+ case 0: | |
+ return nullptr; | |
+ case 2: | |
+ return unweightedHostPickP2C(hosts_to_use); | |
+ default: | |
+ return unweightedHostPickPNCApprox(hosts_to_use); | |
+ } | |
+} | |
+ | |
HostConstSharedPtr RandomLoadBalancer::chooseHostOnce(LoadBalancerContext* context) { | |
const absl::optional<HostsSource> hosts_source = hostSourceToUse(context); | |
if (!hosts_source) { | |
diff --git a/source/common/upstream/load_balancer_impl.h b/source/common/upstream/load_balancer_impl.h | |
index f2d71fb09..7d9a3378d 100644 | |
--- a/source/common/upstream/load_balancer_impl.h | |
+++ b/source/common/upstream/load_balancer_impl.h | |
@@ -461,7 +461,15 @@ private: | |
} | |
HostConstSharedPtr unweightedHostPick(const HostVector& hosts_to_use, | |
const HostsSource& source) override; | |
+ // O(1) fast path for N=2 | |
+ HostConstSharedPtr unweightedHostPickP2C(const HostVector& hosts_to_use); | |
+ // O(m) slowpath, where m is number of hosts to use | |
+ HostConstSharedPtr unweightedHostPickPNC(const HostVector& hosts_to_use); | |
+ // O(n) slowpath, where n is number of choices to make | |
+ HostConstSharedPtr unweightedHostPickPNCApprox(const HostVector& hosts_to_use); | |
const uint32_t choice_count_; | |
+ | |
+ friend class LeastRequestLoadBalancerTest; | |
}; | |
/** | |
diff --git a/test/common/upstream/load_balancer_impl_test.cc b/test/common/upstream/load_balancer_impl_test.cc | |
index 0b7487004..c9f06be90 100644 | |
--- a/test/common/upstream/load_balancer_impl_test.cc | |
+++ b/test/common/upstream/load_balancer_impl_test.cc | |
@@ -1316,14 +1316,14 @@ TEST_P(LeastRequestLoadBalancerTest, SingleHost) { | |
// Host weight is 1. | |
{ | |
- EXPECT_CALL(random_, random()).WillOnce(Return(0)).WillOnce(Return(2)).WillOnce(Return(3)); | |
+ EXPECT_CALL(random_, random()).WillOnce(Return(0)); | |
stats_.max_host_weight_.set(1UL); | |
EXPECT_EQ(hostSet().healthy_hosts_[0], lb_.chooseHost(nullptr)); | |
} | |
// Host weight is 100. | |
{ | |
- EXPECT_CALL(random_, random()).WillOnce(Return(0)).WillOnce(Return(2)).WillOnce(Return(3)); | |
+ EXPECT_CALL(random_, random()).WillOnce(Return(0)); | |
stats_.max_host_weight_.set(100UL); | |
EXPECT_EQ(hostSet().healthy_hosts_[0], lb_.chooseHost(nullptr)); | |
} | |
@@ -1331,7 +1331,7 @@ TEST_P(LeastRequestLoadBalancerTest, SingleHost) { | |
HostVector empty; | |
{ | |
hostSet().runCallbacks(empty, empty); | |
- EXPECT_CALL(random_, random()).WillOnce(Return(0)).WillOnce(Return(2)).WillOnce(Return(3)); | |
+ EXPECT_CALL(random_, random()).WillOnce(Return(0)); | |
EXPECT_EQ(hostSet().healthy_hosts_[0], lb_.chooseHost(nullptr)); | |
} | |
@@ -1364,6 +1364,64 @@ TEST_P(LeastRequestLoadBalancerTest, Normal) { | |
EXPECT_EQ(hostSet().healthy_hosts_[1], lb_.chooseHost(nullptr)); | |
} | |
+ | |
+// TEST_F(LeastRequestLoadBalancerTest, unweightedHost2) { | |
+// HostVector healthy_hosts = { | |
+// makeTestHost(info_, "tcp://127.0.0.1:80"), | |
+// makeTestHost(info_, "tcp://127.0.0.1:81"), | |
+// makeTestHost(info_, "tcp://127.0.0.1:82"), | |
+// makeTestHost(info_, "tcp://127.0.0.1:83"), | |
+// }; | |
+// healthy_hosts[0]->stats().rq_active_.set(4); | |
+// healthy_hosts[1]->stats().rq_active_.set(3); | |
+// healthy_hosts[2]->stats().rq_active_.set(2); | |
+// healthy_hosts[3]->stats().rq_active_.set(1); | |
+ | |
+// const auto host1 = HostVector(healthy_hosts.begin(), healthy_hosts.begin() + 1); | |
+// EXPECT_CALL(random_, random()).Times(2).WillRepeatedly(Return(1)); | |
+// EXPECT_EQ(healthy_hosts[0], lb_.unweightedHostPickP2C(host1)); | |
+ | |
+// const auto host2 = HostVector(healthy_hosts.begin(), healthy_hosts.begin() + 2); | |
+// EXPECT_CALL(random_, random()).Times(2).WillRepeatedly(Return(1)); | |
+// EXPECT_EQ(healthy_hosts[1], lb_.unweightedHostPickP2C(host2)); | |
+ | |
+// EXPECT_CALL(random_, random()).Times(2).WillRepeatedly(Return(1)); | |
+// EXPECT_EQ(healthy_hosts[3], lb_.unweightedHostPickP2C(healthy_hosts)); | |
+// } | |
+ | |
+// TEST_P(LeastRequestLoadBalancerTest, unweightedHostN) { | |
+// HostVector healthy_hosts = { | |
+// makeTestHost(info_, "tcp://127.0.0.1:80"), | |
+// makeTestHost(info_, "tcp://127.0.0.1:81"), | |
+// makeTestHost(info_, "tcp://127.0.0.1:82"), | |
+// makeTestHost(info_, "tcp://127.0.0.1:83"), | |
+// }; | |
+// healthy_hosts[0]->stats().rq_active_.set(4); | |
+// healthy_hosts[1]->stats().rq_active_.set(3); | |
+// healthy_hosts[2]->stats().rq_active_.set(2); | |
+// healthy_hosts[3]->stats().rq_active_.set(1); | |
+ | |
+// const auto host1 = HostVector(healthy_hosts.begin(), healthy_hosts.begin() + 1); | |
+// EXPECT_CALL(random_, random()).Times(2).WillRepeatedly(Return(1)); | |
+// EXPECT_EQ(healthy_hosts[0], lb_.unweightedHostPickPNC(host1)); | |
+// EXPECT_CALL(random_, random()).Times(2).WillRepeatedly(Return(1)); | |
+// EXPECT_EQ(healthy_hosts[0], lb_.unweightedHostPickPNC(host1)); | |
+ | |
+// const auto host2 = HostVector(healthy_hosts.begin(), healthy_hosts.begin() + 2); | |
+// EXPECT_CALL(random_, random()).Times(2).WillRepeatedly(Return(1)); | |
+// EXPECT_EQ(healthy_hosts[0], lb_.unweightedHostPickPNC(host2)); | |
+// EXPECT_CALL(random_, random()).Times(2).WillRepeatedly(Return(0)); | |
+// EXPECT_EQ(healthy_hosts[1], lb_.unweightedHostPickPNC(host2)); | |
+// EXPECT_CALL(random_, random()).Times(2).WillRepeatedly(Return(1)); | |
+// EXPECT_EQ(healthy_hosts[1], lb_.unweightedHostPickPNC(host2)); | |
+// EXPECT_CALL(random_, random()).Times(2).WillRepeatedly(Return(1)); | |
+// EXPECT_EQ(healthy_hosts[1], lb_.unweightedHostPickPNC(host2)); | |
+ | |
+// const auto all_hosts = HostVector(healthy_hosts.begin(), healthy_hosts.end()); | |
+// EXPECT_CALL(random_, random()).Times(2).WillRepeatedly(Return(1)); | |
+// EXPECT_EQ(healthy_hosts[3], lb_.unweightedHostPickPNC(healthy_hosts)); | |
+// } | |
+ | |
TEST_P(LeastRequestLoadBalancerTest, PNC) { | |
hostSet().healthy_hosts_ = { | |
makeTestHost(info_, "tcp://127.0.0.1:80"), makeTestHost(info_, "tcp://127.0.0.1:81"), | |
@@ -1379,36 +1437,39 @@ TEST_P(LeastRequestLoadBalancerTest, PNC) { | |
// Creating various load balancer objects with different choice configs. | |
envoy::api::v2::Cluster::LeastRequestLbConfig lr_lb_config; | |
+ lr_lb_config.mutable_choice_count()->set_value(1); | |
+ LeastRequestLoadBalancer lb_1{priority_set_, nullptr, stats_, runtime_, | |
+ random_, common_config_, lr_lb_config}; | |
lr_lb_config.mutable_choice_count()->set_value(2); | |
LeastRequestLoadBalancer lb_2{priority_set_, nullptr, stats_, runtime_, | |
random_, common_config_, lr_lb_config}; | |
+ lr_lb_config.mutable_choice_count()->set_value(3); | |
+ LeastRequestLoadBalancer lb_3{priority_set_, nullptr, stats_, runtime_, | |
+ random_, common_config_, lr_lb_config}; | |
lr_lb_config.mutable_choice_count()->set_value(5); | |
LeastRequestLoadBalancer lb_5{priority_set_, nullptr, stats_, runtime_, | |
random_, common_config_, lr_lb_config}; | |
- // Verify correct number of choices. | |
- | |
// 0 choices configured should default to P2C. | |
EXPECT_CALL(random_, random()).Times(3).WillRepeatedly(Return(0)); | |
- EXPECT_EQ(hostSet().healthy_hosts_[0], lb_.chooseHost(nullptr)); | |
+ EXPECT_EQ(hostSet().healthy_hosts_[1], lb_.chooseHost(nullptr)); | |
+ | |
+ // 1 choice configured results in a random choice. | |
+ EXPECT_CALL(random_, random()).Times(2).WillRepeatedly(Return(8)); | |
+ EXPECT_EQ(hostSet().healthy_hosts_[8 % 4], lb_1.chooseHost(nullptr)); | |
// 2 choices configured results in P2C. | |
- EXPECT_CALL(random_, random()).Times(3).WillRepeatedly(Return(0)); | |
- EXPECT_EQ(hostSet().healthy_hosts_[0], lb_2.chooseHost(nullptr)); | |
- | |
- // 5 choices configured results in P5C. | |
- EXPECT_CALL(random_, random()).Times(6).WillRepeatedly(Return(0)); | |
- EXPECT_EQ(hostSet().healthy_hosts_[0], lb_5.chooseHost(nullptr)); | |
- | |
- // Verify correct host chosen in P5C scenario. | |
- EXPECT_CALL(random_, random()) | |
- .Times(6) | |
- .WillOnce(Return(0)) | |
- .WillOnce(Return(3)) | |
- .WillOnce(Return(0)) | |
- .WillOnce(Return(3)) | |
- .WillOnce(Return(2)) | |
- .WillOnce(Return(1)); | |
+ EXPECT_CALL(random_, random()).Times(3).WillRepeatedly(Return(1)); | |
+ EXPECT_EQ(hostSet().healthy_hosts_[3], lb_2.chooseHost(nullptr)); | |
+ | |
+ // 3 choices configured results in neither of the 2 most loaded hosts being picked. | |
+ EXPECT_CALL(random_, random()).Times(4).WillRepeatedly(Return(0)); | |
+ const auto picked = lb_3.chooseHost(nullptr); | |
+ EXPECT_NE(hostSet().healthy_hosts_[0], picked); | |
+ EXPECT_NE(hostSet().healthy_hosts_[1], picked); | |
+ | |
+ // 5 choices configured results in the least loaded host being picked. | |
+ EXPECT_CALL(random_, random()).Times(4).WillRepeatedly(Return(0)); | |
EXPECT_EQ(hostSet().healthy_hosts_[3], lb_5.chooseHost(nullptr)); | |
} | |
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.