Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
update name
  • Loading branch information
esythan committed Mar 30, 2022
commit 61e6d9eb377192545b687c7342bd7bbf0c946b56
18 changes: 9 additions & 9 deletions paddle/fluid/distributed/test/brpc_service_dense_sgd_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -155,7 +155,7 @@ void RunServer() {

auto _ps_env = paddle::distributed::PaddlePSEnvironment();
LOG(INFO) << "RUN set_ps_servers";
_ps_env.set_ps_servers(&host_sign_list_, 1);
_ps_env.SetPsServers(&host_sign_list_, 1);
pserver_ptr_ = std::shared_ptr<paddle::distributed::PSServer>(
paddle::distributed::PSServerFactory::create(server_proto));
LOG(INFO) << "RUN configure";
Expand All @@ -175,7 +175,7 @@ void RunClient(std::map<uint64_t, std::vector<paddle::distributed::Region>>&
auto servers_ = host_sign_list_.size();
_ps_env = paddle::distributed::PaddlePSEnvironment();
LOG(INFO) << "Run set_ps_servers";
_ps_env.set_ps_servers(&host_sign_list_, servers_);
_ps_env.SetPsServers(&host_sign_list_, servers_);
LOG(INFO) << "Run Create PSClient";
worker_ptr_ = std::shared_ptr<paddle::distributed::PSClient>(
paddle::distributed::PSClientFactory::create(worker_proto));
Expand All @@ -187,7 +187,7 @@ void RunBrpcPushDense() {
setenv("http_proxy", "", 1);
setenv("https_proxy", "", 1);
auto ph_host = paddle::distributed::PSHost(ip_, port_, 0);
host_sign_list_.push_back(ph_host.serialize_to_string());
host_sign_list_.push_back(ph_host.SerializeToString());

// Start Server
std::thread server_thread(RunServer);
Expand Down Expand Up @@ -229,10 +229,10 @@ void RunBrpcPushDense() {

LOG(INFO) << "Run push_dense_param";
auto push_status =
worker_ptr_->push_dense_param(regions.data(), regions.size(), 0);
worker_ptr_->PushDenseParam(regions.data(), regions.size(), 0);
push_status.wait();

pull_status = worker_ptr_->pull_dense(regions.data(), regions.size(), 0);
pull_status = worker_ptr_->PullDense(regions.data(), regions.size(), 0);
pull_status.wait();

for (size_t idx = 0; idx < tensor->numel(); ++idx) {
Expand All @@ -257,21 +257,21 @@ void RunBrpcPushDense() {

LOG(INFO) << "Run pull_dense_grad";
auto push_grad_status =
worker_ptr_->push_dense_raw_gradient(0, temp, tensor->numel(), closure);
worker_ptr_->PushDenseRawGradient(0, temp, tensor->numel(), closure);
push_grad_status.wait();

auto pull_update_status =
worker_ptr_->pull_dense(regions.data(), regions.size(), 0);
worker_ptr_->PullDense(regions.data(), regions.size(), 0);
pull_update_status.wait();

for (size_t idx = 0; idx < tensor->numel(); ++idx) {
EXPECT_FLOAT_EQ(w[idx], float(idx) - 1.0);
}

LOG(INFO) << "Run stop_server";
worker_ptr_->stop_server();
worker_ptr_->StopServer();
LOG(INFO) << "Run finalize_worker";
worker_ptr_->finalize_worker();
worker_ptr_->FinalizeWorker();
server_thread.join();
}

Expand Down
6 changes: 3 additions & 3 deletions paddle/fluid/distributed/test/brpc_service_sparse_sgd_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ void RunServer() {
::paddle::distributed::PSParameter server_proto = GetServerProto();

auto _ps_env = paddle::distributed::PaddlePSEnvironment();
_ps_env.set_ps_servers(&host_sign_list_, 1);
_ps_env.SetPsServers(&host_sign_list_, 1);
pserver_ptr_ = std::shared_ptr<paddle::distributed::PSServer>(
paddle::distributed::PSServerFactory::create(server_proto));
std::vector<framework::ProgramDesc> empty_vec;
Expand All @@ -172,7 +172,7 @@ void RunClient(std::map<uint64_t, std::vector<paddle::distributed::Region>>&
paddle::distributed::PaddlePSEnvironment _ps_env;
auto servers_ = host_sign_list_.size();
_ps_env = paddle::distributed::PaddlePSEnvironment();
_ps_env.set_ps_servers(&host_sign_list_, servers_);
_ps_env.SetPsServers(&host_sign_list_, servers_);
worker_ptr_ = std::shared_ptr<paddle::distributed::PSClient>(
paddle::distributed::PSClientFactory::create(worker_proto));
worker_ptr_->Configure(worker_proto, dense_regions, _ps_env, 0);
Expand All @@ -182,7 +182,7 @@ void RunBrpcPushSparse() {
setenv("http_proxy", "", 1);
setenv("https_proxy", "", 1);
auto ph_host = paddle::distributed::PSHost(ip_, port_, 0);
host_sign_list_.push_back(ph_host.serialize_to_string());
host_sign_list_.push_back(ph_host.SerializeToString());

// Start Server
std::thread server_thread(RunServer);
Expand Down
14 changes: 7 additions & 7 deletions paddle/fluid/distributed/test/dense_table_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -63,13 +63,13 @@ TEST(CommonDenseTable, Adam) {
common_config->add_params("LearningRate");
common_config->add_dims(1);
common_config->add_initializers("fill_constant&5e-6");
auto ret = table->initialize(table_config, fs_config);
auto ret = table->Initialize(table_config, fs_config);
ASSERT_EQ(ret, 0);

// pull parameters for create and check
std::vector<float> init_values;
init_values.resize(fea_dim);
table->pull_dense(init_values.data(), fea_dim);
table->PullDense(init_values.data(), fea_dim);

// push gradient
std::vector<std::vector<float>> trainer_gradient_values;
Expand All @@ -85,12 +85,12 @@ TEST(CommonDenseTable, Adam) {
// for adam
for (int i = 0; i < trainers; i++) {
auto &push_values = trainer_gradient_values[i];
table->push_dense(push_values.data(), push_values.size());
table->PushDense(push_values.data(), push_values.size());
}

std::vector<float> pull_values;
pull_values.resize(fea_dim);
table->pull_dense(pull_values.data(), fea_dim);
table->PullDense(pull_values.data(), fea_dim);

float mom_rate = 0.99;
float decay_rate = 0.9999;
Expand Down Expand Up @@ -143,13 +143,13 @@ TEST(CommonDenseTable, SGD) {
common_config->add_params("LearningRate");
common_config->add_dims(1);
common_config->add_initializers("fill_constant&1.0");
auto ret = table->initialize(table_config, fs_config);
auto ret = table->Initialize(table_config, fs_config);
ASSERT_EQ(ret, 0);

// pull parameters for create and check
std::vector<float> init_values;
init_values.resize(fea_dim);
table->pull_dense(init_values.data(), fea_dim);
table->PullDense(init_values.data(), fea_dim);

std::vector<float> total_gradients;
total_gradients.resize(fea_dim);
Expand All @@ -172,7 +172,7 @@ TEST(CommonDenseTable, SGD) {
for (int i = 0; i < trainers; i++) {
auto &push_values = trainer_gradient_values[i];
auto task = [table, &push_values] {
table->push_dense(push_values.data(), push_values.size());
table->PushDense(push_values.data(), push_values.size());
};
task_status.push_back(pool_->enqueue(std::move(task)));
}
Expand Down
2 changes: 1 addition & 1 deletion paddle/fluid/distributed/test/memory_sparse_table_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ TEST(MemorySparseTable, SGD) {
}

MemorySparseTable *ctr_table = dynamic_cast<MemorySparseTable *>(table);
ctr_table->save_local_fs("./work/table.save", "0", "test");
ctr_table->SaveLocalFS("./work/table.save", "0", "test");
}

} // namespace distributed
Expand Down
2 changes: 1 addition & 1 deletion paddle/fluid/distributed/test/table_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ TEST(Table, Initialize) {
FsClientParameter fs_config;
// case 1. no accessor
Table *table = new SparseGeoTable();
auto ret = table->initialize(table_config, fs_config);
auto ret = table->Initialize(table_config, fs_config);
ASSERT_EQ(ret, -1);
}
} // namespace distributed
Expand Down