[kvstore] Performance improvement for distributed kvstore (dmlc#972)
* Performance improvement for distributed kvstore

* update

* update
aksnzhy authored Nov 4, 2019
1 parent fdd0fe6 commit cccde03
Showing 5 changed files with 172 additions and 331 deletions.
58 changes: 27 additions & 31 deletions examples/mxnet/dis_kvstore/client.py
@@ -17,46 +17,42 @@ def start_client(args):
     client.connect()

     # Initialize data on server
-    client.init_data(name='embed_0', shape=[10, 3], init_type='zero')
-    client.init_data(name='embed_1', shape=[11, 3], init_type='uniform', low=0.0, high=0.0)
-    client.init_data(name='embed_2', shape=[11], init_type='zero')
+    client.init_data(name='embed_0', server_id=0, shape=[5, 3], init_type='zero')
+    client.init_data(name='embed_0', server_id=1, shape=[6, 3], init_type='zero')
+    client.init_data(name='embed_1', server_id=0, shape=[5], init_type='uniform', low=0.0, high=0.0)
+    client.init_data(name='embed_1', server_id=1, shape=[6], init_type='uniform', low=0.0, high=0.0)

-    tensor_id = mx.nd.array([0, 1, 2], dtype='int64')
-    tensor_data = mx.nd.array([[0., 0., 0., ], [1., 1., 1.], [2., 2., 2.]])
+    data_0 = mx.nd.array([[0., 0., 0., ], [1., 1., 1.], [2., 2., 2.]])
+    data_1 = mx.nd.array([0., 1., 2.])

-    for i in range(5):
-        client.push('embed_0', tensor_id, tensor_data)
-        client.push('embed_1', tensor_id, tensor_data)
-        client.push('embed_2', tensor_id, mx.nd.array([2., 2., 2.]))

-    tensor_id = mx.nd.array([6, 7, 8], dtype='int64')
-    for i in range(5):
-        client.push('embed_0', tensor_id, tensor_data)
-        client.push('embed_1', tensor_id, tensor_data)
-        client.push('embed_2', tensor_id, mx.nd.array([3., 3., 3.]))
+    client.push(name='embed_0', server_id=0, id_tensor=mx.nd.array([0, 2, 4], dtype='int64'), data_tensor=data_0)
+    client.push(name='embed_0', server_id=1, id_tensor=mx.nd.array([1, 3, 5], dtype='int64'), data_tensor=data_0)
+    client.push(name='embed_1', server_id=0, id_tensor=mx.nd.array([0, 2, 4], dtype='int64'), data_tensor=data_1)
+    client.push(name='embed_1', server_id=1, id_tensor=mx.nd.array([1, 3, 5], dtype='int64'), data_tensor=data_1)

     client.barrier()

     if client.get_id() == 0:
-        tensor_id = mx.nd.array([0,1,2,3,4,5,6,7,8,9], dtype='int64')
-        new_tensor_0 = client.pull('embed_0', tensor_id)
-        tensor_id = mx.nd.array([0,1,2,3,4,5,6,7,8,9,10], dtype='int64')
-        new_tensor_1 = client.pull('embed_1', tensor_id)
-        new_tensor_2 = client.pull('embed_2', tensor_id)
+        client.pull(name='embed_0', server_id=0, id_tensor=mx.nd.array([0, 1, 2, 3, 4], dtype='int64'))
+        server_id, new_tensor_0 = client.pull_wait()
+        assert server_id == 0
+        client.pull(name='embed_0', server_id=1, id_tensor=mx.nd.array([0, 1, 2, 3, 4, 5], dtype='int64'))
+        server_id, new_tensor_1 = client.pull_wait()
+        assert server_id == 1

+        print("embed_0:")
+        print(mx.nd.concat(new_tensor_0, new_tensor_1, dim=0))

-        client.push_all('embed_0', new_tensor_0)
-        client.push_all('embed_1', new_tensor_1)
-        client.push_all('embed_2', new_tensor_2)
+        client.pull(name='embed_1', server_id=0, id_tensor=mx.nd.array([0, 1, 2, 3, 4], dtype='int64'))
+        server_id, new_tensor_0 = client.pull_wait()
+        assert server_id == 0
+        client.pull(name='embed_1', server_id=1, id_tensor=mx.nd.array([0, 1, 2, 3, 4, 5], dtype='int64'))
+        server_id, new_tensor_1 = client.pull_wait()
+        assert server_id == 1

-        new_tensor_3 = client.pull_all('embed_0')
-        new_tensor_4 = client.pull_all('embed_1')
-        new_tensor_5 = client.pull_all('embed_2')
-        print("embed_0: ")
-        print(new_tensor_3)
-        print("embed_1: ")
-        print(new_tensor_4)
-        print("embed_2: ")
-        print(new_tensor_5)
+        print("embed_1:")
+        print(mx.nd.concat(new_tensor_0, new_tensor_1, dim=0))

     # Shut-down all the servers
     if client.get_id() == 0:
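The new API shown above partitions each embedding across the kvstore servers and addresses one server per call. Below is a minimal sketch (not part of this commit) of that per-server init/push pattern, using only calls that appear in the diff; the helper name and the pre-connected `client` argument are illustrative assumptions.

import mxnet as mx

def init_and_push(client):
    # 'embed_0' is split across two servers: 5 rows on server 0 and 6 rows on
    # server 1, matching the shapes used in the example above.
    client.init_data(name='embed_0', server_id=0, shape=[5, 3], init_type='zero')
    client.init_data(name='embed_0', server_id=1, shape=[6, 3], init_type='zero')

    data = mx.nd.array([[0., 0., 0.], [1., 1., 1.], [2., 2., 2.]])
    # Each push targets the owning server; id_tensor appears to hold row ids that
    # are local to that server (server 1 stores 6 rows, so ids run up to 5).
    client.push(name='embed_0', server_id=0, id_tensor=mx.nd.array([0, 2, 4], dtype='int64'), data_tensor=data)
    client.push(name='embed_0', server_id=1, id_tensor=mx.nd.array([1, 3, 5], dtype='int64'), data_tensor=data)
    client.barrier()

Compared with the old push('name', id_tensor, data_tensor) signature, the caller now decides server-side placement explicitly instead of pushing against a single global tensor.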
57 changes: 26 additions & 31 deletions examples/pytorch/dis_kvstore/client.py
@@ -1,7 +1,6 @@
 # This is a simple pytorch client demo shows how to use DGL distributed kvstore.
 # In this demo, we initialize two embeddings on server and push/pull data to/from it.
 import dgl
-import torch
 import time
 import argparse
 import torch as th
@@ -18,46 +17,42 @@ def start_client(args):
     client.connect()

     # Initialize data on server
-    client.init_data(name='embed_0', shape=[10, 3], init_type='zero')
-    client.init_data(name='embed_1', shape=[11, 3], init_type='uniform', low=0.0, high=0.0)
-    client.init_data(name='embed_2', shape=[11], init_type='zero')
+    client.init_data(name='embed_0', server_id=0, shape=[5, 3], init_type='zero')
+    client.init_data(name='embed_0', server_id=1, shape=[6, 3], init_type='zero')
+    client.init_data(name='embed_1', server_id=0, shape=[5], init_type='uniform', low=0.0, high=0.0)
+    client.init_data(name='embed_1', server_id=1, shape=[6], init_type='uniform', low=0.0, high=0.0)

-    tensor_id = torch.tensor([0, 1, 2])
-    tensor_data = torch.tensor([[0., 0., 0., ], [1., 1., 1.], [2., 2., 2.]])
+    data_0 = th.tensor([[0., 0., 0., ], [1., 1., 1.], [2., 2., 2.]])
+    data_1 = th.tensor([0., 1., 2.])

-    for i in range(5):
-        client.push('embed_0', tensor_id, tensor_data)
-        client.push('embed_1', tensor_id, tensor_data)
-        client.push('embed_2', tensor_id, th.tensor([2., 2., 2.]))

-    tensor_id = torch.tensor([6, 7, 8])
-    for i in range(5):
-        client.push('embed_0', tensor_id, tensor_data)
-        client.push('embed_1', tensor_id, tensor_data)
-        client.push('embed_2', tensor_id, th.tensor([3., 3., 3.]))
+    client.push(name='embed_0', server_id=0, id_tensor=th.tensor([0, 2, 4]), data_tensor=data_0)
+    client.push(name='embed_0', server_id=1, id_tensor=th.tensor([1, 3, 5]), data_tensor=data_0)
+    client.push(name='embed_1', server_id=0, id_tensor=th.tensor([0, 2, 4]), data_tensor=data_1)
+    client.push(name='embed_1', server_id=1, id_tensor=th.tensor([1, 3, 5]), data_tensor=data_1)

     client.barrier()

     if client.get_id() == 0:
-        tensor_id = torch.tensor([0,1,2,3,4,5,6,7,8,9])
-        new_tensor_0 = client.pull('embed_0', tensor_id)
-        tensor_id = torch.tensor([0,1,2,3,4,5,6,7,8,9,10])
-        new_tensor_1 = client.pull('embed_1', tensor_id)
-        new_tensor_2 = client.pull('embed_2', tensor_id)

-        client.push_all('embed_0', new_tensor_0)
-        client.push_all('embed_1', new_tensor_1)
-        client.push_all('embed_2', new_tensor_2)
+        client.pull(name='embed_0', server_id=0, id_tensor=th.tensor([0, 1, 2, 3, 4]))
+        server_id, new_tensor_0 = client.pull_wait()
+        assert server_id == 0
+        client.pull(name='embed_0', server_id=1, id_tensor=th.tensor([0, 1, 2, 3, 4, 5]))
+        server_id, new_tensor_1 = client.pull_wait()
+        assert server_id == 1

-        new_tensor_3 = client.pull_all('embed_0')
-        new_tensor_4 = client.pull_all('embed_1')
-        new_tensor_5 = client.pull_all('embed_2')
         print("embed_0:")
-        print(new_tensor_3)
+        print(th.cat([new_tensor_0, new_tensor_1]))

+        client.pull(name='embed_1', server_id=0, id_tensor=th.tensor([0, 1, 2, 3, 4]))
+        server_id, new_tensor_0 = client.pull_wait()
+        assert server_id == 0
+        client.pull(name='embed_1', server_id=1, id_tensor=th.tensor([0, 1, 2, 3, 4, 5]))
+        server_id, new_tensor_1 = client.pull_wait()
+        assert server_id == 1

         print("embed_1:")
-        print(new_tensor_4)
-        print("embed_2:")
-        print(new_tensor_5)
+        print(th.cat([new_tensor_0, new_tensor_1]))

     # Shut-down all the servers
     if client.get_id() == 0:
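In the updated examples, pull() issues a request to a single server and pull_wait() returns a (server_id, tensor) pair. The sketch below (not part of this commit) assumes this split allows several requests to be outstanding at once and that each pull_wait() call returns the reply for one earlier pull(), as the asserts in the example suggest; the helper name is illustrative.

import torch as th

def pull_embed_0(client):
    # Local row ids held by each server, matching the shapes initialized above.
    ids = {0: th.tensor([0, 1, 2, 3, 4]),
           1: th.tensor([0, 1, 2, 3, 4, 5])}
    # Issue both requests before waiting, so the servers can answer concurrently.
    for server_id, id_tensor in ids.items():
        client.pull(name='embed_0', server_id=server_id, id_tensor=id_tensor)
    # Collect replies keyed by the returned server_id rather than relying on
    # arrival order.
    parts = {}
    for _ in ids:
        server_id, tensor = client.pull_wait()
        parts[server_id] = tensor
    return th.cat([parts[0], parts[1]])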
