Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
36 commits
Select commit Hold shift + click to select a range
b2f91f2
full coveragre utils
rflamary Jul 24, 2017
c6e648f
test parmap python 3.5
rflamary Jul 24, 2017
a31d3c2
map to list
rflamary Jul 24, 2017
f8e822c
test sinkhorn with empty marginals
rflamary Jul 24, 2017
7d9c5e7
add test optim
rflamary Jul 24, 2017
709d8cb
add dr tests
rflamary Jul 24, 2017
83ecc6d
bregman coverage
rflamary Jul 24, 2017
64cf2fc
tets barycenter
rflamary Jul 24, 2017
33f3d30
clean pep8
rflamary Jul 24, 2017
bd705ed
add test yunmlix and bary
rflamary Jul 24, 2017
f204e98
add test da 58% coverage
rflamary Jul 24, 2017
5aad08a
add test plot
rflamary Jul 24, 2017
a8d7301
add test plot and dataset
rflamary Jul 24, 2017
e11b1d1
test plot with no X
rflamary Jul 24, 2017
11f0652
matplotlib travis
rflamary Jul 24, 2017
46f297f
import nmpy before ot
rflamary Jul 26, 2017
68d7490
numpy assert + n_bins
rflamary Jul 26, 2017
67b011a
numpy assert test_da
rflamary Jul 26, 2017
347e628
n to n_samples
rflamary Jul 26, 2017
4a45135
dr +gpu numpy assert
rflamary Jul 26, 2017
2bc41ad
rng gpu
rflamary Jul 26, 2017
6a02db0
test_optim
rflamary Jul 26, 2017
86418eb
test_optim allclose
rflamary Jul 26, 2017
286de0a
clean test_ot
rflamary Jul 26, 2017
0e06129
Merge branch 'pytest' of github.com:rflamary/POT into pytest
rflamary Jul 26, 2017
81118f2
test_ot random state
rflamary Jul 26, 2017
109fc2a
flake8
rflamary Jul 26, 2017
e0fa14b
flake8
rflamary Jul 26, 2017
d101e08
nearly all review done
rflamary Jul 26, 2017
77037cc
gitignore
rflamary Jul 26, 2017
fac003d
author and license for tets files
rflamary Jul 26, 2017
0097017
add license and authors on all modules
rflamary Jul 26, 2017
251af8e
add author to all examples
rflamary Jul 26, 2017
84aa318
pep8
rflamary Jul 26, 2017
96f8b96
valid flake8
rflamary Jul 26, 2017
838550e
last stuff
rflamary Jul 26, 2017
File filter

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
add dr tests
  • Loading branch information
rflamary committed Jul 24, 2017
commit 709d8cbc9f9961a5175eb64ae497b854e0b9b184
63 changes: 63 additions & 0 deletions test/test_dr.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
import ot
import numpy as np
import pytest

try:  # test if the optional ot.dr dependencies (autograd, pymanopt) are installed
    import ot.dr
    nogo = False  # dimensionality-reduction module importable: run the tests
except ImportError:
    nogo = True  # missing optional deps: skip the tests below


@pytest.mark.skipif(nogo, reason="Missing modules (autograd or pymanopt)")
def test_fda():
    """Smoke-test Fisher discriminant analysis on a noisy circular dataset.

    Builds a 3-class "concentric rings" dataset padded with noise features,
    fits FDA to a 2-D subspace and checks that the learned projection
    matrix has unit-norm columns.
    """
    n_samples = 100  # nb samples in source and target datasets
    nz = 0.2  # noise level on the ring coordinates
    rng = np.random.RandomState(0)  # local RNG: reproducible, no global seeding

    # generate circle dataset: angle t on the unit circle, scaled by the
    # class label (1, 2, 3) to get three concentric rings
    t = rng.rand(n_samples) * 2 * np.pi
    ys = np.floor((np.arange(n_samples) * 1.0 / n_samples * 3)) + 1
    xs = np.concatenate(
        (np.cos(t).reshape((-1, 1)), np.sin(t).reshape((-1, 1))), 1)
    xs = xs * ys.reshape(-1, 1) + nz * rng.randn(n_samples, 2)

    # pad with pure-noise features that the projection should ignore
    n_features_noise = 8
    xs = np.hstack((xs, rng.randn(n_samples, n_features_noise)))

    p = 2  # target subspace dimension

    Pfda, projfda = ot.dr.fda(xs, ys, p)

    # the returned projection callable must accept the training data
    projfda(xs)

    # columns of the projection matrix are unit vectors (orthonormal basis)
    np.testing.assert_allclose(np.sum(Pfda**2, 0), np.ones(p))


@pytest.mark.skipif(nogo, reason="Missing modules (autograd or pymanopt)")
def test_wda():
    """Smoke-test Wasserstein discriminant analysis on a noisy circular dataset.

    Builds a 3-class "concentric rings" dataset padded with noise features,
    runs a few WDA iterations to a 2-D subspace and checks that the learned
    projection matrix has unit-norm columns.
    """
    n_samples = 100  # nb samples in source and target datasets
    nz = 0.2  # noise level on the ring coordinates
    rng = np.random.RandomState(0)  # local RNG: reproducible, no global seeding

    # generate circle dataset: angle t on the unit circle, scaled by the
    # class label (1, 2, 3) to get three concentric rings
    t = rng.rand(n_samples) * 2 * np.pi
    ys = np.floor((np.arange(n_samples) * 1.0 / n_samples * 3)) + 1
    xs = np.concatenate(
        (np.cos(t).reshape((-1, 1)), np.sin(t).reshape((-1, 1))), 1)
    xs = xs * ys.reshape(-1, 1) + nz * rng.randn(n_samples, 2)

    # pad with pure-noise features that the projection should ignore
    n_features_noise = 8
    xs = np.hstack((xs, rng.randn(n_samples, n_features_noise)))

    p = 2  # target subspace dimension

    # few iterations only: this is a smoke test, not a convergence test
    Pwda, projwda = ot.dr.wda(xs, ys, p, maxiter=10)

    # the returned projection callable must accept the training data
    projwda(xs)

    # columns of the projection matrix are unit vectors (orthonormal basis)
    np.testing.assert_allclose(np.sum(Pwda**2, 0), np.ones(p))
53 changes: 28 additions & 25 deletions test/test_gpu.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,8 @@

@pytest.mark.skipif(nogpu, reason="No GPU available")
def test_gpu_sinkhorn():
import ot.gpu

np.random.seed(0)
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

RandomState


def describeRes(r):
print("min:{:.3E}, max::{:.3E}, mean::{:.3E}, std::{:.3E}".format(
Expand Down Expand Up @@ -41,29 +42,31 @@ def describeRes(r):

@pytest.mark.skipif(nogpu, reason="No GPU available")
def test_gpu_sinkhorn_lpl1():
    """Compare CPU and GPU OTDA_lpl1 transport couplings.

    Fits the lpl1 sinkhorn domain-adaptation transport on random problems of
    increasing size with both the CPU and GPU backends, prints timings and
    coupling statistics, and checks the two couplings agree numerically.
    """
    rng = np.random.RandomState(0)  # local RNG: reproducible, no global seeding

    def describe_res(r):
        # snake_case (no CamelCase for functions); summary stats for the logs
        print("min:{:.3E}, max:{:.3E}, mean:{:.3E}, std:{:.3E}"
              .format(np.min(r), np.max(r), np.mean(r), np.std(r)))

    for n_samples in [50, 100, 500, 1000]:
        print(n_samples)
        # random source samples + labels, random target samples
        a = rng.rand(n_samples // 4, 100)
        labels_a = rng.randint(10, size=(n_samples // 4))
        b = rng.rand(n_samples, 100)
        time1 = time.time()
        transport = ot.da.OTDA_lpl1()
        transport.fit(a, labels_a, b)
        G1 = transport.G  # CPU coupling
        time2 = time.time()
        transport = ot.gpu.da.OTDA_lpl1()
        transport.fit(a, labels_a, b)
        G2 = transport.G  # GPU coupling
        time3 = time.time()
        print("Normal sinkhorn lpl1, time: {:6.2f} sec ".format(
            time2 - time1))
        describe_res(G1)
        print("  GPU sinkhorn lpl1, time: {:6.2f} sec ".format(
            time3 - time2))
        describe_res(G2)

        # CPU and GPU backends must produce the same coupling (up to fp error)
        assert np.allclose(G1, G2, rtol=1e-5, atol=1e-5)
4 changes: 2 additions & 2 deletions test/test_optim.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
def test_conditional_gradient():

n = 100 # nb bins
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

n_bins


np.random.seed(0)
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

RandomState

# bin positions
x = np.arange(n, dtype=np.float64)

Expand Down Expand Up @@ -38,7 +38,7 @@ def df(G):
def test_generalized_conditional_gradient():

n = 100 # nb bins

np.random.seed(0)
# bin positions
x = np.arange(n, dtype=np.float64)

Expand Down