NEW: Implement an orthogonal probe relaxation function #101

Draft · wants to merge 32 commits into main

Changes from all commits (32 commits)
83d1e36
REF: Use variable probe in cgrad forward model
carterbox Mar 4, 2021
9517882
NEW: Implement a variety of PCA algorithms
carterbox Mar 4, 2021
0270db6
NEW: Implement an OPR function
carterbox Mar 4, 2021
3cfb92d
DEV: Prototype variable probe correction for cgrad
carterbox Mar 4, 2021
0382682
BUG: Add missing parameter
carterbox Mar 4, 2021
d312add
NEW: Add probe constraint to center illumination
carterbox Sep 15, 2021
0845509
NEW: Add new ADAM based ptychography method
carterbox Sep 15, 2021
ed59bf6
REF: Center illuminations as a group
carterbox Sep 16, 2021
6139789
NEW: Add a sparsity constraint for probes
carterbox Sep 16, 2021
16a6e66
NEW: Skip probe sparsity when 1
carterbox Sep 16, 2021
179a14d
NEW: Add probe sparsity constraint as option
carterbox Sep 16, 2021
e42be70
BUG: Rolling wrong axes
carterbox Sep 16, 2021
53eb7e7
REF: Smaller sigma for sparsity constraint
carterbox Sep 16, 2021
0cb540d
TST: Test new probe constraints
carterbox Sep 16, 2021
0448698
REF: Move randomizer to random module
carterbox Sep 20, 2021
d4af9f6
BUG: Use correct function name for new random API
carterbox Sep 21, 2021
c871892
REF: Concatenate all times and costs
carterbox Sep 21, 2021
a09bfad
BUG: Use proper syntax for randomizer
carterbox Sep 21, 2021
059de31
REF: Update probes sequentially in ADAM
carterbox Sep 21, 2021
7c3ac13
BUG: Bugs related to returning costs
carterbox Sep 22, 2021
ec19daf
Update probes in ADAM sequentially
carterbox Sep 22, 2021
c3ec8e1
BUG: Cost should always be moved to host
carterbox Sep 22, 2021
9a58510
REF: Simplify cost and time reporting
carterbox Sep 22, 2021
e8f5493
REF: Don't always set cost to inf
carterbox Sep 22, 2021
7c7bca5
TST: Plot convergence criteria in tests
carterbox Sep 22, 2021
09cded3
Merge remote-tracking branch 'upstream/master' into opr
carterbox Sep 22, 2021
b0a5067
Merge branch 'master' into opr
carterbox Sep 24, 2021
62e059c
Merge branch 'adam-grad' into opr
carterbox Sep 24, 2021
a346c9e
NEW: Add variable probe to forward model in ADAM
carterbox Sep 24, 2021
d6d9210
TST: Save probes as tiled images
carterbox Sep 24, 2021
f11a948
BUG: Return eigen_probes from cgrad
carterbox Sep 24, 2021
754f639
STUB: play with different ways of implementing OPR and ADAM
carterbox Sep 28, 2021
3 changes: 2 additions & 1 deletion src/tike/opt.py
@@ -12,8 +12,9 @@

 import numpy as np
 
+from tike.random import randomizer
+
 logger = logging.getLogger(__name__)
-randomizer = np.random.default_rng()
 
 
 def batch_indicies(n, m=1, use_random=True):
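Context for the change above: the module-level generator that used to be created in tike.opt (the deleted default_rng() line) is now imported from tike.random, so every consumer shares one random stream. The sketch below only illustrates how a helper like batch_indicies could consume that shared generator; the helper body and the array_split choice are assumptions, not the actual tike implementation.

import numpy as np

# Hypothetical stand-in for tike.random.randomizer: the refactor implies the
# shared numpy Generator now lives in tike.random instead of tike.opt.
randomizer = np.random.default_rng()


def batch_indicies(n, m=1, use_random=True):
    """Split range(n) into m batches of indices (illustrative sketch only)."""
    order = randomizer.permutation(n) if use_random else np.arange(n)
    return np.array_split(order, m)


# e.g. batch_indicies(10, m=3) -> three roughly equal index batches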
163 changes: 163 additions & 0 deletions src/tike/pca.py
@@ -0,0 +1,163 @@
import unittest
from unittest.case import skip

import numpy as np

from tike.linalg import hermitian as _hermitian
from tike.linalg import pca_eig


def pca_incremental(data, k, S=None, U=None):
    """Principal component analysis with method IIIB from Arora et al (2012).

    This method iteratively updates a current guess for the principal
    components of a population.

    Parameters
    ----------
    data (..., N, D)
        Array of N observations of a D dimensional space.
    k : int
        The desired number of principal components.
    S (..., k), U (..., D, k), optional
        A previous estimate of the components (as in Returns) to be updated
        with the new observations.

    Returns
    -------
    S (..., k)
        The singular values corresponding to the current principal components
        sorted largest to smallest.
    U (..., D, k)
        The current best principal components of the population.

    References
    ----------
    Arora, R., Cotter, A., Livescu, K., & Srebro, N. (2012). Stochastic
    optimization for PCA and PLS. 2012 50th Annual Allerton Conference on
    Communication, Control, and Computing (Allerton), 863.
    https://doi.org/10.1109/Allerton.2012.6483308

    """
    ndim = data.shape[-1]
    nsamples = data.shape[-2]
    lead = data.shape[:-2]
    if S is None or U is None:
        # TODO: Better initial guess for one sample?
        S = np.ones((*lead, k), dtype=data.dtype)
        U = np.zeros((*lead, ndim, k), dtype=data.dtype)
        U[..., list(range(k)), list(range(k))] = 1
    if S.shape != (*lead, k):
        raise ValueError('S is the wrong shape', S.shape)
    if U.shape != (*lead, ndim, k):
        raise ValueError('U is the wrong shape', U.shape)

    for m in range(nsamples):

        x = data[..., m, :, None]

        # (k, d) x (d, 1)
        xy = _hermitian(U) @ x

        # (d, 1) - (d, d) x (d, 1)
        xp = x - U @ _hermitian(U) @ x

        # (..., 1, 1)
        norm_xp = np.linalg.norm(xp, axis=-2, keepdims=True)

        # [
        # (k, k), (k, 1)
        # (1, k), (1, 1)
        # ]
        Q = np.empty(shape=(*lead, k + 1, k + 1), dtype=x.dtype)
        Q[..., :-1, :-1] = xy @ _hermitian(xy)
        Q[..., -1:, -1:] = norm_xp * norm_xp
        for i in range(k):
            Q[..., i, i] = S[..., i]
        Q[..., :-1, -1:] = norm_xp * xy
        # Skip one assignment because the matrix is conjugate symmetric
        # Q[..., -1:, :-1] = norm_xp * _hermitian(xy)
        S1, U1 = np.linalg.eigh(Q, UPLO='U')

        # [(d, k), (d, 1)] x (k + 1, k + 1)
        Utilde = np.concatenate([U, xp / norm_xp], axis=-1) @ U1
        Stilde = S1

        # Skip sorting because eigh() guarantees the vectors are already sorted
        # order = np.argsort(Stilde, axis=-1)
        # Stilde = np.take_along_axis(Stilde, order, axis=-1)
        # Utilde = np.take_along_axis(Utilde, order[..., None, :], axis=-1)
        S, U = Stilde[..., -1:-(k + 1):-1], Utilde[..., -1:-(k + 1):-1]

    return S, U


def pca_svd(data, k):
    """Return k principal components via singular value decomposition.

    Parameters
    ----------
    data (..., N, D)
        Array of N observations of a D dimensional space.
    k : int
        The desired number of principal components.

    Returns
    -------
    W (..., N, k)
        The weights projecting the original observations onto the
        k-dimensional subspace.
    C (..., k, D)
        The k principal components sorted largest to smallest.

    """
    U, S, Vh = np.linalg.svd(data, full_matrices=False, compute_uv=True)
    assert data.shape == ((U * S[..., None, :]) @ Vh).shape
    # The svd API states that singular values are returned in descending
    # order, i.e. the best vectors are first.
    U = U[..., :k]
    S = S[..., None, :k]
    Vh = Vh[..., :k, :]
    return U * S, Vh


class TestPrincipalComponentAnalysis(unittest.TestCase):

    def setUp(self, batch=2, sample=100, dimensions=4):
        # Generate some random uncentered data that is strongly biased towards
        # having principal components. The first batch is flipped so the
        # principal components start at the last dimension.
        np.random.seed(0)
        self.data = np.random.normal(
            np.random.rand(dimensions),
            10 / (np.arange(dimensions) + 1),
            size=[batch, sample, dimensions],
        )
        self.data[0] = self.data[0, ..., ::-1]

    def print_metrics(self, W, C, k):
        I = C @ _hermitian(C)
        np.testing.assert_allclose(
            I,
            np.tile(np.eye(k), (C.shape[0], 1, 1)),
            atol=1e-12,
        )
        print(
            'reconstruction error: ',
            np.linalg.norm(W @ C - self.data, axis=(1, 2)),
        )

    @unittest.skip("Broken due to testing API change.")
    def test_numpy_eig(self, k=2):
        S, U = pca_eig(self.data, k)
        print('EIG COV principal components\n', U)
        self.print_metrics(U, k)

    def test_numpy_svd(self, k=2):
        W, C = pca_svd(self.data, k)
        print('SVD principal components\n', C)
        self.print_metrics(W, C, k)

    @unittest.skip("Broken due to testing API change.")
    def test_incremental_pca(self, k=2):
        S, U = pca_incremental(self.data, k=k)

        print('INCREMENTAL principal components\n', U)
        self.print_metrics(U, k)


if __name__ == "__main__":
    unittest.main()
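To make the purpose of the new pca.py module concrete in the context of this PR, here is a small usage sketch: pca_svd factors a stack of flattened per-position probe estimates into weights and an orthogonal component basis, which is the low-rank decomposition that orthogonal probe relaxation (OPR) builds on. The probe shapes, the import path, and the reconstruction step below are illustrative assumptions rather than the tike ptychography API.

import numpy as np

from tike.pca import pca_svd  # assumed import path for the module above

# Pretend data: 100 scan positions, each with a 16x16 complex probe estimate,
# flattened into rows of a (100, 256) observation matrix.
nscan, probe_shape, k = 100, (16, 16), 3
probes = (np.random.rand(nscan, *probe_shape)
          + 1j * np.random.rand(nscan, *probe_shape))
data = probes.reshape(nscan, -1)

# W holds per-position weights; C holds k shared principal components.
W, C = pca_svd(data, k)
assert W.shape == (nscan, k) and C.shape == (k, data.shape[-1])

# Each per-position probe is approximated by a weighted sum of k shared
# "eigen probes"; OPR relaxes these weights during reconstruction.
eigen_probes = C.reshape(k, *probe_shape)
probe_at_position_7 = (W[7, :, None, None] * eigen_probes).sum(axis=0)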
30 changes: 29 additions & 1 deletion src/tike/ptycho/object.py
@@ -29,9 +29,37 @@ class ObjectOptions:
 
     """
 
-    def __init__(self, positivity_constraint=0, smoothness_constraint=0):
+    def __init__(
+        self,
+        positivity_constraint=0,
+        smoothness_constraint=0,
+        use_adaptive_moment=False,
+        vdecay=0.6,
+        mdecay=0.666,
+    ):
         self.positivity_constraint = positivity_constraint
         self.smoothness_constraint = smoothness_constraint
+        self.use_adaptive_moment = use_adaptive_moment
+        self.vdecay = vdecay
+        self.mdecay = mdecay
+        self.v = None
+        self.m = None
+
+    def put(self):
+        """Copy to the current GPU memory."""
+        if self.v is not None:
+            self.v = cp.asarray(self.v)
+        if self.m is not None:
+            self.m = cp.asarray(self.m)
+        return self
+
+    def get(self):
+        """Copy to the host CPU memory."""
+        if self.v is not None:
+            self.v = cp.asnumpy(self.v)
+        if self.m is not None:
+            self.m = cp.asnumpy(self.m)
+        return self
 
 
 def positivity_constraint(x, r):
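The new use_adaptive_moment, vdecay, and mdecay fields on ObjectOptions point to an ADAM-style update for the object, but the update routine itself is not part of this hunk. The sketch below shows only the generic first/second-moment step that such fields typically drive; the function name, step_length, and eps are assumptions, and tike's real solver may apply bias correction or different broadcasting.

import numpy as np


def adam_step(x, grad, options, step_length=1.0, eps=1e-8):
    """Apply one generic ADAM-style update using ObjectOptions moment buffers."""
    if options.v is None or options.m is None:
        options.v = np.zeros_like(grad.real)
        options.m = np.zeros_like(grad)
    # First moment: running average of the gradient.
    options.m = options.mdecay * options.m + (1 - options.mdecay) * grad
    # Second moment: running average of the squared gradient magnitude.
    options.v = options.vdecay * options.v + (1 - options.vdecay) * np.abs(grad)**2
    return x - step_length * options.m / (np.sqrt(options.v) + eps)


# e.g. opts = ObjectOptions(use_adaptive_moment=True); psi = adam_step(psi, grad, opts)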