@@ -130,7 +130,6 @@ def rank_one(X, Y, alpha, size_u, u0=None, v0=None, Z=None, rtol=1e-6, verbose=F
     # .. used in conjugate gradient ..
     def obj(X_, Y_, Z_, a, b, c, alpha, u0):
         uv0 = khatri_rao(b, a)
-        u0 = u0.reshape((a.size, -1), order='C')
         cost = .5 * linalg.norm(Y_ - X_.matvec(uv0) - Z_.matmat(c), 'fro') ** 2
         reg = .5 * alpha * linalg.norm(a - u0, 'fro') ** 2
         return cost + reg
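
For context, a self-contained sketch of the objective this hunk computes: a squared Frobenius residual of Y against the Khatri-Rao factor uv0 plus a ridge term pulling a toward the prior u0. All shapes and values below are made up, and scipy.linalg.khatri_rao stands in for the repository's khatri_rao helper, which may differ.

    import numpy as np
    from scipy import linalg

    rng = np.random.default_rng(0)
    n, p, q, k, r = 50, 6, 4, 3, 2              # made-up problem sizes
    X = rng.standard_normal((n, p * q))          # design matrix
    Z = rng.standard_normal((n, k))              # nuisance regressors
    Y = rng.standard_normal((n, r))              # targets
    a = rng.standard_normal((p, r))              # spatial factor
    b = rng.standard_normal((q, r))              # temporal factor
    c = rng.standard_normal((k, r))              # nuisance coefficients
    u0, alpha = rng.standard_normal((p, r)), 0.1

    uv0 = linalg.khatri_rao(b, a)                # column-wise Kronecker, shape (p*q, r)
    cost = .5 * np.linalg.norm(Y - X @ uv0 - Z @ c, 'fro') ** 2
    reg = .5 * alpha * np.linalg.norm(a - u0, 'fro') ** 2
    print(cost + reg)
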
@@ -166,8 +165,8 @@ def fprime(w, X_, Y_, Z_, size_u, alpha, u0):
     for y_i in Y_split:  # TODO; remove
         w0_i = w0.reshape((size_u + size_v + Z_.shape[1], n_task), order='F')[:, counter:(counter + y_i.shape[1])]
         u0_i = u0[:, counter:(counter + y_i.shape[1])]
-        options = {'factr': rtol / np.finfo(np.float).eps, 'maxfun': maxiter}
-        res = optimize.minimize(f, w0_i.ravel(), jac=fprime, method='TNC', options=options,
+        options = {'factr': rtol / np.finfo(np.float).eps, 'maxfun': maxiter, 'verbose': verbose}
+        res = optimize.minimize(f, w0_i.ravel(), jac=fprime, method='L-BFGS-B', options=options,
                                 args=(X, y_i, Z_, size_u, alpha, u0_i), tol=1e-12)
         #if out[2]['warnflag'] != 0:
         #    print('Not converged')
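
A note on the new call: with scipy.optimize.minimize, 'factr' and 'verbose' are fmin_l_bfgs_b-era names; the L-BFGS-B backend expects 'ftol' (ftol = factr * eps, so 'factr': rtol / np.finfo(float).eps amounts to 'ftol': rtol) and 'disp', and np.float itself is removed in recent NumPy. A minimal sketch with those option names, using a placeholder quadratic objective and made-up values:

    import numpy as np
    from scipy import optimize

    def f(w):                    # placeholder objective, stands in for obj()
        return 0.5 * np.sum((w - 1.0) ** 2)

    def fprime(w):               # its gradient
        return w - 1.0

    rtol, maxiter, verbose = 1e-6, 500, False    # placeholder values
    # 'ftol': rtol is equivalent to the old 'factr': rtol / np.finfo(float).eps
    options = {'ftol': rtol, 'maxfun': maxiter, 'disp': verbose}
    res = optimize.minimize(f, np.zeros(5), jac=fprime,
                            method='L-BFGS-B', options=options)
    print(res.x, res.success)
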