We collected the following code examples from open-source Python projects to illustrate how to use torch.gesv().
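Before the extracted examples, here is a minimal sketch of the basic call. It assumes an older PyTorch release where torch.gesv is still available (later releases replaced it with torch.solve and then torch.linalg.solve); the tensors are illustrative, not taken from any of the projects below.

import torch

# Solve A X = B for X. torch.gesv takes the right-hand side first,
# then the coefficient matrix, and returns a tuple (solution, LU).
A = torch.randn(5, 5)   # illustrative square coefficient matrix
B = torch.randn(5, 3)   # illustrative right-hand sides
X, LU = torch.gesv(B, A)
print(torch.dist(torch.mm(A, X), B))  # residual should be near zero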
def backward(ctx, grad_output):
    L, = ctx.saved_variables

    if ctx.upper:
        L = L.t()
        grad_output = grad_output.t()

    # make sure not to double-count variation, since
    # only half of output matrix is unique
    Lbar = grad_output.tril()

    P = Potrf.phi(torch.mm(L.t(), Lbar))
    S = torch.gesv(P + P.t(), L.t())[0]
    S = torch.gesv(S.t(), L.t())[0]
    S = Potrf.phi(S)

    return S, None
def test_gesv(self):
    a = torch.Tensor(((6.80, -2.11, 5.66, 5.97, 8.23),
                      (-6.05, -3.30, 5.36, -4.44, 1.08),
                      (-0.45, 2.58, -2.70, 0.27, 9.04),
                      (8.32, 2.71, 4.35, -7.17, 2.14),
                      (-9.67, -5.14, -7.26, 6.08, -6.87))).t()
    b = torch.Tensor(((4.02, 6.19, -8.22, -7.57, -3.03),
                      (-1.56, 4.00, -8.67, 1.75, 2.86),
                      (9.81, -4.09, -4.57, -8.61, 8.99))).t()

    res1 = torch.gesv(b, a)[0]
    self.assertLessEqual(b.dist(torch.mm(a, res1)), 1e-12)

    ta = torch.Tensor()
    tb = torch.Tensor()
    res2 = torch.gesv(tb, ta, b, a)[0]
    res3 = torch.gesv(b, a, b, a)[0]
    self.assertEqual(res1, tb)
    self.assertEqual(res1, b)
    self.assertEqual(res1, res2)
    self.assertEqual(res1, res3)

    # test reuse
    res1 = torch.gesv(b, a)[0]
    ta = torch.Tensor()
    tb = torch.Tensor()
    torch.gesv(tb, ta, b, a)[0]
    self.assertEqual(res1, tb)
    torch.gesv(tb, ta, b, a)[0]
    self.assertEqual(res1, tb)
def test_gesv(self):
    a = torch.Tensor(((6.80, -2.11, 5.66, 5.97, 8.23),
                      (-6.05, -3.30, 5.36, -4.44, 1.08),
                      (-0.45, 2.58, -2.70, 0.27, 9.04),
                      (8.32, 2.71, 4.35, -7.17, 2.14),
                      (-9.67, -5.14, -7.26, 6.08, -6.87))).t()
    b = torch.Tensor(((4.02, 6.19, -8.22, -7.57, -3.03),
                      (-1.56, 4.00, -8.67, 1.75, 2.86),
                      (9.81, -4.09, -4.57, -8.61, 8.99))).t()

    res1 = torch.gesv(b, a)[0]
    self.assertLessEqual(b.dist(torch.mm(a, res1)), 1e-12)

    ta = torch.Tensor()
    tb = torch.Tensor()
    res2 = torch.gesv(b, a, out=(tb, ta))[0]
    res3 = torch.gesv(b, a, out=(b, a))[0]
    self.assertEqual(res1, tb)
    self.assertEqual(res1, b)
    self.assertEqual(res1, res2)
    self.assertEqual(res1, res3)

    # test reuse
    res1 = torch.gesv(b, a)[0]
    ta = torch.Tensor()
    tb = torch.Tensor()
    torch.gesv(b, a, out=(tb, ta))[0]
    self.assertEqual(res1, tb)
    torch.gesv(b, a, out=(tb, ta))[0]
    self.assertEqual(res1, tb)
def forward(ctx, b, a):
    # TODO see if one can backprop through LU
    X, LU = torch.gesv(b, a)
    ctx.save_for_backward(X, a)
    ctx.mark_non_differentiable(LU)
    return X, LU
def backward(ctx, grad_output, grad_LU=None):
    X, a = ctx.saved_variables
    grad_b, _ = torch.gesv(grad_output, a.t())
    grad_a = -torch.mm(grad_b, X.t())
    return grad_b, grad_a
def pre_factor_kkt(Q, G, A):
    """ Perform all one-time factorizations and cache relevant matrix products"""
    nineq, nz, neq, _ = get_sizes(G, A)

    # S = [ A Q^{-1} A^T        A Q^{-1} G^T          ]
    #     [ G Q^{-1} A^T        G Q^{-1} G^T + D^{-1} ]

    U_Q = torch.potrf(Q)

    # partial cholesky of S matrix
    U_S = torch.zeros(neq + nineq, neq + nineq).type_as(Q)

    G_invQ_GT = torch.mm(G, torch.potrs(G.t(), U_Q))
    R = G_invQ_GT
    if neq > 0:
        invQ_AT = torch.potrs(A.t(), U_Q)
        A_invQ_AT = torch.mm(A, invQ_AT)
        G_invQ_AT = torch.mm(G, invQ_AT)

        # TODO: torch.potrf sometimes says the matrix is not PSD but
        # numpy does? I filed an issue at
        # https://github.com/pytorch/pytorch/issues/199
        try:
            U11 = torch.potrf(A_invQ_AT)
        except:
            U11 = torch.Tensor(np.linalg.cholesky(
                A_invQ_AT.cpu().numpy())).type_as(A_invQ_AT)

        # TODO: torch.trtrs is currently not implemented on the GPU
        # and we are using gesv as a workaround.
        U12 = torch.gesv(G_invQ_AT.t(), U11.t())[0]

        U_S[:neq, :neq] = U11
        U_S[:neq, neq:] = U12
        R -= torch.mm(U12.t(), U12)

    return U_Q, U_S, R
def solve(matrix1, matrix2):
    solution, _ = torch.gesv(matrix2, matrix1)
    return solution
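This last wrapper flips the arguments so the coefficient matrix comes first, matching the conventional solve(A, B) order. A brief usage sketch (the matrices below are illustrative, not taken from the source project):

A = torch.eye(3) * 2.0    # invertible coefficient matrix
B = torch.ones(3, 2)      # right-hand side
X = solve(A, B)           # each entry of X is 0.5
print(torch.mm(A, X))     # reproduces B up to floating-point error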