
Python gradients.jacobian Function Code Examples


This article collects typical usage examples of the Python function tensorflow.python.ops.parallel_for.gradients.jacobian. If you have been wondering what jacobian does, how to call it, or what it looks like in real code, the curated examples below should help.



The following presents 8 code examples of the jacobian function, ordered by popularity by default.
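
Before the examples, here is a minimal sketch of the basic call, assuming TensorFlow 1.x graph mode and the same internal modules the test code below imports (math_ops, random_ops, parallel_for.gradients); it is not part of the original examples. The shape convention it prints is the one asserted in Examples 4 and 5.

# Minimal usage sketch (assumption: TF 1.x graph mode, internal TF modules).
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.parallel_for import gradients

# x has shape [3, 4]; y = x @ x^T has shape [3, 3].
x = random_ops.random_uniform([3, 4])
y = math_ops.matmul(x, x, transpose_b=True)

# jacobian(y, x) stacks d y[i, j] / d x over the elements of y, so its
# static shape is y.shape + x.shape = [3, 3, 3, 4].
jac = gradients.jacobian(y, x, use_pfor=True)
print(jac.shape.as_list())  # [3, 3, 3, 4]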

Example 1: test_jacobian_fixed_shape

 def test_jacobian_fixed_shape(self):
   x = random_ops.random_uniform([2, 2])
   y = math_ops.matmul(x, x, transpose_a=True)
   jacobian_pfor = gradients.jacobian(y, x, use_pfor=True)
   jacobian_while = gradients.jacobian(y, x, use_pfor=False)
   # Reference answer: per-element gradients d y[i][j] / d x, stacked by hand.
   answer = ops.convert_to_tensor([[
       gradient_ops.gradients(y[0][0], x)[0],
       gradient_ops.gradients(y[0][1], x)[0]
   ], [
       gradient_ops.gradients(y[1][0], x)[0],
       gradient_ops.gradients(y[1][1], x)[0]
   ]])
   self.run_and_assert_equal(answer, jacobian_pfor)
   self.run_and_assert_equal(answer, jacobian_while)
Developer ID: LongJun123456, Project: tensorflow, Lines of code: 14, Source: gradients_test.py


Example 2: create_lstm_hessian

def create_lstm_hessian(batch_size, state_size, steps):
  _, output = lstm_model_fn(batch_size, state_size, steps)
  weights = variables.trainable_variables()
  pfor_jacobians = gradients.jacobian(output, weights, use_pfor=True)
  pfor_hessians = [
      gradients.jacobian(x, weights, use_pfor=True) for x in pfor_jacobians
  ]
  # TODO(agarwal): using two nested while_loop doesn't seem to work here.
  # Hence we use pfor_jacobians for computing while_hessians.
  while_jacobians = pfor_jacobians
  while_hessians = [
      gradients.jacobian(x, weights, use_pfor=False) for x in while_jacobians
  ]
  return pfor_hessians, while_hessians
Developer ID: LongJun123456, Project: tensorflow, Lines of code: 14, Source: gradients_test.py
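
As a follow-up to Example 2, here is a minimal sketch of the same nested-jacobian idea on a simple scalar loss, where the Hessian is the Jacobian of the gradient. It is not part of the original test file; it assumes the same TF 1.x internal modules, plus the gradient_ops.gradients call that Examples 1 and 3 also use (assumed here to come from tensorflow.python.ops.gradients).

# Hessian sketch (assumption: TF 1.x graph mode, internal TF modules).
from tensorflow.python.ops import gradients as gradient_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.parallel_for import gradients

x = random_ops.random_uniform([3])
loss = math_ops.reduce_sum(x * x * x)  # scalar loss

# First derivative has shape [3]; its Jacobian w.r.t. x is the [3, 3] Hessian.
grad = gradient_ops.gradients(loss, x)[0]
hessian = gradients.jacobian(grad, x, use_pfor=True)
print(hessian.shape.as_list())  # [3, 3]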


Example 3: test_jacobian_unknown_shape

 def test_jacobian_unknown_shape(self):
   with self.test_session() as sess:
     x = array_ops.placeholder(dtypes.float32, shape=[None, None])
     y = math_ops.matmul(x, x, transpose_a=True)
     jacobian_pfor = gradients.jacobian(y, x, use_pfor=True)
     jacobian_while = gradients.jacobian(y, x, use_pfor=False)
     answer = ops.convert_to_tensor([[
         gradient_ops.gradients(y[0][0], x)[0],
         gradient_ops.gradients(y[0][1], x)[0]
     ], [
         gradient_ops.gradients(y[1][0], x)[0],
         gradient_ops.gradients(y[1][1], x)[0]
     ]])
     ans, pfor_value, while_value = sess.run(
         [answer, jacobian_pfor, jacobian_while],
         feed_dict={x: [[1, 2], [3, 4]]})
     self.assertAllClose(ans, pfor_value)
     self.assertAllClose(ans, while_value)
Developer ID: LongJun123456, Project: tensorflow, Lines of code: 18, Source: gradients_test.py


Example 4: test_jacobian_scan_shape

  def test_jacobian_scan_shape(self):
    # Shape x: [3, 4]
    x = random_ops.random_uniform([3, 4])
    elems = random_ops.random_uniform([6])
    # Shape y: [6, 3, 4]
    y = functional_ops.scan(lambda a, e: a + e, elems, initializer=x)
    jacobian = gradients.jacobian(y, x)

    expected_shape = [6, 3, 4, 3, 4]
    self.assertAllEqual(expected_shape, jacobian.shape.as_list())
Developer ID: aeverall, Project: tensorflow, Lines of code: 10, Source: gradients_test.py


Example 5: test_jacobian_while_loop_shape

  def test_jacobian_while_loop_shape(self):
    # Shape x: [3, 4]
    x = random_ops.random_uniform([3, 4])
    _, y = tf_control_flow_ops.while_loop(lambda i, a: i > 5.,
                                          lambda i, a: (i + 1, a + i),
                                          (constant_op.constant(0.), x))
    # Shape y: [2, 3]
    y = y[:2, :3]
    jacobian = gradients.jacobian(y, x)

    expected_shape = [2, 3, 3, 4]
    self.assertAllEqual(expected_shape, jacobian.shape.as_list())
Developer ID: aeverall, Project: tensorflow, Lines of code: 12, Source: gradients_test.py


Example 6: create_fc_per_eg_jacobians

def create_fc_per_eg_jacobians(batch_size, activation_size, num_layers):
  model = FullyConnectedModel(activation_size=activation_size,
                              num_layers=num_layers)
  inp = random_ops.random_normal([batch_size, activation_size])
  output = model(inp)
  # Full-batch Jacobians of the model output w.r.t. every trainable variable.
  jacobians = gradients.jacobian(output, variables.trainable_variables())

  def loop_fn(i, use_pfor):
    inp_i = array_ops.expand_dims(array_ops.gather(inp, i), 0)
    output = array_ops.reshape(model(inp_i), [-1])
    return gradients.jacobian(
        output, variables.trainable_variables(), use_pfor=use_pfor)

  # Per-example Jacobians: run loop_fn once per batch element with pfor and
  # with a sequential for_loop, for comparison.
  per_eg_jacobians_pfor = control_flow_ops.pfor(
      functools.partial(loop_fn, use_pfor=True),
      batch_size)
  per_eg_jacobians_while = control_flow_ops.for_loop(
      functools.partial(loop_fn, use_pfor=False),
      [dtypes.float32] * len(variables.trainable_variables()), batch_size)
  return jacobians, per_eg_jacobians_pfor, per_eg_jacobians_while
Developer ID: LongJun123456, Project: tensorflow, Lines of code: 20, Source: gradients_test.py


Example 7: loop_fn

 def loop_fn(i, use_pfor):
   inp_i = array_ops.expand_dims(array_ops.gather(inp, i), 0)
   output = array_ops.reshape(model(inp_i), [-1])
   return gradients.jacobian(
       output, variables.trainable_variables(), use_pfor=use_pfor)
Developer ID: LongJun123456, Project: tensorflow, Lines of code: 5, Source: gradients_test.py


Example 8: test_jacobian_parallel_iterations

 def test_jacobian_parallel_iterations(self):
   x = constant_op.constant([[1., 2], [3, 4]])
   y = math_ops.matmul(x, x)
   self.assertAllClose(gradients.jacobian(y, x, parallel_iterations=2),
                       gradients.jacobian(y, x, parallel_iterations=3))
Developer ID: aeverall, Project: tensorflow, Lines of code: 5, Source: gradients_test.py



Note: The tensorflow.python.ops.parallel_for.gradients.jacobian examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other source-code and documentation platforms. The snippets were selected from open-source projects contributed by their developers; copyright remains with the original authors, and redistribution and use should follow each project's license. Do not reproduce without permission.

