# test_symbolic_ops.py
import unittest

import numpy as np

from examples.gpt2 import Attention
from tinygrad.helpers import getenv
from tinygrad.shape.symbolic import Variable
from tinygrad.tensor import Tensor
  7. class TestSymbolicOps(unittest.TestCase):
  8. def test_plus1(self):
  9. def f(a): return (a+1).realize()
  10. for i in range(1, 5):
  11. vi = Variable("i", 1, 10).bind(i)
  12. a = Tensor.rand(3, i)
  13. symbolic = f(a.reshape(3, vi)).reshape(3, i).numpy()
  14. expected = f(a).numpy()
  15. np.testing.assert_allclose(symbolic, expected, atol=1e-6, rtol=1e-6)
  16. def test_add(self):
  17. def f(a, b): return (a+b).realize()
  18. for i in range(1, 5):
  19. vi = Variable("i", 1, 10).bind(i)
  20. a = Tensor.rand(3, i)
  21. b = Tensor.rand(3, i)
  22. symbolic = f(a.reshape(3, vi), b.reshape(3, vi)).reshape(3, i).numpy()
  23. expected = f(a, b).numpy()
  24. np.testing.assert_allclose(symbolic, expected, atol=1e-6, rtol=1e-6)
  25. def test_matmul(self):
  26. def f(a, b): return (a@b).realize()
  27. for i in range(1, 5):
  28. vi = Variable("i", 1, 10).bind(i)
  29. a = Tensor.rand(3, i)
  30. b = Tensor.rand(i, 5)
  31. symbolic = f(a.reshape(3, vi), b.reshape(vi, 5)).numpy()
  32. expected = f(a, b).numpy()
  33. np.testing.assert_allclose(symbolic, expected, atol=1e-6, rtol=1e-6)
  34. def test_attention(self, dropout_p=0.0):
  35. def f(q, k, v): return Tensor.scaled_dot_product_attention(q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2), dropout_p=dropout_p).realize()
  36. for i in range(1, 5):
  37. vi = Variable("i", 1, 10).bind(i)
  38. q = Tensor.rand(2, 1, 4, 8)
  39. k = Tensor.rand(2, i, 4, 8)
  40. v = Tensor.rand(2, i, 4, 8)
  41. symbolic = f(q, k.reshape(2, vi, 4, 8), v.reshape(2, vi, 4, 8)).reshape(2, 4, 1, 8).numpy()
  42. expected = f(q, k, v).numpy()
  43. np.testing.assert_allclose(symbolic, expected, atol=1e-6, rtol=1e-6)
  44. @unittest.skipIf(getenv("MOCKHIP"), "MOCKHIP only compiles and does not run")
  45. def test_attention_training(self):
  46. with Tensor.train():
  47. self.test_attention(dropout_p=0.0)
  48. with self.assertRaises(AssertionError):
  49. # symbolic shape dropout is not supported
  50. self.test_attention(dropout_p=0.5)
  51. def test_attention_pos_0_sz_0(self):
  52. Attention(128, 8)(Tensor.ones(1, 0, 128), Variable("start_pos", 0, 128).bind(0), None)
  53. def test_attention_pos_0_sz_1(self):
  54. Attention(128, 8)(Tensor.ones(1, 1, 128), Variable("start_pos", 0, 128).bind(0), None)
  55. def test_attention_pos_0_sz_2(self):
  56. Attention(128, 8)(Tensor.ones(1, 2, 128), Variable("start_pos", 0, 128).bind(0), None)
  57. def test_cat_dim0(self):
  58. def f(a, b): return a.cat(b, dim=0).realize()
  59. for i in range(1, 5):
  60. vi = Variable("i", 1, 10).bind(i)
  61. a = Tensor.rand(i, 3)
  62. b = Tensor.rand(2, 3)
  63. symbolic = f(a.reshape(vi, 3), b).reshape(i+2, 3).numpy()
  64. expected = f(a, b).numpy()
  65. np.testing.assert_allclose(symbolic, expected, atol=1e-6, rtol=1e-6)
  66. def test_cat_dim1(self):
  67. def f(a, b): return a.cat(b, dim=1).realize()
  68. for i in range(1, 5):
  69. vi = Variable("i", 1, 10).bind(i)
  70. a = Tensor.rand(3, i)
  71. b = Tensor.rand(3, 2)
  72. symbolic = f(a.reshape(3, vi), b).reshape(3, i+2).numpy()
  73. expected = f(a, b).numpy()
  74. np.testing.assert_allclose(symbolic, expected, atol=1e-6, rtol=1e-6)
  75. def test_cat_dim0_two_vars(self):
  76. def f(a, b): return a.cat(b, dim=0).realize()
  77. for i in range(1, 5):
  78. for j in range(1, 5):
  79. vi = Variable("i", 1, 10).bind(i)
  80. vj = Variable("j", 1, 10).bind(j)
  81. a = Tensor.rand(i, 3)
  82. b = Tensor.rand(j, 3)
  83. symbolic = f(a.reshape(vi, 3), b.reshape(vj, 3)).reshape(i+j, 3).numpy()
  84. expected = f(a, b).numpy()
  85. np.testing.assert_allclose(symbolic, expected, atol=1e-6, rtol=1e-6)
  86. def test_cat_dim1_two_vars(self):
  87. def f(a, b): return a.cat(b, dim=1).realize()
  88. for i in range(1, 5):
  89. for j in range(1, 5):
  90. vi = Variable("i", 1, 10).bind(i)
  91. vj = Variable("j", 1, 10).bind(j)
  92. a = Tensor.rand(3, i)
  93. b = Tensor.rand(3, j)
  94. symbolic = f(a.reshape(3, vi), b.reshape(3, vj)).reshape(3, i+j).numpy()
  95. expected = f(a, b).numpy()
  96. np.testing.assert_allclose(symbolic, expected, atol=1e-6, rtol=1e-6)
  97. def test_two_vars_plus1_ij(self):
  98. def f(a, b): return (a@b+1).realize()
  99. for i in range(1, 5):
  100. for j in range(1, 5):
  101. vi = Variable("i", 1, 10).bind(i)
  102. vj = Variable("j", 1, 10).bind(j)
  103. a = Tensor.rand(i, 3)
  104. b = Tensor.rand(3, j)
  105. symbolic = f(a.reshape(vi, 3), b.reshape(3, vj)).reshape(i, j).numpy()
  106. expected = f(a, b).numpy()
  107. np.testing.assert_allclose(symbolic, expected, atol=1e-6, rtol=1e-6)
  108. def test_two_vars_plus1_ji(self):
  109. # reverse the order of variables
  110. def f(a, b): return (a@b+1).realize()
  111. for i in range(1, 5):
  112. for j in range(1, 5):
  113. vi = Variable("i", 1, 10).bind(i)
  114. vj = Variable("j", 1, 10).bind(j)
  115. a = Tensor.rand(j, 3)
  116. b = Tensor.rand(3, i)
  117. symbolic = f(a.reshape(vj, 3), b.reshape(3, vi)).reshape(j, i).numpy()
  118. expected = f(a, b).numpy()
  119. np.testing.assert_allclose(symbolic, expected, atol=1e-6, rtol=1e-6)
  120. def test_shrink(self):
  121. for i in range(1, 5):
  122. vi = Variable("i", 1, 10).bind(i)
  123. a = Tensor.rand(7, 11)
  124. symbolic = a.shrink(((3,5),(vi,vi+2)))
  125. symbolic = symbolic.numpy()
  126. expected = a.shrink(((3,5),(i,i+2))).numpy()
  127. np.testing.assert_allclose(symbolic, expected, atol=1e-6, rtol=1e-6)
  128. def test_ones_sum(self):
  129. for i in range(1, 5):
  130. vi = Variable("i", 1, 10).bind(i)
  131. t = Tensor.ones(i)
  132. symbolic = t.reshape(vi).sum().item()
  133. expected = t.sum().item()
  134. np.testing.assert_equal(symbolic, expected)
  135. def test_mean(self):
  136. for i in range(1, 5):
  137. vi = Variable("i", 1, 10).bind(i)
  138. for axis in [None, 0, 1]:
  139. a = Tensor.rand(i, 3)
  140. expected = a.mean(axis).numpy()
  141. symbolic = a.reshape(vi, 3).mean(axis).reshape(expected.shape).numpy()
  142. np.testing.assert_allclose(symbolic, expected, atol=1e-6, rtol=1e-6)
  143. def test_mean_2d(self):
  144. for i in range(1, 5):
  145. for j in range(1, 5):
  146. vi = Variable("i", 1, 10).bind(i)
  147. vj = Variable("j", 1, 10).bind(j)
  148. for axis in [None, 0, 1]:
  149. a = Tensor.rand(i, j)
  150. expected = a.mean(axis).numpy()
  151. symbolic = a.reshape(vi, vj).mean(axis).reshape(expected.shape).numpy()
  152. np.testing.assert_allclose(symbolic, expected, atol=1e-6, rtol=1e-6)
  153. def test_var(self):
  154. for i in range(1, 5):
  155. vi = Variable("i", 1, 10).bind(i)
  156. for axis in [None, 0, 1]:
  157. a = Tensor.rand(i, 3)
  158. expected = a.var(axis).numpy()
  159. symbolic = a.reshape(vi, 3).var(axis).reshape(expected.shape).numpy()
  160. np.testing.assert_allclose(symbolic, expected, atol=1e-6, rtol=1e-6)
  161. def test_var_2d(self):
  162. for i in range(1, 5):
  163. for j in range(1, 5):
  164. vi = Variable("i", 1, 10).bind(i)
  165. vj = Variable("j", 1, 10).bind(j)
  166. for axis in [None, 0, 1]:
  167. a = Tensor.rand(i, j)
  168. expected = a.var(axis).numpy()
  169. symbolic = a.reshape(vi, vj).var(axis).reshape(expected.shape).numpy()
  170. np.testing.assert_allclose(symbolic, expected, atol=1e-6, rtol=1e-6)
  171. if __name__ == '__main__':
  172. unittest.main()