@gautamborad
Last active June 15, 2021 17:50
Eager vs. TorchScript print comparison for multi-dimensional (1-D, 2-D, 3-D, 4-D) tensors across all supported dtypes
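Summary of the log below: the tensor bodies printed by eager mode and by TorchScript agree only for the default dtypes; for every other dtype the eager repr carries a dtype= suffix that the scripted print drops. The scripted print also always appends a [ CPU<Type>{shape} ] summary line, prints complex tensors with only their real parts, and renders bool tensors as 0/1. A minimal sketch of the repro, using the same f/m helpers as the full script at the end of this gist (the uint8 input here is just one example):

import torch
from typing import Any

def f(a: Any):
    print(a)                            # eager call: Tensor.__repr__
    return isinstance(a, torch.Tensor)

m = torch.jit.script(f)                 # scripted call: TorchScript's own tensor printer

t = torch.randint(0, 100, (3,), dtype=torch.uint8)
f(t)    # e.g. tensor([77, 25, 97], dtype=torch.uint8)
m(t)    # e.g. tensor([77, 25, 97]) followed by a "[ CPUByteType{3} ]" line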
% python test-tensor-printout-differ.py
==================== ALL INT DTYPES =====================================
Eager [1-D] type:torch.uint8
tensor([0, 0, 0], dtype=torch.uint8)
TorchScript [1-D] type:torch.uint8
tensor([0, 0, 0])
[ CPUByteType{3} ]
TorchScript time for (1-D) -> torch.uint8: (0.05977s)!
-----------
Eager [2-D] type:torch.uint8
tensor([[77, 25, 97],
[ 7, 51, 42],
[88, 35, 13]], dtype=torch.uint8)
TorchScript [2-D] type:torch.uint8
tensor([[77, 25, 97],
[ 7, 51, 42],
[88, 35, 13]])
[ CPUByteType{3,3} ]
TorchScript time for (2-D) -> torch.uint8: (0.00224s)!
-----------
Eager [3-D] type:torch.uint8
tensor([[[38, 58, 16],
[79, 46, 40],
[76, 96, 77]],
[[12, 3, 34],
[40, 37, 81],
[19, 31, 94]],
[[ 4, 7, 82],
[52, 93, 32],
[17, 95, 58]]], dtype=torch.uint8)
TorchScript [3-D] type:torch.uint8
tensor([[[38, 58, 16],
[79, 46, 40],
[76, 96, 77]],
[[12, 3, 34],
[40, 37, 81],
[19, 31, 94]],
[[ 4, 7, 82],
[52, 93, 32],
[17, 95, 58]]])
[ CPUByteType{3,3,3} ]
TorchScript time for (3-D) -> torch.uint8: (0.00044s)!
-----------
Eager [4-D] type:torch.uint8
tensor([[[[25, 30, 29],
[28, 79, 80],
[11, 39, 84]],
[[76, 49, 6],
[67, 64, 12],
[75, 90, 97]],
[[98, 76, 39],
[29, 35, 76],
[20, 53, 19]]],
[[[51, 97, 64],
[58, 61, 84],
[85, 78, 46]],
[[54, 44, 43],
[83, 43, 35],
[33, 24, 82]],
[[21, 29, 14],
[35, 21, 98],
[25, 51, 23]]],
[[[93, 82, 45],
[87, 74, 39],
[77, 87, 14]],
[[33, 22, 56],
[61, 17, 83],
[62, 51, 71]],
[[66, 73, 10],
[64, 98, 82],
[ 2, 98, 37]]]], dtype=torch.uint8)
TorchScript [4-D] type:torch.uint8
tensor([[[[25, 30, 29],
[28, 79, 80],
[11, 39, 84]],
[[76, 49, 6],
[67, 64, 12],
[75, 90, 97]],
[[98, 76, 39],
[29, 35, 76],
[20, 53, 19]]],
[[[51, 97, 64],
[58, 61, 84],
[85, 78, 46]],
[[54, 44, 43],
[83, 43, 35],
[33, 24, 82]],
[[21, 29, 14],
[35, 21, 98],
[25, 51, 23]]],
[[[93, 82, 45],
[87, 74, 39],
[77, 87, 14]],
[[33, 22, 56],
[61, 17, 83],
[62, 51, 71]],
[[66, 73, 10],
[64, 98, 82],
[ 2, 98, 37]]]])
[ CPUByteType{3,3,3,3} ]
TorchScript time for (4-D) -> torch.uint8: (0.00121s)!
-----------
Eager [1-D] type:torch.int8
tensor([2, 3, 1], dtype=torch.int8)
TorchScript [1-D] type:torch.int8
tensor([2, 3, 1])
[ CPUCharType{3} ]
TorchScript time for (1-D) -> torch.int8: (0.00020s)!
-----------
Eager [2-D] type:torch.int8
tensor([[73, 20, 7],
[57, 47, 33],
[49, 14, 66]], dtype=torch.int8)
TorchScript [2-D] type:torch.int8
tensor([[73, 20, 7],
[57, 47, 33],
[49, 14, 66]])
[ CPUCharType{3,3} ]
TorchScript time for (2-D) -> torch.int8: (0.00022s)!
-----------
Eager [3-D] type:torch.int8
tensor([[[65, 53, 37],
[49, 25, 37],
[ 8, 84, 83]],
[[84, 60, 2],
[ 5, 96, 19],
[88, 43, 67]],
[[96, 58, 42],
[44, 69, 20],
[33, 3, 30]]], dtype=torch.int8)
TorchScript [3-D] type:torch.int8
tensor([[[65, 53, 37],
[49, 25, 37],
[ 8, 84, 83]],
[[84, 60, 2],
[ 5, 96, 19],
[88, 43, 67]],
[[96, 58, 42],
[44, 69, 20],
[33, 3, 30]]])
[ CPUCharType{3,3,3} ]
TorchScript time for (3-D) -> torch.int8: (0.00041s)!
-----------
Eager [4-D] type:torch.int8
tensor([[[[ 9, 20, 48],
[21, 99, 92],
[13, 4, 53]],
[[59, 42, 7],
[25, 0, 29],
[19, 87, 64]],
[[40, 38, 33],
[ 0, 94, 68],
[34, 70, 71]]],
[[[46, 41, 27],
[18, 55, 3],
[81, 37, 68]],
[[22, 78, 10],
[16, 90, 0],
[53, 87, 58]],
[[11, 64, 73],
[91, 13, 45],
[19, 60, 17]]],
[[[56, 18, 41],
[45, 44, 35],
[45, 94, 12]],
[[71, 22, 63],
[ 4, 41, 14],
[72, 40, 64]],
[[44, 89, 75],
[25, 5, 44],
[ 1, 17, 25]]]], dtype=torch.int8)
TorchScript [4-D] type:torch.int8
tensor([[[[ 9, 20, 48],
[21, 99, 92],
[13, 4, 53]],
[[59, 42, 7],
[25, 0, 29],
[19, 87, 64]],
[[40, 38, 33],
[ 0, 94, 68],
[34, 70, 71]]],
[[[46, 41, 27],
[18, 55, 3],
[81, 37, 68]],
[[22, 78, 10],
[16, 90, 0],
[53, 87, 58]],
[[11, 64, 73],
[91, 13, 45],
[19, 60, 17]]],
[[[56, 18, 41],
[45, 44, 35],
[45, 94, 12]],
[[71, 22, 63],
[ 4, 41, 14],
[72, 40, 64]],
[[44, 89, 75],
[25, 5, 44],
[ 1, 17, 25]]]])
[ CPUCharType{3,3,3,3} ]
TorchScript time for (4-D) -> torch.int8: (0.00093s)!
-----------
Eager [1-D] type:torch.int16
tensor([1, 3, 3], dtype=torch.int16)
TorchScript [1-D] type:torch.int16
tensor([1, 3, 3])
[ CPUShortType{3} ]
TorchScript time for (1-D) -> torch.int16: (0.00015s)!
-----------
Eager [2-D] type:torch.int16
tensor([[10, 74, 52],
[12, 6, 63],
[66, 92, 51]], dtype=torch.int16)
TorchScript [2-D] type:torch.int16
tensor([[10, 74, 52],
[12, 6, 63],
[66, 92, 51]])
[ CPUShortType{3,3} ]
TorchScript time for (2-D) -> torch.int16: (0.00021s)!
-----------
Eager [3-D] type:torch.int16
tensor([[[73, 30, 77],
[70, 22, 7],
[78, 75, 71]],
[[83, 76, 48],
[75, 53, 43],
[ 6, 36, 92]],
[[84, 82, 3],
[11, 50, 48],
[36, 46, 82]]], dtype=torch.int16)
TorchScript [3-D] type:torch.int16
tensor([[[73, 30, 77],
[70, 22, 7],
[78, 75, 71]],
[[83, 76, 48],
[75, 53, 43],
[ 6, 36, 92]],
[[84, 82, 3],
[11, 50, 48],
[36, 46, 82]]])
[ CPUShortType{3,3,3} ]
TorchScript time for (3-D) -> torch.int16: (0.00038s)!
-----------
Eager [4-D] type:torch.int16
tensor([[[[13, 19, 2],
[87, 19, 14],
[24, 83, 71]],
[[52, 55, 86],
[39, 77, 61],
[28, 81, 19]],
[[ 1, 72, 29],
[24, 71, 66],
[94, 94, 84]]],
[[[82, 92, 84],
[65, 50, 49],
[31, 56, 75]],
[[77, 41, 0],
[29, 76, 66],
[97, 23, 73]],
[[23, 73, 25],
[39, 59, 71],
[55, 83, 27]]],
[[[50, 83, 77],
[41, 16, 90],
[83, 61, 72]],
[[ 3, 95, 38],
[98, 39, 77],
[97, 19, 41]],
[[57, 61, 61],
[23, 79, 13],
[75, 33, 59]]]], dtype=torch.int16)
TorchScript [4-D] type:torch.int16
tensor([[[[13, 19, 2],
[87, 19, 14],
[24, 83, 71]],
[[52, 55, 86],
[39, 77, 61],
[28, 81, 19]],
[[ 1, 72, 29],
[24, 71, 66],
[94, 94, 84]]],
[[[82, 92, 84],
[65, 50, 49],
[31, 56, 75]],
[[77, 41, 0],
[29, 76, 66],
[97, 23, 73]],
[[23, 73, 25],
[39, 59, 71],
[55, 83, 27]]],
[[[50, 83, 77],
[41, 16, 90],
[83, 61, 72]],
[[ 3, 95, 38],
[98, 39, 77],
[97, 19, 41]],
[[57, 61, 61],
[23, 79, 13],
[75, 33, 59]]]])
[ CPUShortType{3,3,3,3} ]
TorchScript time for (4-D) -> torch.int16: (0.00086s)!
-----------
Eager [1-D] type:torch.int32
tensor([1, 3, 0], dtype=torch.int32)
TorchScript [1-D] type:torch.int32
tensor([1, 3, 0])
[ CPUIntType{3} ]
TorchScript time for (1-D) -> torch.int32: (0.00015s)!
-----------
Eager [2-D] type:torch.int32
tensor([[65, 90, 31],
[56, 19, 28],
[56, 88, 34]], dtype=torch.int32)
TorchScript [2-D] type:torch.int32
tensor([[65, 90, 31],
[56, 19, 28],
[56, 88, 34]])
[ CPUIntType{3,3} ]
TorchScript time for (2-D) -> torch.int32: (0.00021s)!
-----------
Eager [3-D] type:torch.int32
tensor([[[55, 95, 42],
[83, 14, 92],
[90, 79, 74]],
[[57, 26, 21],
[73, 60, 47],
[21, 16, 0]],
[[ 8, 95, 83],
[29, 55, 71],
[28, 10, 24]]], dtype=torch.int32)
TorchScript [3-D] type:torch.int32
tensor([[[55, 95, 42],
[83, 14, 92],
[90, 79, 74]],
[[57, 26, 21],
[73, 60, 47],
[21, 16, 0]],
[[ 8, 95, 83],
[29, 55, 71],
[28, 10, 24]]])
[ CPUIntType{3,3,3} ]
TorchScript time for (3-D) -> torch.int32: (0.00038s)!
-----------
Eager [4-D] type:torch.int32
tensor([[[[47, 29, 0],
[27, 25, 11],
[94, 79, 71]],
[[41, 72, 27],
[40, 6, 31],
[72, 80, 6]],
[[83, 71, 91],
[64, 80, 23],
[48, 75, 90]]],
[[[53, 4, 97],
[78, 63, 94],
[ 0, 9, 90]],
[[30, 44, 39],
[70, 66, 59],
[95, 27, 69]],
[[45, 4, 46],
[77, 79, 66],
[63, 89, 22]]],
[[[95, 94, 53],
[54, 74, 27],
[81, 72, 77]],
[[29, 6, 17],
[17, 55, 4],
[23, 48, 28]],
[[13, 21, 96],
[19, 23, 77],
[19, 4, 47]]]], dtype=torch.int32)
TorchScript [4-D] type:torch.int32
tensor([[[[47, 29, 0],
[27, 25, 11],
[94, 79, 71]],
[[41, 72, 27],
[40, 6, 31],
[72, 80, 6]],
[[83, 71, 91],
[64, 80, 23],
[48, 75, 90]]],
[[[53, 4, 97],
[78, 63, 94],
[ 0, 9, 90]],
[[30, 44, 39],
[70, 66, 59],
[95, 27, 69]],
[[45, 4, 46],
[77, 79, 66],
[63, 89, 22]]],
[[[95, 94, 53],
[54, 74, 27],
[81, 72, 77]],
[[29, 6, 17],
[17, 55, 4],
[23, 48, 28]],
[[13, 21, 96],
[19, 23, 77],
[19, 4, 47]]]])
[ CPUIntType{3,3,3,3} ]
TorchScript time for (4-D) -> torch.int32: (0.00099s)!
-----------
Eager [1-D] type:torch.int64
tensor([1, 1, 2])
TorchScript [1-D] type:torch.int64
tensor([1, 1, 2])
[ CPULongType{3} ]
TorchScript time for (1-D) -> torch.int64: (0.00016s)!
-----------
Eager [2-D] type:torch.int64
tensor([[72, 72, 7],
[13, 4, 22],
[84, 78, 62]])
TorchScript [2-D] type:torch.int64
tensor([[72, 72, 7],
[13, 4, 22],
[84, 78, 62]])
[ CPULongType{3,3} ]
TorchScript time for (2-D) -> torch.int64: (0.00021s)!
-----------
Eager [3-D] type:torch.int64
tensor([[[52, 20, 16],
[82, 74, 60],
[20, 65, 46]],
[[46, 70, 54],
[20, 52, 37],
[ 5, 42, 93]],
[[58, 13, 15],
[99, 67, 98],
[81, 57, 8]]])
TorchScript [3-D] type:torch.int64
tensor([[[52, 20, 16],
[82, 74, 60],
[20, 65, 46]],
[[46, 70, 54],
[20, 52, 37],
[ 5, 42, 93]],
[[58, 13, 15],
[99, 67, 98],
[81, 57, 8]]])
[ CPULongType{3,3,3} ]
TorchScript time for (3-D) -> torch.int64: (0.00039s)!
-----------
Eager [4-D] type:torch.int64
tensor([[[[84, 65, 29],
[14, 60, 76],
[26, 65, 11]],
[[49, 13, 4],
[24, 33, 82],
[ 5, 34, 28]],
[[90, 4, 57],
[17, 90, 23],
[44, 22, 57]]],
[[[86, 47, 90],
[ 3, 25, 92],
[85, 29, 48]],
[[95, 60, 28],
[48, 20, 44],
[79, 50, 84]],
[[64, 95, 79],
[ 2, 79, 86],
[43, 80, 21]]],
[[[38, 63, 92],
[36, 86, 59],
[ 9, 46, 13]],
[[ 5, 73, 87],
[39, 39, 23],
[49, 49, 40]],
[[66, 61, 35],
[ 6, 6, 94],
[20, 4, 36]]]])
TorchScript [4-D] type:torch.int64
tensor([[[[84, 65, 29],
[14, 60, 76],
[26, 65, 11]],
[[49, 13, 4],
[24, 33, 82],
[ 5, 34, 28]],
[[90, 4, 57],
[17, 90, 23],
[44, 22, 57]]],
[[[86, 47, 90],
[ 3, 25, 92],
[85, 29, 48]],
[[95, 60, 28],
[48, 20, 44],
[79, 50, 84]],
[[64, 95, 79],
[ 2, 79, 86],
[43, 80, 21]]],
[[[38, 63, 92],
[36, 86, 59],
[ 9, 46, 13]],
[[ 5, 73, 87],
[39, 39, 23],
[49, 49, 40]],
[[66, 61, 35],
[ 6, 6, 94],
[20, 4, 36]]]])
[ CPULongType{3,3,3,3} ]
TorchScript time for (4-D) -> torch.int64: (0.00107s)!
-----------
==================== ALL INT DTYPES END =====================================
==================== ALL FP DTYPES =====================================
Eager [1-D] type:torch.float32
tensor([-0.3073, 0.4040, 0.3253, -0.5713])
TorchScript [1-D] type:torch.float32
tensor([-0.3073, 0.4040, 0.3253, -0.5713])
[ CPUFloatType{4} ]
TorchScript time for (1-D) -> torch.float32: (0.00063s)!
-----------
Eager [2-D] type:torch.float32
tensor([[ 0.4579, -3.2371, 1.4674],
[ 0.3214, 1.7327, -0.6448],
[-1.8258, -1.0259, -1.6016]])
TorchScript [2-D] type:torch.float32
tensor([[ 0.4579, -3.2371, 1.4674],
[ 0.3214, 1.7327, -0.6448],
[-1.8258, -1.0259, -1.6016]])
[ CPUFloatType{3,3} ]
TorchScript time for (2-D) -> torch.float32: (0.00077s)!
-----------
Eager [3-D] type:torch.float32
tensor([[[ 0.4109, 1.0147, -0.9307],
[-1.6440, 0.7993, -0.1779],
[-0.1842, -0.2166, -0.7442]],
[[-0.5190, 0.0238, 1.2196],
[ 0.5836, 0.1682, 1.3176],
[ 1.0771, -0.7681, -0.9892]],
[[ 0.5262, 1.0765, -1.3708],
[-1.5159, 1.0037, -0.7561],
[-0.8218, -0.6919, 0.4108]]])
TorchScript [3-D] type:torch.float32
tensor([[[ 0.4109, 1.0147, -0.9307],
[-1.6440, 0.7993, -0.1779],
[-0.1842, -0.2166, -0.7442]],
[[-0.5190, 0.0238, 1.2196],
[ 0.5836, 0.1682, 1.3176],
[ 1.0771, -0.7681, -0.9892]],
[[ 0.5262, 1.0765, -1.3708],
[-1.5159, 1.0037, -0.7561],
[-0.8218, -0.6919, 0.4108]]])
[ CPUFloatType{3,3,3} ]
TorchScript time for (3-D) -> torch.float32: (0.00106s)!
-----------
Eager [4-D] type:torch.float32
tensor([[[[ 0.4899, -0.1890, 1.1259],
[ 0.5896, -0.8323, -0.6358],
[ 0.7046, 0.3127, -0.0218]],
[[-0.7865, 1.2937, 0.1580],
[-1.0523, -1.8295, -0.9305],
[-0.2119, 0.6773, 0.4163]],
[[ 0.9235, 0.3577, 0.5023],
[ 0.7347, 0.9612, -0.0580],
[ 1.5654, 0.5287, 1.0593]]],
[[[ 1.0122, 0.0997, -0.0334],
[ 0.6290, -0.7017, -0.6356],
[-2.3973, -1.5617, -0.4433]],
[[-0.4647, 0.6017, 1.6494],
[-0.0876, -2.1217, -1.3894],
[-0.7643, 3.1418, 1.6277]],
[[-0.3172, 0.3472, -0.3686],
[ 0.9008, -1.2584, -0.5677],
[-0.4519, 1.4782, -1.5896]]],
[[[-1.0053, 1.3470, -0.3163],
[-0.4936, 0.8158, -0.0089],
[-0.1081, 0.8382, 0.5489]],
[[ 0.2577, -1.1369, -1.6591],
[-0.8375, 0.5957, 0.7530],
[-0.6346, -1.6054, -0.2069]],
[[ 1.1649, 0.6896, -0.3767],
[-0.6605, 0.3400, 0.5177],
[ 0.1551, -1.1989, 1.0671]]]])
TorchScript [4-D] type:torch.float32
tensor([[[[ 0.4899, -0.1890, 1.1259],
[ 0.5896, -0.8323, -0.6358],
[ 0.7046, 0.3127, -0.0218]],
[[-0.7865, 1.2937, 0.1580],
[-1.0523, -1.8295, -0.9305],
[-0.2119, 0.6773, 0.4163]],
[[ 0.9235, 0.3577, 0.5023],
[ 0.7347, 0.9612, -0.0580],
[ 1.5654, 0.5287, 1.0593]]],
[[[ 1.0122, 0.0997, -0.0334],
[ 0.6290, -0.7017, -0.6356],
[-2.3973, -1.5617, -0.4433]],
[[-0.4647, 0.6017, 1.6494],
[-0.0876, -2.1217, -1.3894],
[-0.7643, 3.1418, 1.6277]],
[[-0.3172, 0.3472, -0.3686],
[ 0.9008, -1.2584, -0.5677],
[-0.4519, 1.4782, -1.5896]]],
[[[-1.0053, 1.3470, -0.3163],
[-0.4936, 0.8158, -0.0089],
[-0.1081, 0.8382, 0.5489]],
[[ 0.2577, -1.1369, -1.6591],
[-0.8375, 0.5957, 0.7530],
[-0.6346, -1.6054, -0.2069]],
[[ 1.1649, 0.6896, -0.3767],
[-0.6605, 0.3400, 0.5177],
[ 0.1551, -1.1989, 1.0671]]]])
[ CPUFloatType{3,3,3,3} ]
TorchScript time for (4-D) -> torch.float32: (0.00171s)!
-----------
Eager [1-D] type:torch.float64
tensor([0.1815, 0.3897, 0.5089, 0.5889], dtype=torch.float64)
TorchScript [1-D] type:torch.float64
tensor([0.1815, 0.3897, 0.5089, 0.5889])
[ CPUDoubleType{4} ]
TorchScript time for (1-D) -> torch.float64: (0.00051s)!
-----------
Eager [2-D] type:torch.float64
tensor([[-0.4908, 0.4824, 0.4445],
[ 0.2944, 1.9775, 2.5023],
[-0.1708, 0.4906, -1.7399]], dtype=torch.float64)
TorchScript [2-D] type:torch.float64
tensor([[-0.4908, 0.4824, 0.4445],
[ 0.2944, 1.9775, 2.5023],
[-0.1708, 0.4906, -1.7399]])
[ CPUDoubleType{3,3} ]
TorchScript time for (2-D) -> torch.float64: (0.00061s)!
-----------
Eager [3-D] type:torch.float64
tensor([[[ 0.7037, 0.7411, -1.5748],
[-0.2720, -1.0845, 0.2235],
[ 0.8773, -0.6932, -0.1456]],
[[-0.5914, 1.0142, -1.2471],
[-0.0416, 0.3675, 1.1532],
[-1.4098, 0.5623, 2.3425]],
[[ 0.5346, 0.2349, -0.9374],
[-0.0556, 1.0823, -0.2780],
[ 1.6469, 1.1188, -1.8395]]], dtype=torch.float64)
TorchScript [3-D] type:torch.float64
tensor([[[ 0.7037, 0.7411, -1.5748],
[-0.2720, -1.0845, 0.2235],
[ 0.8773, -0.6932, -0.1456]],
[[-0.5914, 1.0142, -1.2471],
[-0.0416, 0.3675, 1.1532],
[-1.4098, 0.5623, 2.3425]],
[[ 0.5346, 0.2349, -0.9374],
[-0.0556, 1.0823, -0.2780],
[ 1.6469, 1.1188, -1.8395]]])
[ CPUDoubleType{3,3,3} ]
TorchScript time for (3-D) -> torch.float64: (0.00097s)!
-----------
Eager [4-D] type:torch.float64
tensor([[[[-0.3672, 0.7810, -0.1067],
[ 0.6408, -1.1704, 0.8386],
[ 0.5624, 0.8729, 1.0339]],
[[ 0.2134, 1.9195, -0.0735],
[ 1.1209, -0.3472, -0.0196],
[-0.7719, -1.3813, 0.5966]],
[[ 0.7228, 0.2171, 0.3392],
[-0.6301, -1.0267, -1.1787],
[ 1.1477, 0.2301, -1.7894]]],
[[[ 1.4600, 0.1159, -0.1669],
[-1.1039, 1.2629, 0.5024],
[-0.4008, 0.0417, -0.4760]],
[[ 0.4487, -1.5529, -0.1530],
[ 0.8056, 1.3810, 0.8046],
[ 0.2618, -0.3238, -1.1342]],
[[ 0.6400, 0.0805, 0.6234],
[-0.0882, 0.6418, -1.2611],
[ 2.1407, 0.4212, 0.6318]]],
[[[ 0.0053, -0.2730, -1.1040],
[-0.5390, 1.0442, 0.8284],
[-1.6949, -0.1495, -0.4883]],
[[ 0.8880, -0.2460, 2.1454],
[ 0.1299, -0.7892, -0.2032],
[-0.9593, 1.3273, 0.1177]],
[[-0.5102, -0.1020, -0.8517],
[ 0.1001, 1.5952, -1.3652],
[ 0.1819, -0.5640, -0.3327]]]], dtype=torch.float64)
TorchScript [4-D] type:torch.float64
tensor([[[[-0.3672, 0.7810, -0.1067],
[ 0.6408, -1.1704, 0.8386],
[ 0.5624, 0.8729, 1.0339]],
[[ 0.2134, 1.9195, -0.0735],
[ 1.1209, -0.3472, -0.0196],
[-0.7719, -1.3813, 0.5966]],
[[ 0.7228, 0.2171, 0.3392],
[-0.6301, -1.0267, -1.1787],
[ 1.1477, 0.2301, -1.7894]]],
[[[ 1.4600, 0.1159, -0.1669],
[-1.1039, 1.2629, 0.5024],
[-0.4008, 0.0417, -0.4760]],
[[ 0.4487, -1.5529, -0.1530],
[ 0.8056, 1.3810, 0.8046],
[ 0.2618, -0.3238, -1.1342]],
[[ 0.6400, 0.0805, 0.6234],
[-0.0882, 0.6418, -1.2611],
[ 2.1407, 0.4212, 0.6318]]],
[[[ 0.0053, -0.2730, -1.1040],
[-0.5390, 1.0442, 0.8284],
[-1.6949, -0.1495, -0.4883]],
[[ 0.8880, -0.2460, 2.1454],
[ 0.1299, -0.7892, -0.2032],
[-0.9593, 1.3273, 0.1177]],
[[-0.5102, -0.1020, -0.8517],
[ 0.1001, 1.5952, -1.3652],
[ 0.1819, -0.5640, -0.3327]]]])
[ CPUDoubleType{3,3,3,3} ]
TorchScript time for (4-D) -> torch.float64: (0.00148s)!
-----------
Eager [1-D] type:torch.float16
tensor([-0.3860, -0.0693, 0.3423, 0.5747], dtype=torch.float16)
TorchScript [1-D] type:torch.float16
tensor([-0.3860, -0.0693, 0.3423, 0.5747])
[ CPUHalfType{4} ]
TorchScript time for (1-D) -> torch.float16: (0.00062s)!
-----------
Eager [2-D] type:torch.float16
tensor([[ 1.3008, -0.2717, -1.3740],
[ 0.4285, -0.0085, -0.3813],
[ 0.4622, -2.0625, 1.0439]], dtype=torch.float16)
TorchScript [2-D] type:torch.float16
tensor([[ 1.3008, -0.2717, -1.3740],
[ 0.4285, -0.0085, -0.3813],
[ 0.4622, -2.0625, 1.0439]])
[ CPUHalfType{3,3} ]
TorchScript time for (2-D) -> torch.float16: (0.00066s)!
-----------
Eager [3-D] type:torch.float16
tensor([[[-2.1152, -0.3784, 1.1152],
[ 0.2140, -0.6284, 0.9453],
[-0.6826, -2.0312, -0.2345]],
[[-0.3672, -0.0370, 0.5859],
[ 0.4075, -1.1191, 1.5752],
[ 0.4614, 0.6147, 1.6367]],
[[ 0.7549, 0.3132, 0.1989],
[ 1.4971, -0.6523, -0.0135],
[ 0.3584, -1.4600, -0.6953]]], dtype=torch.float16)
TorchScript [3-D] type:torch.float16
tensor([[[-2.1152, -0.3784, 1.1152],
[ 0.2140, -0.6284, 0.9453],
[-0.6826, -2.0312, -0.2345]],
[[-0.3672, -0.0370, 0.5859],
[ 0.4075, -1.1191, 1.5752],
[ 0.4614, 0.6147, 1.6367]],
[[ 0.7549, 0.3132, 0.1989],
[ 1.4971, -0.6523, -0.0135],
[ 0.3584, -1.4600, -0.6953]]])
[ CPUHalfType{3,3,3} ]
TorchScript time for (3-D) -> torch.float16: (0.00092s)!
-----------
Eager [4-D] type:torch.float16
tensor([[[[-0.4292, 1.2949, 0.9067],
[-0.7510, -0.6694, 0.1415],
[-0.8149, 1.0605, -0.1300]],
[[ 0.6816, -0.7773, 0.2130],
[-0.4722, -1.4092, -1.1641],
[ 0.0521, -0.0919, -1.1826]],
[[-1.5967, -0.6943, -0.4578],
[-0.8784, 1.0254, 0.6855],
[-0.8892, -1.2510, -0.9111]]],
[[[ 0.6328, -0.0607, 1.1328],
[-0.3740, 1.4707, -0.8203],
[ 0.7148, 0.0824, -0.1300]],
[[ 1.5303, 0.1664, 0.5557],
[ 1.1855, -0.0152, 1.6963],
[ 1.7754, -0.9087, 0.2800]],
[[ 1.0732, 0.2301, 1.2227],
[-1.8096, 1.0352, 0.5200],
[ 0.2402, -0.6807, -0.6182]]],
[[[-0.1974, 0.9580, 1.4307],
[ 3.0078, 0.9312, 0.6519],
[-0.5146, 1.2285, -0.2915]],
[[-0.8755, -1.3066, 0.1902],
[ 0.8530, -0.8760, 0.6709],
[ 0.7700, 0.3877, -0.7290]],
[[-0.0091, 0.9395, 0.5366],
[ 1.0938, 1.2178, -0.0316],
[-0.3145, -0.6143, -0.5317]]]], dtype=torch.float16)
TorchScript [4-D] type:torch.float16
tensor([[[[-0.4292, 1.2949, 0.9067],
[-0.7510, -0.6694, 0.1415],
[-0.8149, 1.0605, -0.1300]],
[[ 0.6816, -0.7773, 0.2130],
[-0.4722, -1.4092, -1.1641],
[ 0.0521, -0.0919, -1.1826]],
[[-1.5967, -0.6943, -0.4578],
[-0.8784, 1.0254, 0.6855],
[-0.8892, -1.2510, -0.9111]]],
[[[ 0.6328, -0.0607, 1.1328],
[-0.3740, 1.4707, -0.8203],
[ 0.7148, 0.0824, -0.1300]],
[[ 1.5303, 0.1664, 0.5557],
[ 1.1855, -0.0152, 1.6963],
[ 1.7754, -0.9087, 0.2800]],
[[ 1.0732, 0.2301, 1.2227],
[-1.8096, 1.0352, 0.5200],
[ 0.2402, -0.6807, -0.6182]]],
[[[-0.1974, 0.9580, 1.4307],
[ 3.0078, 0.9312, 0.6519],
[-0.5146, 1.2285, -0.2915]],
[[-0.8755, -1.3066, 0.1902],
[ 0.8530, -0.8760, 0.6709],
[ 0.7700, 0.3877, -0.7290]],
[[-0.0091, 0.9395, 0.5366],
[ 1.0938, 1.2178, -0.0316],
[-0.3145, -0.6143, -0.5317]]]])
[ CPUHalfType{3,3,3,3} ]
TorchScript time for (4-D) -> torch.float16: (0.00151s)!
-----------
Eager [1-D] type:torch.bfloat16
tensor([-0.1611, -1.4062, -0.0366, 0.7891], dtype=torch.bfloat16)
TorchScript [1-D] type:torch.bfloat16
tensor([-0.1611, -1.4062, -0.0366, 0.7891])
[ CPUBFloat16Type{4} ]
TorchScript time for (1-D) -> torch.bfloat16: (0.00057s)!
-----------
Eager [2-D] type:torch.bfloat16
tensor([[ 0.3691, -0.9766, -0.0349],
[ 0.4980, 0.0136, -0.8125],
[ 1.0859, -0.4746, -1.2812]], dtype=torch.bfloat16)
TorchScript [2-D] type:torch.bfloat16
tensor([[ 0.3691, -0.9766, -0.0349],
[ 0.4980, 0.0136, -0.8125],
[ 1.0859, -0.4746, -1.2812]])
[ CPUBFloat16Type{3,3} ]
TorchScript time for (2-D) -> torch.bfloat16: (0.00072s)!
-----------
Eager [3-D] type:torch.bfloat16
tensor([[[ 0.3555, -1.1172, -0.7500],
[ 0.0864, 0.2773, 2.0156],
[ 0.5039, -0.1299, 0.6250]],
[[-0.6797, 0.3516, -1.4141],
[-0.4785, -0.5508, -0.4668],
[-0.2910, 1.4609, -0.1089]],
[[-3.2812, -0.7148, 0.0381],
[-1.3125, 0.2637, 1.9453],
[ 0.9297, -0.5977, -0.5664]]], dtype=torch.bfloat16)
TorchScript [3-D] type:torch.bfloat16
tensor([[[ 0.3555, -1.1172, -0.7500],
[ 0.0864, 0.2773, 2.0156],
[ 0.5039, -0.1299, 0.6250]],
[[-0.6797, 0.3516, -1.4141],
[-0.4785, -0.5508, -0.4668],
[-0.2910, 1.4609, -0.1089]],
[[-3.2812, -0.7148, 0.0381],
[-1.3125, 0.2637, 1.9453],
[ 0.9297, -0.5977, -0.5664]]])
[ CPUBFloat16Type{3,3,3} ]
TorchScript time for (3-D) -> torch.bfloat16: (0.00090s)!
-----------
Eager [4-D] type:torch.bfloat16
tensor([[[[-1.2422e+00, -6.5234e-01, 6.2012e-02],
[ 1.2891e+00, 8.8379e-02, 5.6250e-01],
[ 1.6504e-01, 1.0703e+00, -8.6328e-01]],
[[ 3.8477e-01, 2.2754e-01, -8.3496e-02],
[ 8.6328e-01, -5.6250e-01, -5.5859e-01],
[ 1.0596e-01, -3.9844e-01, -3.2227e-01]],
[[ 8.8672e-01, -1.5938e+00, -5.0391e-01],
[-6.4844e-01, -1.4766e+00, -5.8984e-01],
[ 2.6758e-01, -1.7578e+00, 1.3184e-01]]],
[[[-9.6875e-01, 1.8125e+00, 1.2969e+00],
[ 1.8828e+00, 5.7220e-04, -3.6523e-01],
[-2.1719e+00, -2.5269e-02, -1.2578e+00]],
[[ 1.2188e+00, 3.4375e-01, -5.5469e-01],
[-3.3203e-01, 1.8516e+00, 1.2344e+00],
[-1.0156e+00, 3.2227e-01, -1.0078e+00]],
[[-2.0938e+00, -7.3047e-01, -1.3574e-01],
[-1.5312e+00, -1.1172e+00, -4.0820e-01],
[-3.4766e-01, 1.7773e-01, 6.9885e-03]]],
[[[-5.0391e-01, -1.1797e+00, 1.4801e-03],
[ 2.0781e+00, -8.9062e-01, -1.8984e+00],
[-4.8438e-01, -1.1016e+00, -1.4375e+00]],
[[ 9.3750e-02, -4.1797e-01, 5.5469e-01],
[ 1.6250e+00, 1.0312e+00, -1.6211e-01],
[-4.8633e-01, 3.2617e-01, -9.2969e-01]],
[[-4.6484e-01, 7.5391e-01, -7.0312e-01],
[-6.5625e-01, 8.2422e-01, -8.9844e-01],
[-9.8438e-01, -8.3594e-01, 1.6641e+00]]]], dtype=torch.bfloat16)
TorchScript [4-D] type:torch.bfloat16
tensor([[[[-1.2422e+00, -6.5234e-01, 6.2012e-02],
[ 1.2891e+00, 8.8379e-02, 5.6250e-01],
[ 1.6504e-01, 1.0703e+00, -8.6328e-01]],
[[ 3.8477e-01, 2.2754e-01, -8.3496e-02],
[ 8.6328e-01, -5.6250e-01, -5.5859e-01],
[ 1.0596e-01, -3.9844e-01, -3.2227e-01]],
[[ 8.8672e-01, -1.5938e+00, -5.0391e-01],
[-6.4844e-01, -1.4766e+00, -5.8984e-01],
[ 2.6758e-01, -1.7578e+00, 1.3184e-01]]],
[[[-9.6875e-01, 1.8125e+00, 1.2969e+00],
[ 1.8828e+00, 5.7220e-04, -3.6523e-01],
[-2.1719e+00, -2.5269e-02, -1.2578e+00]],
[[ 1.2188e+00, 3.4375e-01, -5.5469e-01],
[-3.3203e-01, 1.8516e+00, 1.2344e+00],
[-1.0156e+00, 3.2227e-01, -1.0078e+00]],
[[-2.0938e+00, -7.3047e-01, -1.3574e-01],
[-1.5312e+00, -1.1172e+00, -4.0820e-01],
[-3.4766e-01, 1.7773e-01, 6.9885e-03]]],
[[[-5.0391e-01, -1.1797e+00, 1.4801e-03],
[ 2.0781e+00, -8.9062e-01, -1.8984e+00],
[-4.8438e-01, -1.1016e+00, -1.4375e+00]],
[[ 9.3750e-02, -4.1797e-01, 5.5469e-01],
[ 1.6250e+00, 1.0312e+00, -1.6211e-01],
[-4.8633e-01, 3.2617e-01, -9.2969e-01]],
[[-4.6484e-01, 7.5391e-01, -7.0312e-01],
[-6.5625e-01, 8.2422e-01, -8.9844e-01],
[-9.8438e-01, -8.3594e-01, 1.6641e+00]]]])
[ CPUBFloat16Type{3,3,3,3} ]
TorchScript time for (4-D) -> torch.bfloat16: (0.00164s)!
-----------
==================== ALL FP DTYPES END =====================================
==================== ALL COMPLEX DTYPES =====================================
Eager [1-D] type:torch.complex64
tensor([-0.9300-0.8193j, 1.0416-0.1790j, 0.2302-0.8246j, 0.9158-1.4720j])
TorchScript [1-D] type:torch.complex64
tensor([-0.929979, 1.04156, 0.23018, 0.915819])
[ CPUComplexFloatType{4} ]
TorchScript time for (1-D) -> torch.complex64: (0.00037s)!
-----------
Eager [2-D] type:torch.complex64
tensor([[ 0.7654-0.5552j, -0.3561-0.6582j, -0.7426+1.0016j],
[-0.5377-0.4248j, 1.7243-0.4636j, -0.2695+0.6024j],
[-1.5785-0.2837j, -0.5250-0.2716j, -0.2962-0.6634j]])
TorchScript [2-D] type:torch.complex64
tensor([[ 0.765378, -0.35613, -0.742576],
[ -0.53769, 1.72431, -0.269472],
[ -1.57849, -0.524964, -0.296183]])
[ CPUComplexFloatType{3,3} ]
TorchScript time for (2-D) -> torch.complex64: (0.00027s)!
-----------
Eager [3-D] type:torch.complex64
tensor([[[ 0.1187-0.8477j, 0.2048-0.6291j, -0.3044-0.4063j],
[-0.7201+0.2617j, 0.3595+0.7699j, 0.1352+0.1472j],
[-0.3105-0.4218j, 0.6715+1.1293j, -0.5871+0.1121j]],
[[-1.4199-0.3818j, 0.0305-0.0972j, 0.2259-0.1177j],
[ 0.4217+0.6282j, 0.0384+1.4755j, -0.2068-0.8554j],
[-0.5060-0.3141j, -0.7597+0.3584j, -1.4490-0.0157j]],
[[-0.5656+0.1300j, 1.2757-0.7082j, 0.3222-0.5224j],
[ 0.1282+0.0769j, 0.2021-0.2422j, 1.0421-0.0763j],
[ 0.2629-0.2193j, -0.1899+0.3254j, -0.3020+0.9968j]]])
TorchScript [3-D] type:torch.complex64
tensor([[[ 0.118689, 0.204788, -0.304352],
[-0.720067, 0.359477, 0.135166],
[-0.310499, 0.671476, -0.587106]],
[[ -1.41988, 0.0304986, 0.225946],
[ 0.421696, 0.0384166, -0.206841],
[-0.506029, -0.759733, -1.44901]],
[[-0.565588, 1.27569, 0.322225],
[ 0.128218, 0.202146, 1.04206],
[ 0.262892, -0.189863, -0.301955]]])
[ CPUComplexFloatType{3,3,3} ]
TorchScript time for (3-D) -> torch.complex64: (0.00043s)!
-----------
Eager [4-D] type:torch.complex64
tensor([[[[-0.2770+1.2564j, 0.0624+0.3461j, -0.1096-0.9780j],
[-0.1064+0.1196j, -0.0807-0.7074j, 1.0841+0.2338j],
[-0.6579+0.8848j, -0.8972+1.5402j, 1.0161-0.2523j]],
[[-0.8023-0.3529j, -0.3264-0.7625j, 0.7875-0.3031j],
[ 0.4081-1.1618j, -0.7108-0.5414j, 0.4025+0.3835j],
[-0.2260+0.7985j, 0.6247+0.2080j, 0.3565+0.5803j]],
[[-1.0015-0.5233j, 0.1487+0.3331j, 1.3734-0.4958j],
[-0.3718+1.1961j, 0.0481+0.0045j, -1.0854+0.2778j],
[-1.2743-0.3658j, -1.0658+0.2781j, 0.0324+0.3726j]]],
[[[-0.3592+1.1516j, 0.0836+1.7643j, -0.6491-0.1094j],
[-0.9571+0.6677j, 0.4811-0.1759j, 0.3048-0.6663j],
[ 0.0968+0.9757j, 0.8968-0.9973j, 0.8238+0.3748j]],
[[-0.2758+0.6013j, -0.0417+0.7014j, -1.0368-1.1817j],
[ 0.2000-0.4017j, 1.5942+0.0257j, -0.1139+1.2012j],
[ 1.3841+0.8868j, 0.0045+0.1748j, 0.4462+0.8938j]],
[[ 0.3098-0.5984j, 0.5275-1.1977j, -0.3903-0.2357j],
[ 0.0210-0.1187j, -0.5680-0.4810j, -0.9874+0.9660j],
[ 0.5201-0.5420j, -0.7279-0.1538j, -0.2525+1.1461j]]],
[[[ 0.3931-0.6476j, -0.6775-0.4679j, 0.2828-0.5100j],
[-0.8703+0.5057j, -0.6477-0.7761j, -0.7263+0.8911j],
[ 0.8610-0.8076j, 0.5092+1.8037j, -0.5001+0.7840j]],
[[ 1.6583+0.4654j, -0.0240+0.9129j, -1.0653-0.4706j],
[-0.0226+0.8494j, 0.8013-1.3940j, -0.4003-0.2516j],
[-0.0088-0.4451j, -0.5306-0.1723j, -0.5267-1.1099j]],
[[ 0.1311+0.4117j, 0.5136-0.1520j, 0.0057+0.9654j],
[-0.1173-1.0415j, -0.9109-0.3181j, -0.4856-0.0579j],
[-0.0979-0.2113j, -0.3925-0.6164j, -0.3862-0.8679j]]]])
TorchScript [4-D] type:torch.complex64
tensor([[[[ -0.276973, 0.0624206, -0.109605],
[ -0.106446, -0.0806542, 1.08415],
[ -0.657891, -0.897199, 1.01614]],
[[ -0.802301, -0.326433, 0.78753],
[ 0.408126, -0.710759, 0.402528],
[ -0.225973, 0.624749, 0.356539]],
[[ -1.00149, 0.148687, 1.37345],
[ -0.371842, 0.0480938, -1.08543],
[ -1.27432, -1.06579, 0.0324239]]],
[[[ -0.359222, 0.0835989, -0.649138],
[ -0.957107, 0.48113, 0.304793],
[ 0.0968043, 0.896822, 0.823842]],
[[ -0.275812, -0.0417161, -1.03676],
[ 0.200029, 1.59425, -0.113897],
[ 1.38411, 0.00449597, 0.446219]],
[[ 0.309771, 0.527528, -0.390277],
[ 0.0209679, -0.568032, -0.987367],
[ 0.520079, -0.727863, -0.252475]]],
[[[ 0.393053, -0.6775, 0.282834],
[ -0.870277, -0.647742, -0.726313],
[ 0.860998, 0.509183, -0.500095]],
[[ 1.65826, -0.0240009, -1.06528],
[ -0.022581, 0.801281, -0.400253],
[-0.00884069, -0.530556, -0.526716]],
[[ 0.13114, 0.513602, 0.00569676],
[ -0.117331, -0.910932, -0.485619],
[ -0.0978902, -0.392469, -0.386172]]]])
[ CPUComplexFloatType{3,3,3,3} ]
TorchScript time for (4-D) -> torch.complex64: (0.00113s)!
-----------
Eager [1-D] type:torch.complex128
tensor([-0.5541+0.3006j, -0.1170-0.9491j, 1.0815+1.2962j, 0.2352-0.5141j],
dtype=torch.complex128)
TorchScript [1-D] type:torch.complex128
tensor([-0.554088, -0.117041, 1.08152, 0.235216])
[ CPUComplexDoubleType{4} ]
TorchScript time for (1-D) -> torch.complex128: (0.00023s)!
-----------
Eager [2-D] type:torch.complex128
tensor([[-0.5116+0.4584j, -0.2846+0.2599j, 0.2571+0.0945j],
[ 0.3809+0.6653j, 0.7015-0.0745j, 0.2919+0.2500j],
[-0.2755+0.1267j, 0.4371+0.1487j, 0.2751+0.1981j]],
dtype=torch.complex128)
TorchScript [2-D] type:torch.complex128
tensor([[-0.511646, -0.284603, 0.257134],
[ 0.380887, 0.701547, 0.291895],
[-0.275472, 0.437103, 0.275115]])
[ CPUComplexDoubleType{3,3} ]
TorchScript time for (2-D) -> torch.complex128: (0.00029s)!
-----------
Eager [3-D] type:torch.complex128
tensor([[[-1.3331-0.7625j, 0.2398+0.7519j, -0.7508-0.6379j],
[-1.3028+0.9423j, 1.0573+1.5073j, 0.6074+0.4088j],
[ 1.1923-0.2695j, 0.6222+0.7919j, 0.5064+0.7842j]],
[[ 0.5542+0.2179j, -1.0093+1.2744j, -0.0727-0.5609j],
[-0.4124+0.9072j, 0.3583-0.5023j, 1.1280+0.9233j],
[-0.2125-0.9957j, -1.0686+0.7063j, 0.5790-0.1411j]],
[[-0.6149+0.5442j, -0.2032-1.0726j, -0.7390+0.5122j],
[ 0.6448-0.0789j, 0.5638-0.7055j, -1.3482-0.4081j],
[ 0.1963+1.4464j, -0.0094-0.6841j, 0.6183-0.3520j]]],
dtype=torch.complex128)
TorchScript [3-D] type:torch.complex128
tensor([[[ -1.33314, 0.239838, -0.750793],
[ -1.30283, 1.05726, 0.607356],
[ 1.19231, 0.622213, 0.506445]],
[[ 0.554219, -1.0093, -0.072731],
[ -0.412442, 0.358349, 1.12796],
[ -0.212488, -1.0686, 0.579037]],
[[ -0.614893, -0.20321, -0.738997],
[ 0.644806, 0.563838, -1.34817],
[ 0.196272, -0.00941007, 0.61827]]])
[ CPUComplexDoubleType{3,3,3} ]
TorchScript time for (3-D) -> torch.complex128: (0.00049s)!
-----------
Eager [4-D] type:torch.complex128
tensor([[[[-0.1438+0.6406j, 0.2292-0.6868j, 0.3303-0.4974j],
[-1.3659-0.3466j, 0.0238-0.3286j, -1.1602-0.2299j],
[ 1.0483-0.0264j, 0.3616-0.3609j, 0.0640-0.4927j]],
[[ 0.6418+1.3700j, 0.5684+0.3227j, 1.3343+0.7246j],
[ 0.0564-0.1302j, 1.4262-0.6939j, 0.2703+0.3315j],
[ 0.0959+0.5528j, -0.1110-0.6185j, 1.5638-1.1916j]],
[[ 0.9512-0.1365j, 0.1159-0.7107j, -0.5493+0.0276j],
[ 0.0648+0.1242j, -0.9441+0.6092j, 0.4529-0.3871j],
[-0.0479+0.1198j, 1.2779-0.9687j, 0.4327-0.9936j]]],
[[[ 0.9696+0.0888j, -0.7910+1.0052j, -0.1918+0.6080j],
[ 0.5974+0.4345j, -0.0058+0.0296j, 0.4990-1.3724j],
[ 0.3110-0.7513j, -1.2225-0.7290j, -0.1452-0.4239j]],
[[ 0.0495+1.3660j, 0.3703-0.1851j, -0.1337-1.5077j],
[ 1.4892-0.0320j, 1.3482+0.8516j, -1.0963-1.5440j],
[-1.1369-0.3407j, 0.3531-0.1352j, 0.6037-0.3685j]],
[[-0.5621-0.1708j, 0.0493-0.5630j, -0.7528+2.3886j],
[-0.4093-0.0190j, -0.5031-0.2239j, -0.5433+0.9725j],
[-0.5930-0.6488j, -0.3962-0.8055j, 0.1806-0.4349j]]],
[[[-0.8766-0.7920j, 0.4793+0.2671j, 0.2854-0.7272j],
[ 0.7042+1.7739j, -1.4563-0.0706j, -0.5786+0.8518j],
[-0.2903-0.8915j, 0.2880-0.6191j, -0.0444-0.3684j]],
[[ 0.3163-0.2851j, -0.4126+0.9377j, 0.7007-0.1036j],
[ 0.0532+1.2781j, -0.6278-0.4776j, 0.2459+0.0950j],
[-0.2003-0.8868j, -0.5270-1.2496j, -0.2047-0.3906j]],
[[ 0.0998-0.1580j, -0.4435-0.2390j, -1.0446+0.4052j],
[ 0.0454+0.3333j, 1.4255-0.1104j, 0.1926+1.9190j],
[-1.6646-1.1313j, -0.5170-0.4106j, -0.4709+0.8378j]]]],
dtype=torch.complex128)
TorchScript [4-D] type:torch.complex128
tensor([[[[ -0.143777, 0.229205, 0.330318],
[ -1.36585, 0.0238296, -1.16019],
[ 1.04832, 0.361553, 0.0640072]],
[[ 0.641778, 0.5684, 1.33434],
[ 0.0564258, 1.42621, 0.270269],
[ 0.0959252, -0.111027, 1.56381]],
[[ 0.951225, 0.115901, -0.549262],
[ 0.0647512, -0.94409, 0.452886],
[-0.0479398, 1.27786, 0.432691]]],
[[[ 0.969609, -0.790952, -0.191788],
[ 0.597404, -0.0057586, 0.499015],
[ 0.310964, -1.22249, -0.145161]],
[[ 0.0495146, 0.370292, -0.133655],
[ 1.48924, 1.34823, -1.09631],
[ -1.13689, 0.353119, 0.603676]],
[[ -0.562089, 0.0493219, -0.752824],
[ -0.409285, -0.503069, -0.543332],
[ -0.592951, -0.396185, 0.180635]]],
[[[ -0.876644, 0.479266, 0.285381],
[ 0.704226, -1.45634, -0.578639],
[ -0.290311, 0.287959, -0.0444247]],
[[ 0.316305, -0.412555, 0.70068],
[ 0.0532242, -0.627831, 0.245922],
[ -0.20032, -0.526999, -0.204711]],
[[ 0.0997512, -0.443509, -1.04456],
[ 0.0454442, 1.42549, 0.192624],
[ -1.66456, -0.516995, -0.470874]]]])
[ CPUComplexDoubleType{3,3,3,3} ]
TorchScript time for (4-D) -> torch.complex128: (0.00108s)!
-----------
==================== ALL COMPLEX DTYPES END =====================================
==================== BOOL DTYPES =====================================
Eager [1-D] type:torch.bool
tensor([False, True, False])
TorchScript [1-D] type:torch.bool
tensor([0, 1, 0])
[ CPUBoolType{3} ]
TorchScript time for (1-D) -> torch.bool: (0.00017s)!
-----------
Eager [2-D] type:torch.bool
tensor([[ True, False, False],
[ True, False, True],
[False, True, True]])
TorchScript [2-D] type:torch.bool
tensor([[1, 0, 0],
[1, 0, 1],
[0, 1, 1]])
[ CPUBoolType{3,3} ]
TorchScript time for (2-D) -> torch.bool: (0.00022s)!
-----------
Eager [3-D] type:torch.bool
tensor([[[False, True, True],
[False, True, False],
[ True, False, False]],
[[ True, True, True],
[ True, True, False],
[False, False, True]],
[[ True, True, True],
[ True, False, True],
[False, False, True]]])
TorchScript [3-D] type:torch.bool
tensor([[[0, 1, 1],
[0, 1, 0],
[1, 0, 0]],
[[1, 1, 1],
[1, 1, 0],
[0, 0, 1]],
[[1, 1, 1],
[1, 0, 1],
[0, 0, 1]]])
[ CPUBoolType{3,3,3} ]
TorchScript time for (3-D) -> torch.bool: (0.00040s)!
-----------
Eager [4-D] type:torch.bool
tensor([[[[ True, False, True],
[False, False, False],
[False, True, True]],
[[False, True, True],
[False, False, False],
[ True, False, True]],
[[ True, True, True],
[ True, False, True],
[ True, True, False]]],
[[[ True, True, False],
[False, False, True],
[ True, True, False]],
[[ True, False, True],
[False, True, False],
[ True, False, True]],
[[False, True, False],
[False, True, False],
[ True, False, False]]],
[[[ True, False, True],
[ True, False, False],
[ True, True, False]],
[[False, False, False],
[False, False, True],
[ True, True, True]],
[[False, True, True],
[ True, True, False],
[ True, False, True]]]])
TorchScript [4-D] type:torch.bool
tensor([[[[1, 0, 1],
[0, 0, 0],
[0, 1, 1]],
[[0, 1, 1],
[0, 0, 0],
[1, 0, 1]],
[[1, 1, 1],
[1, 0, 1],
[1, 1, 0]]],
[[[1, 1, 0],
[0, 0, 1],
[1, 1, 0]],
[[1, 0, 1],
[0, 1, 0],
[1, 0, 1]],
[[0, 1, 0],
[0, 1, 0],
[1, 0, 0]]],
[[[1, 0, 1],
[1, 0, 0],
[1, 1, 0]],
[[0, 0, 0],
[0, 0, 1],
[1, 1, 1]],
[[0, 1, 1],
[1, 1, 0],
[1, 0, 1]]]])
[ CPUBoolType{3,3,3,3} ]
TorchScript time for (4-D) -> torch.bool: (0.00116s)!
-----------
# test-tensor-printout-differ.py
import torch
from typing import Any
import time


def f(a: Any):
    # Eager call: print() dispatches to Tensor.__repr__.
    # Scripted call: print() goes through TorchScript's tensor formatter.
    print(a)
    return isinstance(a, torch.Tensor)


m = torch.jit.script(f)

multipliers = [10 ** n for n in range(-100, 100, 5)]  # currently unused


def _p(tp, dt, tensor_):
    # Print the same tensor from eager mode and from the scripted function,
    # timing the scripted call.
    print(f"Eager [{tp}] type:{dt}")
    f(tensor_)
    print(f"TorchScript [{tp}] type:{dt}")
    st_1 = time.time()
    m(tensor_)
    et_1 = time.time()
    print(f"TorchScript time for ({tp}) -> {dt}: ({et_1 - st_1:.5f}s)!")
    print("-----------")


print("==================== ALL INT DTYPES =====================================")
for dt in torch.testing.get_all_int_dtypes():
    one_d = torch.randint(4, (3,), dtype=dt)
    two_d = torch.randint(0, 100, (3, 3), dtype=dt)
    three_d = torch.randint(0, 100, (3, 3, 3), dtype=dt)
    four_d = torch.randint(0, 100, (3, 3, 3, 3), dtype=dt)
    _p("1-D", dt, one_d)
    _p("2-D", dt, two_d)
    _p("3-D", dt, three_d)
    _p("4-D", dt, four_d)
print("==================== ALL INT DTYPES END =====================================")

print("==================== ALL FP DTYPES =====================================")
for dt in torch.testing.get_all_fp_dtypes():
    one_d = torch.randn(4, dtype=dt)
    two_d = torch.randn(3, 3, dtype=dt)
    three_d = torch.randn(3, 3, 3, dtype=dt)
    four_d = torch.randn(3, 3, 3, 3, dtype=dt)
    _p("1-D", dt, one_d)
    _p("2-D", dt, two_d)
    _p("3-D", dt, three_d)
    _p("4-D", dt, four_d)
print("==================== ALL FP DTYPES END =====================================")

print("==================== ALL COMPLEX DTYPES =====================================")
for dt in torch.testing.get_all_complex_dtypes():
    one_d = torch.randn(4, dtype=dt)
    two_d = torch.randn(3, 3, dtype=dt)
    three_d = torch.randn(3, 3, 3, dtype=dt)
    four_d = torch.randn(3, 3, 3, 3, dtype=dt)
    _p("1-D", dt, one_d)
    _p("2-D", dt, two_d)
    _p("3-D", dt, three_d)
    _p("4-D", dt, four_d)
print("==================== ALL COMPLEX DTYPES END =====================================")

print("==================== BOOL DTYPES =====================================")
one_d = torch.randint(0, 2, (3,), dtype=torch.bool)
two_d = torch.randint(0, 2, (3, 3), dtype=torch.bool)
three_d = torch.randint(0, 2, (3, 3, 3), dtype=torch.bool)
four_d = torch.randint(0, 2, (3, 3, 3, 3), dtype=torch.bool)
_p("1-D", torch.bool, one_d)
_p("2-D", torch.bool, two_d)
_p("3-D", torch.bool, three_d)
_p("4-D", torch.bool, four_d)