// (extraction artifact removed: collapsed line-number gutter from the original 132-line dump)
HloModule cluster_1992353871243790009__.123
// Scalar f32 addition reducer: (lhs, rhs) -> lhs + rhs.
// Used as the to_apply computation of %reduce-window.91 (the AvgPool summation).
%add_F32.87 (lhs.88: f32[], rhs.89: f32[]) -> f32[] { %lhs.88 = f32[] parameter(0) %rhs.89 = f32[] parameter(1) ROOT %add.90 = f32[] add(f32[] %lhs.88, f32[] %rhs.89) }
// Scalar f32 maximum reducer: (x, y) -> max(x, y).
// Used as the to_apply computation of %reduce.106 (the softmax row-max, initialized with -inf).
%max_float_.102 (x.103: f32[], y.104: f32[]) -> f32[] { %x.103 = f32[] parameter(0) %y.104 = f32[] parameter(1) ROOT %maximum.105 = f32[] maximum(f32[] %x.103, f32[] %y.104) }
// Scalar f32 addition reducer: (x, y) -> x + y.
// Used as the to_apply computation of %reduce.116 (the softmax denominator sum over exp(x - max)).
%add_float_.112 (x.113: f32[], y.114: f32[]) -> f32[] { %x.113 = f32[] parameter(0) %y.114 = f32[] parameter(1) ROOT %add.115 = f32[] add(f32[] %x.113, f32[] %y.114) }
// ENTRY: inference graph for a small MobileNet-style network, lowered from
// TensorFlow (see per-instruction metadata op_name paths). Structure, in order:
//   1. first_conv3x3: 3x3 stride-2 conv (224x224x3 -> 112x112x32) + BiasAdd
//      + batch-norm-inference + ReLU (ReLU is maximum(broadcast(0), x)).
//   2. depthwise_seperable_1: 3x3 depthwise conv (feature_group_count=32)
//      + bias + BN + ReLU, then 1x1 pointwise conv to 64 ch + bias + BN + ReLU.
//   3. depthwise_seperable_2: 3x3 stride-2 depthwise conv (-> 56x56x64)
//      + bias + BN + ReLU, then 1x1 pointwise conv to 128 ch + bias + BN + ReLU.
//   4. avg_pool2d: global average pool expressed as reduce-window (sum over the
//      full 56x56 window via %add_F32.87) divided by broadcast constant 3136 (= 56*56).
//   5. fc: squeeze to [1,128], dot with arg22.23 [128,1000], BiasAdd.
//   6. softmax: numerically-stable form — subtract row max (%max_float_.102 reduce,
//      init -inf), exponential, divide by the %add_float_.112 reduce of the exps.
// All batch-norm-inference ops take scale from broadcast(constant(1)) — the scale
// is presumably folded into the offset/mean/variance args upstream (TODO confirm
// against the exporting model). Output: f32[1,1000] class probabilities, returned
// via a single-element tuple + get-tuple-element.
ENTRY %cluster_1992353871243790009__.123 (arg0.1: f32[1,224,224,3], arg1.2: f32[32], arg2.3: f32[3,3,32,1], arg3.4: f32[32], arg4.5: f32[32], arg5.6: f32[32], arg6.7: f32[64], arg7.8: f32[1,1,32,64], arg8.9: f32[64], arg9.10: f32[64], arg10.11: f32[64], arg11.12: f32[64], arg12.13: f32[3,3,64,1], arg13.14: f32[64], arg14.15: f32[64], arg15.16: f32[64], arg16.17: f32[128], arg17.18: f32[1,1,64,128], arg18.19: f32[128], arg19.20: f32[128], arg20.21: f32[128], arg21.22: f32[1000], arg22.23: f32[128,1000], arg23.24: f32[32], arg24.25: f32[32], arg25.26: f32[32], arg26.27: f32[32], arg27.28: f32[3,3,3,32]) -> f32[1,1000] { %constant.80 = f32[] constant(0), metadata={op_type="Relu" op_name="mobilenet/depthwise_seperable_2/pw_batch_norm/Relu"} %broadcast.81 = f32[1,56,56,128]{3,2,1,0} broadcast(f32[] %constant.80), dimensions={}, metadata={op_type="Relu" op_name="mobilenet/depthwise_seperable_2/pw_batch_norm/Relu"} %constant.71 = f32[] constant(0), metadata={op_type="Relu" op_name="mobilenet/depthwise_seperable_2/dw_batch_norm/Relu"} %broadcast.72 = f32[1,56,56,64]{3,2,1,0} broadcast(f32[] %constant.71), dimensions={}, metadata={op_type="Relu" op_name="mobilenet/depthwise_seperable_2/dw_batch_norm/Relu"} %constant.61 = f32[] constant(0), metadata={op_type="Relu" op_name="mobilenet/depthwise_seperable_1/pw_batch_norm/Relu"} %broadcast.62 = f32[1,112,112,64]{3,2,1,0} broadcast(f32[] %constant.61), dimensions={}, metadata={op_type="Relu" op_name="mobilenet/depthwise_seperable_1/pw_batch_norm/Relu"} %constant.52 = f32[] constant(0), metadata={op_type="Relu" op_name="mobilenet/depthwise_seperable_1/dw_batch_norm/Relu"} %broadcast.53 = f32[1,112,112,32]{3,2,1,0} broadcast(f32[] %constant.52), dimensions={}, metadata={op_type="Relu" op_name="mobilenet/depthwise_seperable_1/dw_batch_norm/Relu"} %constant.42 = f32[] constant(0), metadata={op_type="Relu" op_name="mobilenet/first_batch_norm/Relu"} %broadcast.43 = f32[1,112,112,32]{3,2,1,0} broadcast(f32[] %constant.42), 
dimensions={}, metadata={op_type="Relu" op_name="mobilenet/first_batch_norm/Relu"} %arg0.1 = f32[1,224,224,3]{3,2,1,0} parameter(0), parameter_replication={false}, metadata={op_name="XLA_Args"} %reshape.29 = f32[1,224,224,3]{3,2,1,0} reshape(f32[1,224,224,3]{3,2,1,0} %arg0.1) %arg27.28 = f32[3,3,3,32]{3,2,1,0} parameter(27), parameter_replication={false}, metadata={op_name="XLA_Args"} %convolution.36 = f32[1,112,112,32]{3,2,1,0} convolution(f32[1,224,224,3]{3,2,1,0} %reshape.29, f32[3,3,3,32]{3,2,1,0} %arg27.28), window={size=3x3 stride=2x2 pad=0_1x0_1}, dim_labels=b01f_01io->b01f, metadata={op_type="Conv2D" op_name="mobilenet/first_conv3x3/Conv2D"} %arg26.27 = f32[32]{0} parameter(26), parameter_replication={false}, metadata={op_name="XLA_Args"} %broadcast.37 = f32[1,112,112,32]{3,2,1,0} broadcast(f32[32]{0} %arg26.27), dimensions={3}, metadata={op_type="BiasAdd" op_name="mobilenet/first_conv3x3/BiasAdd"} %add.38 = f32[1,112,112,32]{3,2,1,0} add(f32[1,112,112,32]{3,2,1,0} %convolution.36, f32[1,112,112,32]{3,2,1,0} %broadcast.37), metadata={op_type="BiasAdd" op_name="mobilenet/first_conv3x3/BiasAdd"} %convert.39 = f32[1,112,112,32]{3,2,1,0} convert(f32[1,112,112,32]{3,2,1,0} %add.38), metadata={op_type="FusedBatchNorm" op_name="mobilenet/first_batch_norm/FusedBatchNorm"} %constant.30 = f32[] constant(1), metadata={op_type="Const" op_name="mobilenet/depthwise_seperable_1/dw_batch_norm/Const"} %broadcast.31 = f32[32]{0} broadcast(f32[] %constant.30), dimensions={}, metadata={op_type="Const" op_name="mobilenet/depthwise_seperable_1/dw_batch_norm/Const"} %arg23.24 = f32[32]{0} parameter(23), parameter_replication={false}, metadata={op_name="XLA_Args"} %arg24.25 = f32[32]{0} parameter(24), parameter_replication={false}, metadata={op_name="XLA_Args"} %arg25.26 = f32[32]{0} parameter(25), parameter_replication={false}, metadata={op_name="XLA_Args"} %batch-norm-inference.40 = f32[1,112,112,32]{3,2,1,0} batch-norm-inference(f32[1,112,112,32]{3,2,1,0} %convert.39, 
f32[32]{0} %broadcast.31, f32[32]{0} %arg23.24, f32[32]{0} %arg24.25, f32[32]{0} %arg25.26), epsilon=0.001, feature_index=3, metadata={op_type="FusedBatchNorm" op_name="mobilenet/first_batch_norm/FusedBatchNorm"} %convert.41 = f32[1,112,112,32]{3,2,1,0} convert(f32[1,112,112,32]{3,2,1,0} %batch-norm-inference.40), metadata={op_type="FusedBatchNorm" op_name="mobilenet/first_batch_norm/FusedBatchNorm"} %maximum.44 = f32[1,112,112,32]{3,2,1,0} maximum(f32[1,112,112,32]{3,2,1,0} %broadcast.43, f32[1,112,112,32]{3,2,1,0} %convert.41), metadata={op_type="Relu" op_name="mobilenet/first_batch_norm/Relu"} %arg2.3 = f32[3,3,32,1]{3,2,1,0} parameter(2), parameter_replication={false}, metadata={op_name="XLA_Args"} %reshape.45 = f32[3,3,1,32]{3,2,1,0} reshape(f32[3,3,32,1]{3,2,1,0} %arg2.3), metadata={op_type="DepthwiseConv2dNative" op_name="mobilenet/depthwise_seperable_1/depthwise/depthwise"} %convolution.46 = f32[1,112,112,32]{3,2,1,0} convolution(f32[1,112,112,32]{3,2,1,0} %maximum.44, f32[3,3,1,32]{3,2,1,0} %reshape.45), window={size=3x3 pad=1_1x1_1}, dim_labels=b01f_01io->b01f, feature_group_count=32, metadata={op_type="DepthwiseConv2dNative" op_name="mobilenet/depthwise_seperable_1/depthwise/depthwise"} %arg1.2 = f32[32]{0} parameter(1), parameter_replication={false}, metadata={op_name="XLA_Args"} %broadcast.47 = f32[1,112,112,32]{3,2,1,0} broadcast(f32[32]{0} %arg1.2), dimensions={3}, metadata={op_type="BiasAdd" op_name="mobilenet/depthwise_seperable_1/depthwise/BiasAdd"} %add.48 = f32[1,112,112,32]{3,2,1,0} add(f32[1,112,112,32]{3,2,1,0} %convolution.46, f32[1,112,112,32]{3,2,1,0} %broadcast.47), metadata={op_type="BiasAdd" op_name="mobilenet/depthwise_seperable_1/depthwise/BiasAdd"} %convert.49 = f32[1,112,112,32]{3,2,1,0} convert(f32[1,112,112,32]{3,2,1,0} %add.48), metadata={op_type="FusedBatchNorm" op_name="mobilenet/depthwise_seperable_1/dw_batch_norm/FusedBatchNorm"} %arg3.4 = f32[32]{0} parameter(3), parameter_replication={false}, metadata={op_name="XLA_Args"} 
%arg4.5 = f32[32]{0} parameter(4), parameter_replication={false}, metadata={op_name="XLA_Args"} %arg5.6 = f32[32]{0} parameter(5), parameter_replication={false}, metadata={op_name="XLA_Args"} %batch-norm-inference.50 = f32[1,112,112,32]{3,2,1,0} batch-norm-inference(f32[1,112,112,32]{3,2,1,0} %convert.49, f32[32]{0} %broadcast.31, f32[32]{0} %arg3.4, f32[32]{0} %arg4.5, f32[32]{0} %arg5.6), epsilon=0.001, feature_index=3, metadata={op_type="FusedBatchNorm" op_name="mobilenet/depthwise_seperable_1/dw_batch_norm/FusedBatchNorm"} %convert.51 = f32[1,112,112,32]{3,2,1,0} convert(f32[1,112,112,32]{3,2,1,0} %batch-norm-inference.50), metadata={op_type="FusedBatchNorm" op_name="mobilenet/depthwise_seperable_1/dw_batch_norm/FusedBatchNorm"} %maximum.54 = f32[1,112,112,32]{3,2,1,0} maximum(f32[1,112,112,32]{3,2,1,0} %broadcast.53, f32[1,112,112,32]{3,2,1,0} %convert.51), metadata={op_type="Relu" op_name="mobilenet/depthwise_seperable_1/dw_batch_norm/Relu"} %arg7.8 = f32[1,1,32,64]{3,2,1,0} parameter(7), parameter_replication={false}, metadata={op_name="XLA_Args"} %convolution.55 = f32[1,112,112,64]{3,2,1,0} convolution(f32[1,112,112,32]{3,2,1,0} %maximum.54, f32[1,1,32,64]{3,2,1,0} %arg7.8), window={size=1x1}, dim_labels=b01f_01io->b01f, metadata={op_type="Conv2D" op_name="mobilenet/depthwise_seperable_1/pointwise/Conv2D"} %arg6.7 = f32[64]{0} parameter(6), parameter_replication={false}, metadata={op_name="XLA_Args"} %broadcast.56 = f32[1,112,112,64]{3,2,1,0} broadcast(f32[64]{0} %arg6.7), dimensions={3}, metadata={op_type="BiasAdd" op_name="mobilenet/depthwise_seperable_1/pointwise/BiasAdd"} %add.57 = f32[1,112,112,64]{3,2,1,0} add(f32[1,112,112,64]{3,2,1,0} %convolution.55, f32[1,112,112,64]{3,2,1,0} %broadcast.56), metadata={op_type="BiasAdd" op_name="mobilenet/depthwise_seperable_1/pointwise/BiasAdd"} %convert.58 = f32[1,112,112,64]{3,2,1,0} convert(f32[1,112,112,64]{3,2,1,0} %add.57), metadata={op_type="FusedBatchNorm" 
op_name="mobilenet/depthwise_seperable_1/pw_batch_norm/FusedBatchNorm"} %constant.32 = f32[] constant(1), metadata={op_type="Const" op_name="mobilenet/depthwise_seperable_2/dw_batch_norm/Const"} %broadcast.33 = f32[64]{0} broadcast(f32[] %constant.32), dimensions={}, metadata={op_type="Const" op_name="mobilenet/depthwise_seperable_2/dw_batch_norm/Const"} %arg8.9 = f32[64]{0} parameter(8), parameter_replication={false}, metadata={op_name="XLA_Args"} %arg9.10 = f32[64]{0} parameter(9), parameter_replication={false}, metadata={op_name="XLA_Args"} %arg10.11 = f32[64]{0} parameter(10), parameter_replication={false}, metadata={op_name="XLA_Args"} %batch-norm-inference.59 = f32[1,112,112,64]{3,2,1,0} batch-norm-inference(f32[1,112,112,64]{3,2,1,0} %convert.58, f32[64]{0} %broadcast.33, f32[64]{0} %arg8.9, f32[64]{0} %arg9.10, f32[64]{0} %arg10.11), epsilon=0.001, feature_index=3, metadata={op_type="FusedBatchNorm" op_name="mobilenet/depthwise_seperable_1/pw_batch_norm/FusedBatchNorm"} %convert.60 = f32[1,112,112,64]{3,2,1,0} convert(f32[1,112,112,64]{3,2,1,0} %batch-norm-inference.59), metadata={op_type="FusedBatchNorm" op_name="mobilenet/depthwise_seperable_1/pw_batch_norm/FusedBatchNorm"} %maximum.63 = f32[1,112,112,64]{3,2,1,0} maximum(f32[1,112,112,64]{3,2,1,0} %broadcast.62, f32[1,112,112,64]{3,2,1,0} %convert.60), metadata={op_type="Relu" op_name="mobilenet/depthwise_seperable_1/pw_batch_norm/Relu"} %arg12.13 = f32[3,3,64,1]{3,2,1,0} parameter(12), parameter_replication={false}, metadata={op_name="XLA_Args"} %reshape.64 = f32[3,3,1,64]{3,2,1,0} reshape(f32[3,3,64,1]{3,2,1,0} %arg12.13), metadata={op_type="DepthwiseConv2dNative" op_name="mobilenet/depthwise_seperable_2/depthwise/depthwise"} %convolution.65 = f32[1,56,56,64]{3,2,1,0} convolution(f32[1,112,112,64]{3,2,1,0} %maximum.63, f32[3,3,1,64]{3,2,1,0} %reshape.64), window={size=3x3 stride=2x2 pad=0_1x0_1}, dim_labels=b01f_01io->b01f, feature_group_count=64, metadata={op_type="DepthwiseConv2dNative" 
op_name="mobilenet/depthwise_seperable_2/depthwise/depthwise"} %arg11.12 = f32[64]{0} parameter(11), parameter_replication={false}, metadata={op_name="XLA_Args"} %broadcast.66 = f32[1,56,56,64]{3,2,1,0} broadcast(f32[64]{0} %arg11.12), dimensions={3}, metadata={op_type="BiasAdd" op_name="mobilenet/depthwise_seperable_2/depthwise/BiasAdd"} %add.67 = f32[1,56,56,64]{3,2,1,0} add(f32[1,56,56,64]{3,2,1,0} %convolution.65, f32[1,56,56,64]{3,2,1,0} %broadcast.66), metadata={op_type="BiasAdd" op_name="mobilenet/depthwise_seperable_2/depthwise/BiasAdd"} %convert.68 = f32[1,56,56,64]{3,2,1,0} convert(f32[1,56,56,64]{3,2,1,0} %add.67), metadata={op_type="FusedBatchNorm" op_name="mobilenet/depthwise_seperable_2/dw_batch_norm/FusedBatchNorm"} %arg13.14 = f32[64]{0} parameter(13), parameter_replication={false}, metadata={op_name="XLA_Args"} %arg14.15 = f32[64]{0} parameter(14), parameter_replication={false}, metadata={op_name="XLA_Args"} %arg15.16 = f32[64]{0} parameter(15), parameter_replication={false}, metadata={op_name="XLA_Args"} %batch-norm-inference.69 = f32[1,56,56,64]{3,2,1,0} batch-norm-inference(f32[1,56,56,64]{3,2,1,0} %convert.68, f32[64]{0} %broadcast.33, f32[64]{0} %arg13.14, f32[64]{0} %arg14.15, f32[64]{0} %arg15.16), epsilon=0.001, feature_index=3, metadata={op_type="FusedBatchNorm" op_name="mobilenet/depthwise_seperable_2/dw_batch_norm/FusedBatchNorm"} %convert.70 = f32[1,56,56,64]{3,2,1,0} convert(f32[1,56,56,64]{3,2,1,0} %batch-norm-inference.69), metadata={op_type="FusedBatchNorm" op_name="mobilenet/depthwise_seperable_2/dw_batch_norm/FusedBatchNorm"} %maximum.73 = f32[1,56,56,64]{3,2,1,0} maximum(f32[1,56,56,64]{3,2,1,0} %broadcast.72, f32[1,56,56,64]{3,2,1,0} %convert.70), metadata={op_type="Relu" op_name="mobilenet/depthwise_seperable_2/dw_batch_norm/Relu"} %arg17.18 = f32[1,1,64,128]{3,2,1,0} parameter(17), parameter_replication={false}, metadata={op_name="XLA_Args"} %convolution.74 = f32[1,56,56,128]{3,2,1,0} convolution(f32[1,56,56,64]{3,2,1,0} 
%maximum.73, f32[1,1,64,128]{3,2,1,0} %arg17.18), window={size=1x1}, dim_labels=b01f_01io->b01f, metadata={op_type="Conv2D" op_name="mobilenet/depthwise_seperable_2/pointwise/Conv2D"} %arg16.17 = f32[128]{0} parameter(16), parameter_replication={false}, metadata={op_name="XLA_Args"} %broadcast.75 = f32[1,56,56,128]{3,2,1,0} broadcast(f32[128]{0} %arg16.17), dimensions={3}, metadata={op_type="BiasAdd" op_name="mobilenet/depthwise_seperable_2/pointwise/BiasAdd"} %add.76 = f32[1,56,56,128]{3,2,1,0} add(f32[1,56,56,128]{3,2,1,0} %convolution.74, f32[1,56,56,128]{3,2,1,0} %broadcast.75), metadata={op_type="BiasAdd" op_name="mobilenet/depthwise_seperable_2/pointwise/BiasAdd"} %convert.77 = f32[1,56,56,128]{3,2,1,0} convert(f32[1,56,56,128]{3,2,1,0} %add.76), metadata={op_type="FusedBatchNorm" op_name="mobilenet/depthwise_seperable_2/pw_batch_norm/FusedBatchNorm"} %constant.34 = f32[] constant(1), metadata={op_type="Const" op_name="mobilenet/depthwise_seperable_2/pw_batch_norm/Const"} %broadcast.35 = f32[128]{0} broadcast(f32[] %constant.34), dimensions={}, metadata={op_type="Const" op_name="mobilenet/depthwise_seperable_2/pw_batch_norm/Const"} %arg18.19 = f32[128]{0} parameter(18), parameter_replication={false}, metadata={op_name="XLA_Args"} %arg19.20 = f32[128]{0} parameter(19), parameter_replication={false}, metadata={op_name="XLA_Args"} %arg20.21 = f32[128]{0} parameter(20), parameter_replication={false}, metadata={op_name="XLA_Args"} %batch-norm-inference.78 = f32[1,56,56,128]{3,2,1,0} batch-norm-inference(f32[1,56,56,128]{3,2,1,0} %convert.77, f32[128]{0} %broadcast.35, f32[128]{0} %arg18.19, f32[128]{0} %arg19.20, f32[128]{0} %arg20.21), epsilon=0.001, feature_index=3, metadata={op_type="FusedBatchNorm" op_name="mobilenet/depthwise_seperable_2/pw_batch_norm/FusedBatchNorm"} %convert.79 = f32[1,56,56,128]{3,2,1,0} convert(f32[1,56,56,128]{3,2,1,0} %batch-norm-inference.78), metadata={op_type="FusedBatchNorm" 
op_name="mobilenet/depthwise_seperable_2/pw_batch_norm/FusedBatchNorm"} %maximum.82 = f32[1,56,56,128]{3,2,1,0} maximum(f32[1,56,56,128]{3,2,1,0} %broadcast.81, f32[1,56,56,128]{3,2,1,0} %convert.79), metadata={op_type="Relu" op_name="mobilenet/depthwise_seperable_2/pw_batch_norm/Relu"} %convert.83 = f32[1,56,56,128]{3,2,1,0} convert(f32[1,56,56,128]{3,2,1,0} %maximum.82), metadata={op_type="AvgPool" op_name="mobilenet/avg_pool2d/AvgPool"} %constant.85 = f32[] constant(0), metadata={op_type="AvgPool" op_name="mobilenet/avg_pool2d/AvgPool"} %pad.86 = f32[1,56,56,128]{3,2,1,0} pad(f32[1,56,56,128]{3,2,1,0} %convert.83, f32[] %constant.85), padding=0_0x0_0x0_0x0_0, metadata={op_type="AvgPool" op_name="mobilenet/avg_pool2d/AvgPool"} %constant.84 = f32[] constant(0), metadata={op_type="AvgPool" op_name="mobilenet/avg_pool2d/AvgPool"} %reduce-window.91 = f32[1,1,1,128]{3,2,1,0} reduce-window(f32[1,56,56,128]{3,2,1,0} %pad.86, f32[] %constant.84), window={size=1x56x56x1 stride=1x2x2x1}, to_apply=%add_F32.87, metadata={op_type="AvgPool" op_name="mobilenet/avg_pool2d/AvgPool"} %constant.92 = f32[] constant(3136), metadata={op_type="AvgPool" op_name="mobilenet/avg_pool2d/AvgPool"} %broadcast.93 = f32[1,1,1,128]{3,2,1,0} broadcast(f32[] %constant.92), dimensions={}, metadata={op_type="AvgPool" op_name="mobilenet/avg_pool2d/AvgPool"} %divide.94 = f32[1,1,1,128]{3,2,1,0} divide(f32[1,1,1,128]{3,2,1,0} %reduce-window.91, f32[1,1,1,128]{3,2,1,0} %broadcast.93), metadata={op_type="AvgPool" op_name="mobilenet/avg_pool2d/AvgPool"} %convert.95 = f32[1,1,1,128]{3,2,1,0} convert(f32[1,1,1,128]{3,2,1,0} %divide.94), metadata={op_type="AvgPool" op_name="mobilenet/avg_pool2d/AvgPool"} %reshape.96 = f32[1,128]{1,0} reshape(f32[1,1,1,128]{3,2,1,0} %convert.95), metadata={op_type="Squeeze" op_name="mobilenet/squeeze"} %arg22.23 = f32[128,1000]{1,0} parameter(22), parameter_replication={false}, metadata={op_name="XLA_Args"} %dot.97 = f32[1,1000]{1,0} dot(f32[1,128]{1,0} %reshape.96, 
f32[128,1000]{1,0} %arg22.23), lhs_contracting_dims={1}, rhs_contracting_dims={0}, metadata={op_type="MatMul" op_name="mobilenet/fc/MatMul"} %arg21.22 = f32[1000]{0} parameter(21), parameter_replication={false}, metadata={op_name="XLA_Args"} %broadcast.98 = f32[1,1000]{1,0} broadcast(f32[1000]{0} %arg21.22), dimensions={1}, metadata={op_type="BiasAdd" op_name="mobilenet/fc/BiasAdd"} %add.99 = f32[1,1000]{1,0} add(f32[1,1000]{1,0} %dot.97, f32[1,1000]{1,0} %broadcast.98), metadata={op_type="BiasAdd" op_name="mobilenet/fc/BiasAdd"} %reshape.100 = f32[1,1000]{1,0} reshape(f32[1,1000]{1,0} %add.99), metadata={op_type="Reshape" op_name="mobilenet/softmax/Reshape"} %constant.101 = f32[] constant(-inf), metadata={op_type="Softmax" op_name="mobilenet/softmax/Softmax"} %reduce.106 = f32[1]{0} reduce(f32[1,1000]{1,0} %reshape.100, f32[] %constant.101), dimensions={1}, to_apply=%max_float_.102, metadata={op_type="Softmax" op_name="mobilenet/softmax/Softmax"} %broadcast.107 = f32[1,1000]{1,0} broadcast(f32[1]{0} %reduce.106), dimensions={0}, metadata={op_type="Softmax" op_name="mobilenet/softmax/Softmax"} %subtract.108 = f32[1,1000]{1,0} subtract(f32[1,1000]{1,0} %reshape.100, f32[1,1000]{1,0} %broadcast.107), metadata={op_type="Softmax" op_name="mobilenet/softmax/Softmax"} %exponential.109 = f32[1,1000]{1,0} exponential(f32[1,1000]{1,0} %subtract.108), metadata={op_type="Softmax" op_name="mobilenet/softmax/Softmax"} %convert.110 = f32[1,1000]{1,0} convert(f32[1,1000]{1,0} %exponential.109), metadata={op_type="Softmax" op_name="mobilenet/softmax/Softmax"} %constant.111 = f32[] constant(0), metadata={op_type="Softmax" op_name="mobilenet/softmax/Softmax"} %reduce.116 = f32[1]{0} reduce(f32[1,1000]{1,0} %convert.110, f32[] %constant.111), dimensions={1}, to_apply=%add_float_.112, metadata={op_type="Softmax" op_name="mobilenet/softmax/Softmax"} %convert.117 = f32[1]{0} convert(f32[1]{0} %reduce.116), metadata={op_type="Softmax" op_name="mobilenet/softmax/Softmax"} %broadcast.118 = 
f32[1,1000]{1,0} broadcast(f32[1]{0} %convert.117), dimensions={0}, metadata={op_type="Softmax" op_name="mobilenet/softmax/Softmax"} %divide.119 = f32[1,1000]{1,0} divide(f32[1,1000]{1,0} %exponential.109, f32[1,1000]{1,0} %broadcast.118), metadata={op_type="Softmax" op_name="mobilenet/softmax/Softmax"} %reshape.120 = f32[1,1000]{1,0} reshape(f32[1,1000]{1,0} %divide.119), metadata={op_name="XLA_Retvals"} %tuple.121 = (f32[1,1000]{1,0}) tuple(f32[1,1000]{1,0} %reshape.120), metadata={op_name="XLA_Retvals"} ROOT %get-tuple-element.122 = f32[1,1000]{1,0} get-tuple-element((f32[1,1000]{1,0}) %tuple.121), index=0, metadata={op_name="XLA_Retvals"} }
// (extraction artifact removed: stray table-pipe character)