<?xml version="1.0" encoding="UTF-8"?>
<net name="torch_jit" version="11">
<layers>
<layer id="0" name="sample" type="Parameter" version="opset1">
<data shape="?,3,?,?" element_type="f32" />
<rt_info>
<attribute name="fused_names" version="0" value="sample" />
</rt_info>
<output>
<port id="0" precision="FP32" names="sample">
<dim>-1</dim>
<dim>3</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="1" name="encoder.mid_block.attentions.0.to_out.0.bias" type="Const" version="opset1">
<data element_type="f32" shape="64" offset="0" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.attentions.0.to_out.0.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.attentions.0.to_out.0.bias">
<dim>64</dim>
</port>
</output>
</layer>
<layer id="2" name="encoder.mid_block.attentions.0.to_q.bias" type="Const" version="opset1">
<data element_type="f32" shape="64" offset="256" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.attentions.0.to_q.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.attentions.0.to_q.bias">
<dim>64</dim>
</port>
</output>
</layer>
<layer id="3" name="encoder.conv_in.weight" type="Const" version="opset1">
<data element_type="f32" shape="32, 3, 3, 3" offset="512" size="3456" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.conv_in.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.conv_in.weight">
<dim>32</dim>
<dim>3</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="4" name="/encoder/conv_in/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_in/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>3</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>32</dim>
<dim>3</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="5" name="encoder.conv_in.bias" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="3968" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.conv_in.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.conv_in.bias">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="6" name="Constant_47327" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47327" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="7" name="ShapeOf_47333" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47333" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="8" name="ShapeOf_47325" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47325" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="9" name="ShapeOf_47326" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47326" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="10" name="Constant_47328" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47328" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="11" name="Subtract_47329" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47328, Subtract_47329" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="12" name="Broadcast_47330" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47330, Constant_47327" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="13" name="Concat_47334" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47334, Constant_47327, ShapeOf_47333" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="14" name="Reshape_47335" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47335" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="15" name="/encoder/conv_in/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_in/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/conv_in/Conv_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="16" name="/encoder/down_blocks.0/resnets.0/norm1/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="4120" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/resnets.0/norm1/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="17" name="/encoder/down_blocks.0/resnets.0/norm1/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm1/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="18" name="Constant_47345" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47345" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="19" name="MVN_47346" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_47346" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="20" name="/encoder/down_blocks.0/resnets.0/norm1/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4144" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm1/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="21" name="Constant_47349" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47349" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="22" name="ShapeOf_47355" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47355" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="23" name="ShapeOf_47347" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47347" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="24" name="ShapeOf_47348" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47348" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="25" name="Constant_47350" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47350" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="26" name="Subtract_47351" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47350, Subtract_47351" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="27" name="Broadcast_47352" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47352, Constant_47349" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="28" name="Concat_47356" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47356, Constant_47349, ShapeOf_47355" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="29" name="Reshape_47357" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47357" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="30" name="Multiply_47360" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_47360" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="31" name="/encoder/down_blocks.0/resnets.0/norm1/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4272" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm1/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="32" name="Constant_47361" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47361" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="33" name="ShapeOf_47367" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47367" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="34" name="Constant_47362" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47362" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="35" name="Subtract_47363" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47362, Subtract_47363" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="36" name="Broadcast_47364" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47364, Constant_47361" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="37" name="Concat_47368" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47368, Constant_47361, ShapeOf_47367" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="38" name="Reshape_47369" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47369" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="39" name="/encoder/down_blocks.0/resnets.0/norm1/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm1/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="40" name="/encoder/down_blocks.0/resnets.0/norm1/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.0/resnets.0/norm1/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="41" name="/encoder/down_blocks.0/resnets.0/norm1/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm1/Reshape_1_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="42" name="onnx::Mul_583" type="Const" version="opset1">
<data element_type="f32" shape="32, 1, 1" offset="4144" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_583" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_581,onnx::Mul_583,onnx::Mul_590">
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="43" name="/encoder/down_blocks.0/resnets.0/norm1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm1/Mul_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="44" name="onnx::Add_584" type="Const" version="opset1">
<data element_type="f32" shape="32, 1, 1" offset="4272" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_584" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_582,onnx::Add_584,onnx::Add_591">
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="45" name="/encoder/down_blocks.0/resnets.0/norm1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm1/Add_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="46" name="/encoder/down_blocks.0/resnets.0/nonlinearity/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/nonlinearity/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.0/resnets.0/nonlinearity/Sigmoid_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="47" name="/encoder/down_blocks.0/resnets.0/nonlinearity/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/nonlinearity/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/nonlinearity/Mul_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="48" name="encoder.down_blocks.0.resnets.0.conv1.weight" type="Const" version="opset1">
<data element_type="f32" shape="32, 32, 3, 3" offset="4400" size="36864" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.0.resnets.0.conv1.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.0.resnets.0.conv1.weight">
<dim>32</dim>
<dim>32</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="49" name="/encoder/down_blocks.0/resnets.0/conv1/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/conv1/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>32</dim>
<dim>32</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="50" name="encoder.down_blocks.0.resnets.0.conv1.bias" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="41264" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.0.resnets.0.conv1.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.0.resnets.0.conv1.bias">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="51" name="Constant_47388" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47388" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="52" name="ShapeOf_47394" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47394" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="53" name="ShapeOf_47386" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47386" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="54" name="ShapeOf_47387" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47387" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="55" name="Constant_47389" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47389" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="56" name="Subtract_47390" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47389, Subtract_47390" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="57" name="Broadcast_47391" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47391, Constant_47388" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="58" name="Concat_47395" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47395, Constant_47388, ShapeOf_47394" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="59" name="Reshape_47396" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47396" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="60" name="/encoder/down_blocks.0/resnets.0/conv1/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/conv1/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/conv1/Conv_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="61" name="/encoder/down_blocks.0/resnets.0/norm2/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="4120" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/resnets.0/norm2/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="62" name="/encoder/down_blocks.0/resnets.0/norm2/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm2/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="63" name="Constant_47406" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47406" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="64" name="MVN_47407" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_47407" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="65" name="/encoder/down_blocks.0/resnets.0/norm2/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4144" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm2/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="66" name="Constant_47410" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47410" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="67" name="ShapeOf_47416" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47416" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="68" name="ShapeOf_47408" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47408" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="69" name="ShapeOf_47409" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47409" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="70" name="Constant_47411" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47411" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="71" name="Subtract_47412" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47411, Subtract_47412" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="72" name="Broadcast_47413" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47413, Constant_47410" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="73" name="Concat_47417" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47417, Constant_47410, ShapeOf_47416" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="74" name="Reshape_47418" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47418" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="75" name="Multiply_47421" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_47421" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="76" name="/encoder/down_blocks.0/resnets.0/norm2/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4272" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm2/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="77" name="Constant_47422" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47422" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="78" name="ShapeOf_47428" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47428" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="79" name="Constant_47423" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47423" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="80" name="Subtract_47424" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47423, Subtract_47424" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="81" name="Broadcast_47425" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47425, Constant_47422" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="82" name="Concat_47429" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47429, Constant_47422, ShapeOf_47428" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="83" name="Reshape_47430" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47430" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="84" name="/encoder/down_blocks.0/resnets.0/norm2/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm2/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="85" name="/encoder/down_blocks.0/resnets.0/norm2/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.0/resnets.0/norm2/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="86" name="/encoder/down_blocks.0/resnets.0/norm2/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm2/Reshape_1_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="87" name="/encoder/down_blocks.0/resnets.0/norm2/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm2/Mul_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="88" name="/encoder/down_blocks.0/resnets.0/norm2/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/norm2/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/norm2/Add_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="89" name="/encoder/down_blocks.0/resnets.0/nonlinearity_1/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/nonlinearity_1/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.0/resnets.0/nonlinearity_1/Sigmoid_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="90" name="/encoder/down_blocks.0/resnets.0/nonlinearity_1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/nonlinearity_1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/nonlinearity_1/Mul_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="91" name="encoder.down_blocks.0.resnets.0.conv2.weight" type="Const" version="opset1">
<data element_type="f32" shape="32, 32, 3, 3" offset="41392" size="36864" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.0.resnets.0.conv2.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.0.resnets.0.conv2.weight">
<dim>32</dim>
<dim>32</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="92" name="/encoder/down_blocks.0/resnets.0/conv2/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/conv2/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>32</dim>
<dim>32</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="93" name="encoder.down_blocks.0.resnets.0.conv2.bias" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="78256" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.0.resnets.0.conv2.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.0.resnets.0.conv2.bias">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="94" name="Constant_47449" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47449" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="95" name="ShapeOf_47455" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47455" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="96" name="ShapeOf_47447" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47447" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="97" name="ShapeOf_47448" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47448" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="98" name="Constant_47450" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47450" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="99" name="Subtract_47451" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47450, Subtract_47451" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="100" name="Broadcast_47452" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47452, Constant_47449" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="101" name="Concat_47456" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47456, Constant_47449, ShapeOf_47455" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="102" name="Reshape_47457" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47457" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="103" name="/encoder/down_blocks.0/resnets.0/conv2/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/conv2/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/conv2/Conv_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="104" name="/encoder/down_blocks.0/resnets.0/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/Add_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="105" name="/encoder/down_blocks.0/resnets.0/Constant" type="Const" version="opset1">
<data element_type="f32" shape="" offset="78384" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/Constant" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/resnets.0/Constant_output_0" />
</output>
</layer>
<layer id="106" name="/encoder/down_blocks.0/resnets.0/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/resnets.0/Div" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/resnets.0/Div_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="107" name="/encoder/down_blocks.0/downsamplers.0/Constant_1" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="78388" size="32" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_1" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Constant_1_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="108" name="Constant_47467" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47467" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="109" name="/encoder/down_blocks.0/downsamplers.0/Constant" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78428" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Constant_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="110" name="/encoder/down_blocks.0/downsamplers.0/ConstantOfShape" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant, /encoder/down_blocks.0/downsamplers.0/ConstantOfShape, Constant_47467" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/ConstantOfShape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="111" name="/encoder/down_blocks.0/downsamplers.0/Concat" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Concat, /encoder/down_blocks.0/downsamplers.0/Constant_1" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Concat_output_0">
<dim>8</dim>
</port>
</output>
</layer>
<layer id="112" name="/encoder/down_blocks.0/downsamplers.0/Constant_2" type="Const" version="opset1">
<data element_type="i64" shape="2" offset="78436" size="16" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_2" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Constant_2_output_0">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="113" name="/encoder/down_blocks.0/downsamplers.0/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_2, /encoder/down_blocks.0/downsamplers.0/Reshape" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>8</dim>
</port>
<port id="1" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Reshape_output_0">
<dim>4</dim>
<dim>2</dim>
</port>
</output>
</layer>
<layer id="114" name="/encoder/down_blocks.0/downsamplers.0/Constant_4" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78452" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_4" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Constant_4_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="115" name="/encoder/down_blocks.0/downsamplers.0/Constant_5" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78460" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_5" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Constant_5_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="116" name="/encoder/down_blocks.0/downsamplers.0/Constant_6" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78452" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_6" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Constant_6_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="117" name="/encoder/down_blocks.0/downsamplers.0/Constant_3" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_3" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Constant_3_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="118" name="/encoder/down_blocks.0/downsamplers.0/Slice" type="Slice" version="opset8">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_3, /encoder/down_blocks.0/downsamplers.0/Constant_4, /encoder/down_blocks.0/downsamplers.0/Constant_5, /encoder/down_blocks.0/downsamplers.0/Constant_6, /encoder/down_blocks.0/downsamplers.0/Slice" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
<dim>2</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
<port id="4" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="5" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Slice_output_0">
<dim>4</dim>
<dim>2</dim>
</port>
</output>
</layer>
<layer id="119" name="Constant_47481" type="Const" version="opset1">
<data element_type="i64" shape="2" offset="78468" size="16" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47481" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="120" name="/encoder/down_blocks.0/downsamplers.0/Transpose" type="Transpose" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Transpose, Constant_47481" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
<dim>2</dim>
</port>
<port id="1" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Transpose_output_0">
<dim>2</dim>
<dim>4</dim>
</port>
</output>
</layer>
<layer id="121" name="Constant_159786" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78452" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Reshape_1" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="122" name="/encoder/down_blocks.0/downsamplers.0/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>2</dim>
<dim>4</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Reshape_1_output_0">
<dim>8</dim>
</port>
</output>
</layer>
<layer id="123" name="/encoder/down_blocks.0/downsamplers.0/Cast" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Cast" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>8</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.0/downsamplers.0/Cast_output_0">
<dim>8</dim>
</port>
</output>
</layer>
<layer id="124" name="Constant_47489" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47489" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="125" name="Split_47490" type="Split" version="opset1">
<data num_splits="2" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47489, Split_47490" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>8</dim>
</port>
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64">
<dim>4</dim>
</port>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="126" name="/encoder/down_blocks.0/downsamplers.0/Constant_8" type="Const" version="opset1">
<data element_type="f32" shape="" offset="78484" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Constant_8" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.0/downsamplers.0/Constant_8_output_0" />
</output>
</layer>
<layer id="127" name="/encoder/down_blocks.0/downsamplers.0/Pad" type="Pad" version="opset1">
<data pad_mode="constant" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/Pad" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
<port id="2" precision="I64">
<dim>4</dim>
</port>
<port id="3" precision="FP32" />
</input>
<output>
<port id="4" precision="FP32" names="/encoder/down_blocks.0/downsamplers.0/Pad_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="128" name="encoder.down_blocks.0.downsamplers.0.conv.weight" type="Const" version="opset1">
<data element_type="f32" shape="32, 32, 3, 3" offset="78488" size="36864" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.0.downsamplers.0.conv.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.0.downsamplers.0.conv.weight">
<dim>32</dim>
<dim>32</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="129" name="/encoder/down_blocks.0/downsamplers.0/conv/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="2, 2" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/conv/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>32</dim>
<dim>32</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="130" name="encoder.down_blocks.0.downsamplers.0.conv.bias" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="115352" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.0.downsamplers.0.conv.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.0.downsamplers.0.conv.bias">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="131" name="Constant_47505" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47505" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="132" name="ShapeOf_47511" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47511" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="133" name="ShapeOf_47503" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47503" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="134" name="ShapeOf_47504" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47504" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="135" name="Constant_47506" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47506" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="136" name="Subtract_47507" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47506, Subtract_47507" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="137" name="Broadcast_47508" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47508, Constant_47505" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="138" name="Concat_47512" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47512, Constant_47505, ShapeOf_47511" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="139" name="Reshape_47513" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47513" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="140" name="/encoder/down_blocks.0/downsamplers.0/conv/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.0/downsamplers.0/conv/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.0/downsamplers.0/conv/Conv_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="141" name="encoder.down_blocks.1.resnets.0.conv_shortcut.weight" type="Const" version="opset1">
<data element_type="f32" shape="64, 32, 1, 1" offset="115480" size="8192" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.resnets.0.conv_shortcut.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.resnets.0.conv_shortcut.weight">
<dim>64</dim>
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="142" name="/encoder/down_blocks.1/resnets.0/conv_shortcut/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/conv_shortcut/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="143" name="encoder.down_blocks.1.resnets.0.conv_shortcut.bias" type="Const" version="opset1">
<data element_type="f32" shape="64" offset="123672" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.resnets.0.conv_shortcut.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.resnets.0.conv_shortcut.bias">
<dim>64</dim>
</port>
</output>
</layer>
<layer id="144" name="Constant_47642" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47642" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="145" name="ShapeOf_47648" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="123928" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47648" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="146" name="ShapeOf_47640" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47640" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="147" name="ShapeOf_47641" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47641" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="148" name="Constant_47643" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47643" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="149" name="Subtract_47644" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47643, Subtract_47644" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="150" name="Broadcast_47645" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47645, Constant_47642" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="151" name="Concat_47649" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47649, Constant_47642, ShapeOf_47648" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="152" name="Reshape_47650" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47650" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="153" name="/encoder/down_blocks.1/resnets.0/conv_shortcut/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/conv_shortcut/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/conv_shortcut/Conv_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="154" name="/encoder/down_blocks.1/resnets.0/norm1/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="4120" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.1/resnets.0/norm1/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="155" name="/encoder/down_blocks.1/resnets.0/norm1/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm1/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="156" name="Constant_47523" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47523" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="157" name="MVN_47524" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_47524" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="158" name="/encoder/down_blocks.1/resnets.0/norm1/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4144" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm1/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="159" name="Constant_47527" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47527" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="160" name="ShapeOf_47533" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47533" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="161" name="ShapeOf_47525" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47525" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="162" name="ShapeOf_47526" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47526" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="163" name="Constant_47528" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47528" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="164" name="Subtract_47529" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47528, Subtract_47529" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="165" name="Broadcast_47530" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47530, Constant_47527" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="166" name="Concat_47534" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47534, Constant_47527, ShapeOf_47533" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="167" name="Reshape_47535" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47535" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="168" name="Multiply_47538" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_47538" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="169" name="/encoder/down_blocks.1/resnets.0/norm1/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4272" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm1/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="170" name="Constant_47539" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47539" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="171" name="ShapeOf_47545" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47545" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="172" name="Constant_47540" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47540" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="173" name="Subtract_47541" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47540, Subtract_47541" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="174" name="Broadcast_47542" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47542, Constant_47539" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="175" name="Concat_47546" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47546, Constant_47539, ShapeOf_47545" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="176" name="Reshape_47547" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47547" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="177" name="/encoder/down_blocks.1/resnets.0/norm1/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm1/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="178" name="/encoder/down_blocks.1/resnets.0/norm1/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.1/resnets.0/norm1/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="179" name="/encoder/down_blocks.1/resnets.0/norm1/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm1/Reshape_1_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="180" name="/encoder/down_blocks.1/resnets.0/norm1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm1/Mul_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="181" name="/encoder/down_blocks.1/resnets.0/norm1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>32</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm1/Add_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="182" name="/encoder/down_blocks.1/resnets.0/nonlinearity/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/nonlinearity/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.1/resnets.0/nonlinearity/Sigmoid_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="183" name="/encoder/down_blocks.1/resnets.0/nonlinearity/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/nonlinearity/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/nonlinearity/Mul_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="184" name="encoder.down_blocks.1.resnets.0.conv1.weight" type="Const" version="opset1">
<data element_type="f32" shape="64, 32, 3, 3" offset="123936" size="73728" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.resnets.0.conv1.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.resnets.0.conv1.weight">
<dim>64</dim>
<dim>32</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="185" name="/encoder/down_blocks.1/resnets.0/conv1/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/conv1/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>32</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="186" name="encoder.down_blocks.1.resnets.0.conv1.bias" type="Const" version="opset1">
<data element_type="f32" shape="64" offset="197664" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.resnets.0.conv1.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.resnets.0.conv1.bias">
<dim>64</dim>
</port>
</output>
</layer>
<layer id="187" name="Constant_47566" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47566" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="188" name="ShapeOf_47572" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="123928" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47572" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="189" name="ShapeOf_47564" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47564" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="190" name="ShapeOf_47565" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47565" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="191" name="Constant_47567" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47567" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="192" name="Subtract_47568" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47567, Subtract_47568" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="193" name="Broadcast_47569" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47569, Constant_47566" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="194" name="Concat_47573" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47573, Constant_47566, ShapeOf_47572" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="195" name="Reshape_47574" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47574" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="196" name="/encoder/down_blocks.1/resnets.0/conv1/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/conv1/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/conv1/Conv_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="197" name="/encoder/down_blocks.1/resnets.0/norm2/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="4120" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/down_blocks.1/resnets.0/norm2/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="198" name="/encoder/down_blocks.1/resnets.0/norm2/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm2/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="199" name="Constant_47584" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47584" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="200" name="MVN_47585" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_47585" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="201" name="/encoder/down_blocks.1/resnets.0/norm2/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4144" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm2/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="202" name="Constant_47588" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47588" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="203" name="ShapeOf_47594" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47594" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="204" name="ShapeOf_47586" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47586" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="205" name="ShapeOf_47587" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47587" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="206" name="Constant_47589" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47589" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="207" name="Subtract_47590" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47589, Subtract_47590" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="208" name="Broadcast_47591" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47591, Constant_47588" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="209" name="Concat_47595" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47595, Constant_47588, ShapeOf_47594" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="210" name="Reshape_47596" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47596" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="211" name="Multiply_47599" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_47599" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="212" name="/encoder/down_blocks.1/resnets.0/norm2/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4272" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm2/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="213" name="Constant_47600" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47600" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="214" name="ShapeOf_47606" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47606" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="215" name="Constant_47601" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47601" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="216" name="Subtract_47602" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47601, Subtract_47602" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="217" name="Broadcast_47603" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47603, Constant_47600" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="218" name="Concat_47607" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47607, Constant_47600, ShapeOf_47606" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="219" name="Reshape_47608" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47608" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="220" name="/encoder/down_blocks.1/resnets.0/norm2/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm2/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="221" name="/encoder/down_blocks.1/resnets.0/norm2/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/down_blocks.1/resnets.0/norm2/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="222" name="/encoder/down_blocks.1/resnets.0/norm2/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm2/Reshape_1_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="223" name="onnx::Mul_594" type="Const" version="opset1">
<data element_type="f32" shape="64, 1, 1" offset="197920" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_594" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_592,onnx::Mul_594,onnx::Mul_596,onnx::Mul_610,onnx::Mul_612,onnx::Mul_614">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="224" name="/encoder/down_blocks.1/resnets.0/norm2/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm2/Mul_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="225" name="onnx::Add_595" type="Const" version="opset1">
<data element_type="f32" shape="64, 1, 1" offset="198176" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_595" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_593,onnx::Add_595,onnx::Add_597,onnx::Add_611,onnx::Add_613,onnx::Add_615">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="226" name="/encoder/down_blocks.1/resnets.0/norm2/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/norm2/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/norm2/Add_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="227" name="/encoder/down_blocks.1/resnets.0/nonlinearity_1/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/nonlinearity_1/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/down_blocks.1/resnets.0/nonlinearity_1/Sigmoid_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="228" name="/encoder/down_blocks.1/resnets.0/nonlinearity_1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/nonlinearity_1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/nonlinearity_1/Mul_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="229" name="encoder.down_blocks.1.resnets.0.conv2.weight" type="Const" version="opset1">
<data element_type="f32" shape="64, 64, 3, 3" offset="198432" size="147456" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.resnets.0.conv2.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.resnets.0.conv2.weight">
<dim>64</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="230" name="/encoder/down_blocks.1/resnets.0/conv2/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/conv2/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="231" name="encoder.down_blocks.1.resnets.0.conv2.bias" type="Const" version="opset1">
<data element_type="f32" shape="64" offset="345888" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.down_blocks.1.resnets.0.conv2.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.down_blocks.1.resnets.0.conv2.bias">
<dim>64</dim>
</port>
</output>
</layer>
<layer id="232" name="Constant_47627" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47627" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="233" name="ShapeOf_47633" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="123928" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47633" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="234" name="ShapeOf_47625" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47625" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="235" name="ShapeOf_47626" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47626" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="236" name="Constant_47628" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47628" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="237" name="Subtract_47629" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47628, Subtract_47629" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="238" name="Broadcast_47630" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47630, Constant_47627" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="239" name="Concat_47634" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47634, Constant_47627, ShapeOf_47633" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="240" name="Reshape_47635" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47635" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="241" name="/encoder/down_blocks.1/resnets.0/conv2/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/conv2/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/conv2/Conv_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="242" name="/encoder/down_blocks.1/resnets.0/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/Add_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="243" name="/encoder/down_blocks.1/resnets.0/Constant" type="Const" version="opset1">
<data element_type="f32" shape="" offset="78384" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/Constant" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/down_blocks.1/resnets.0/Constant_output_0" />
</output>
</layer>
<layer id="244" name="/encoder/down_blocks.1/resnets.0/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/down_blocks.1/resnets.0/Div" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/down_blocks.1/resnets.0/Div_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="245" name="/encoder/mid_block/resnets.0/norm1/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="4120" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/resnets.0/norm1/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="246" name="/encoder/mid_block/resnets.0/norm1/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm1/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="247" name="Constant_47663" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47663" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="248" name="MVN_47664" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_47664" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="249" name="/encoder/mid_block/resnets.0/norm1/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4144" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.0/norm1/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="250" name="Constant_47667" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47667" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="251" name="ShapeOf_47673" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47673" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="252" name="ShapeOf_47665" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47665" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="253" name="ShapeOf_47666" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47666" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="254" name="Constant_47668" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47668" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="255" name="Subtract_47669" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47668, Subtract_47669" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="256" name="Broadcast_47670" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47670, Constant_47667" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="257" name="Concat_47674" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47674, Constant_47667, ShapeOf_47673" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="258" name="Reshape_47675" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47675" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="259" name="Multiply_47678" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_47678" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="260" name="/encoder/mid_block/resnets.0/norm1/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4272" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.0/norm1/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="261" name="Constant_47679" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47679" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="262" name="ShapeOf_47685" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47685" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="263" name="Constant_47680" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47680" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="264" name="Subtract_47681" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47680, Subtract_47681" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="265" name="Broadcast_47682" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47682, Constant_47679" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="266" name="Concat_47686" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47686, Constant_47679, ShapeOf_47685" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="267" name="Reshape_47687" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47687" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="268" name="/encoder/mid_block/resnets.0/norm1/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm1/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="269" name="/encoder/mid_block/resnets.0/norm1/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/resnets.0/norm1/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="270" name="/encoder/mid_block/resnets.0/norm1/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm1/Reshape_1_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="271" name="/encoder/mid_block/resnets.0/norm1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm1/Mul_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="272" name="/encoder/mid_block/resnets.0/norm1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm1/Add_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="273" name="/encoder/mid_block/resnets.0/nonlinearity/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/nonlinearity/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/mid_block/resnets.0/nonlinearity/Sigmoid_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="274" name="/encoder/mid_block/resnets.0/nonlinearity/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/nonlinearity/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/nonlinearity/Mul_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="275" name="encoder.mid_block.resnets.0.conv1.weight" type="Const" version="opset1">
<data element_type="f32" shape="64, 64, 3, 3" offset="346144" size="147456" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.resnets.0.conv1.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.resnets.0.conv1.weight">
<dim>64</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="276" name="/encoder/mid_block/resnets.0/conv1/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/conv1/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="277" name="encoder.mid_block.resnets.0.conv1.bias" type="Const" version="opset1">
<data element_type="f32" shape="64" offset="493600" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.resnets.0.conv1.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.resnets.0.conv1.bias">
<dim>64</dim>
</port>
</output>
</layer>
<layer id="278" name="Constant_47706" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47706" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="279" name="ShapeOf_47712" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="123928" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47712" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="280" name="ShapeOf_47704" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47704" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="281" name="ShapeOf_47705" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47705" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="282" name="Constant_47707" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47707" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="283" name="Subtract_47708" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47707, Subtract_47708" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="284" name="Broadcast_47709" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47709, Constant_47706" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="285" name="Concat_47713" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47713, Constant_47706, ShapeOf_47712" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="286" name="Reshape_47714" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47714" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="287" name="/encoder/mid_block/resnets.0/conv1/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/conv1/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/conv1/Conv_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="288" name="/encoder/mid_block/resnets.0/norm2/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="4120" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/resnets.0/norm2/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="289" name="/encoder/mid_block/resnets.0/norm2/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm2/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="290" name="Constant_47724" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47724" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="291" name="MVN_47725" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_47725" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="292" name="/encoder/mid_block/resnets.0/norm2/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4144" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.0/norm2/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="293" name="Constant_47728" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47728" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="294" name="ShapeOf_47734" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47734" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="295" name="ShapeOf_47726" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47726" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="296" name="ShapeOf_47727" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47727" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="297" name="Constant_47729" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47729" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="298" name="Subtract_47730" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47729, Subtract_47730" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="299" name="Broadcast_47731" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47731, Constant_47728" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="300" name="Concat_47735" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47735, Constant_47728, ShapeOf_47734" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="301" name="Reshape_47736" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47736" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="302" name="Multiply_47739" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_47739" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="303" name="/encoder/mid_block/resnets.0/norm2/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4272" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.0/norm2/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="304" name="Constant_47740" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47740" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="305" name="ShapeOf_47746" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47746" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="306" name="Constant_47741" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47741" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="307" name="Subtract_47742" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47741, Subtract_47742" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="308" name="Broadcast_47743" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47743, Constant_47740" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="309" name="Concat_47747" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47747, Constant_47740, ShapeOf_47746" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="310" name="Reshape_47748" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47748" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="311" name="/encoder/mid_block/resnets.0/norm2/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm2/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="312" name="/encoder/mid_block/resnets.0/norm2/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/resnets.0/norm2/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="313" name="/encoder/mid_block/resnets.0/norm2/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm2/Reshape_1_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="314" name="/encoder/mid_block/resnets.0/norm2/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm2/Mul_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="315" name="/encoder/mid_block/resnets.0/norm2/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/norm2/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/norm2/Add_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="316" name="/encoder/mid_block/resnets.0/nonlinearity_1/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/nonlinearity_1/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/mid_block/resnets.0/nonlinearity_1/Sigmoid_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="317" name="/encoder/mid_block/resnets.0/nonlinearity_1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/nonlinearity_1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/nonlinearity_1/Mul_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="318" name="encoder.mid_block.resnets.0.conv2.weight" type="Const" version="opset1">
<data element_type="f32" shape="64, 64, 3, 3" offset="493856" size="147456" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.resnets.0.conv2.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.resnets.0.conv2.weight">
<dim>64</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="319" name="/encoder/mid_block/resnets.0/conv2/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/conv2/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="320" name="encoder.mid_block.resnets.0.conv2.bias" type="Const" version="opset1">
<data element_type="f32" shape="64" offset="641312" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.resnets.0.conv2.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.resnets.0.conv2.bias">
<dim>64</dim>
</port>
</output>
</layer>
<layer id="321" name="Constant_47767" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47767" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="322" name="ShapeOf_47773" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="123928" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47773" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="323" name="ShapeOf_47765" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47765" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="324" name="ShapeOf_47766" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47766" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="325" name="Constant_47768" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47768" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="326" name="Subtract_47769" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47768, Subtract_47769" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="327" name="Broadcast_47770" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47770, Constant_47767" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="328" name="Concat_47774" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47774, Constant_47767, ShapeOf_47773" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="329" name="Reshape_47775" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47775" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="330" name="/encoder/mid_block/resnets.0/conv2/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/conv2/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/conv2/Conv_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="331" name="/encoder/mid_block/resnets.0/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/Add_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="332" name="/encoder/mid_block/resnets.0/Constant" type="Const" version="opset1">
<data element_type="f32" shape="" offset="78384" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/Constant" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.0/Constant_output_0" />
</output>
</layer>
<layer id="333" name="/encoder/mid_block/resnets.0/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.0/Div" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.0/Div_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="334" name="/encoder/mid_block/attentions.0/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="335" name="/encoder/mid_block/attentions.0/Constant" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_output_0" />
</output>
</layer>
<layer id="336" name="Constant_47784" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47784" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="337" name="/encoder/mid_block/attentions.0/Gather" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant, /encoder/mid_block/attentions.0/Gather, Constant_47784" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_output_0" />
</output>
</layer>
<layer id="338" name="Constant_128" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_128" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_272">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="339" name="/encoder/mid_block/attentions.0/Unsqueeze" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze, Constant_128" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="340" name="/encoder/mid_block/attentions.0/Shape_1" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_1_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="341" name="/encoder/mid_block/attentions.0/Constant_1" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_1" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_1_output_0" />
</output>
</layer>
<layer id="342" name="Constant_47788" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47788" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="343" name="/encoder/mid_block/attentions.0/Gather_1" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_1, /encoder/mid_block/attentions.0/Gather_1, Constant_47788" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_1_output_0" />
</output>
</layer>
<layer id="344" name="Constant_130" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_130" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_274">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="345" name="/encoder/mid_block/attentions.0/Unsqueeze_1" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_1, Constant_130" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_1_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="346" name="/encoder/mid_block/attentions.0/Shape_2" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_2" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_2_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="347" name="/encoder/mid_block/attentions.0/Constant_2" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_2" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_2_output_0" />
</output>
</layer>
<layer id="348" name="Constant_47792" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47792" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="349" name="/encoder/mid_block/attentions.0/Gather_2" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_2, /encoder/mid_block/attentions.0/Gather_2, Constant_47792" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_2_output_0" />
</output>
</layer>
<layer id="350" name="/encoder/mid_block/attentions.0/Shape_3" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_3" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_3_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="351" name="/encoder/mid_block/attentions.0/Constant_3" type="Const" version="opset1">
<data element_type="i64" shape="" offset="641568" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_3" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_3_output_0" />
</output>
</layer>
<layer id="352" name="Constant_47796" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47796" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="353" name="/encoder/mid_block/attentions.0/Gather_3" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_3, /encoder/mid_block/attentions.0/Gather_3, Constant_47796" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_3_output_0" />
</output>
</layer>
<layer id="354" name="/encoder/mid_block/attentions.0/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Mul" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Mul_output_0" />
</output>
</layer>
<layer id="355" name="Constant_132" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_132" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_276">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="356" name="/encoder/mid_block/attentions.0/Unsqueeze_2" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_2, Constant_132" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_2_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="357" name="/encoder/mid_block/attentions.0/Concat" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Concat_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="358" name="/encoder/mid_block/attentions.0/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="359" name="/encoder/mid_block/attentions.0/group_norm/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="4120" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/group_norm/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="360" name="/encoder/mid_block/attentions.0/group_norm/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/group_norm/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="361" name="Constant_47897" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47897" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="362" name="MVN_47898" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_47898" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="363" name="/encoder/mid_block/attentions.0/group_norm/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4144" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/attentions.0/group_norm/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="364" name="Constant_47901" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47901" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="365" name="ShapeOf_47907" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47907" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="366" name="ShapeOf_47899" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47899" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="367" name="ShapeOf_47900" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47900" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="368" name="Constant_47902" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47902" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="369" name="Subtract_47903" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47902, Subtract_47903" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="370" name="Broadcast_47904" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47904, Constant_47901" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="371" name="Concat_47908" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47908, Constant_47901, ShapeOf_47907" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="372" name="Reshape_47909" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47909" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="373" name="Multiply_47912" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_47912" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="374" name="/encoder/mid_block/attentions.0/group_norm/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4272" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/attentions.0/group_norm/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="375" name="Constant_47913" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47913" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="376" name="ShapeOf_47919" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_47919" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="377" name="Constant_47914" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47914" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="378" name="Subtract_47915" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47914, Subtract_47915" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="379" name="Broadcast_47916" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_47916, Constant_47913" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="380" name="Concat_47920" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_47920, Constant_47913, ShapeOf_47919" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="381" name="Reshape_47921" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_47921" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="382" name="/encoder/mid_block/attentions.0/group_norm/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/group_norm/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="383" name="/encoder/mid_block/attentions.0/group_norm/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/group_norm/Shape_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="384" name="/encoder/mid_block/attentions.0/group_norm/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/group_norm/Reshape_1_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="385" name="onnx::Mul_598" type="Const" version="opset1">
<data element_type="f32" shape="64, 1" offset="197920" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Mul_598" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Mul_598">
<dim>64</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="386" name="/encoder/mid_block/attentions.0/group_norm/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/group_norm/Mul_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="387" name="onnx::Add_599" type="Const" version="opset1">
<data element_type="f32" shape="64, 1" offset="198176" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::Add_599" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::Add_599">
<dim>64</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="388" name="/encoder/mid_block/attentions.0/group_norm/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/group_norm/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/group_norm/Add_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="389" name="onnx::MatMul_600" type="Const" version="opset1">
<data element_type="f32" shape="64, 64" offset="641576" size="16384" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::MatMul_600" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::MatMul_600">
<dim>64</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="390" name="/encoder/mid_block/attentions.0/to_q/MatMul" type="MatMul" version="opset1">
<data transpose_a="true" transpose_b="false" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose_1, /encoder/mid_block/attentions.0/to_q/MatMul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/to_q/MatMul_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="391" name="/encoder/mid_block/attentions.0/to_q/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/to_q/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/to_q/Add_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="392" name="/encoder/mid_block/attentions.0/Shape_5" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_5" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_5_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="393" name="/encoder/mid_block/attentions.0/Constant_5" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_5" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_5_output_0" />
</output>
</layer>
<layer id="394" name="Constant_47945" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47945" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="395" name="/encoder/mid_block/attentions.0/Gather_5" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_5, /encoder/mid_block/attentions.0/Gather_5, Constant_47945" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_5_output_0" />
</output>
</layer>
<layer id="396" name="Constant_169" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_169" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_320">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="397" name="/encoder/mid_block/attentions.0/Unsqueeze_3" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_3, Constant_169" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_3_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="398" name="/encoder/mid_block/attentions.0/Shape_6" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_6" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_6_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="399" name="/encoder/mid_block/attentions.0/Constant_6" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_6" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_6_output_0" />
</output>
</layer>
<layer id="400" name="Constant_47949" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47949" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="401" name="/encoder/mid_block/attentions.0/Gather_6" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_6, /encoder/mid_block/attentions.0/Gather_6, Constant_47949" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_6_output_0" />
</output>
</layer>
<layer id="402" name="Constant_171" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_171" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_322">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="403" name="/encoder/mid_block/attentions.0/Unsqueeze_4" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_4, Constant_171" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_4_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="404" name="/encoder/mid_block/attentions.0/Constant_9" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_9" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_9_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="405" name="/encoder/mid_block/attentions.0/Shape_7" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_7" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_7_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="406" name="/encoder/mid_block/attentions.0/Constant_7" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_7" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_7_output_0" />
</output>
</layer>
<layer id="407" name="Constant_47953" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47953" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="408" name="/encoder/mid_block/attentions.0/Gather_7" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_7, /encoder/mid_block/attentions.0/Gather_7, Constant_47953" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_7_output_0" />
</output>
</layer>
<layer id="409" name="/encoder/mid_block/attentions.0/Constant_8" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_8" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_8_output_0" />
</output>
</layer>
<layer id="410" name="/encoder/mid_block/attentions.0/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_8, /encoder/mid_block/attentions.0/Div" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Div_output_0" />
</output>
</layer>
<layer id="411" name="/encoder/mid_block/attentions.0/Cast" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast" />
</rt_info>
<input>
<port id="0" precision="I64" />
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Cast_output_0" />
</output>
</layer>
<layer id="412" name="/encoder/mid_block/attentions.0/Cast_1" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_1" />
</rt_info>
<input>
<port id="0" precision="I64" />
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Cast_1_output_0" />
</output>
</layer>
<layer id="413" name="Constant_174" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_174" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_326">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="414" name="/encoder/mid_block/attentions.0/Unsqueeze_5" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_5, Constant_174" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_5_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="415" name="/encoder/mid_block/attentions.0/Concat_1" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_1, /encoder/mid_block/attentions.0/Constant_9" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="4" precision="I64" names="/encoder/mid_block/attentions.0/Concat_1_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="416" name="/encoder/mid_block/attentions.0/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_1_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="417" name="Constant_48036" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="657960" size="32" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48036" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="418" name="/encoder/mid_block/attentions.0/Transpose_2" type="Transpose" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose_2" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Transpose_2_output_0">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="419" name="Constant_179" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_179" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_331">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="420" name="/encoder/mid_block/attentions.0/Unsqueeze_6" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_6, Constant_179" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_6_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="421" name="Constant_181" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_181" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_333">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="422" name="/encoder/mid_block/attentions.0/Unsqueeze_7" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_7, Constant_181" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_7_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="423" name="Constant_183" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_183" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_335">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="424" name="/encoder/mid_block/attentions.0/Unsqueeze_8" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_8, Constant_183" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_8_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="425" name="/encoder/mid_block/attentions.0/Concat_2" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_2" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Concat_2_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="426" name="/encoder/mid_block/attentions.0/Reshape_2" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_2" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_2_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="427" name="encoder.mid_block.attentions.0.to_k.bias" type="Const" version="opset1">
<data element_type="f32" shape="64" offset="657992" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.attentions.0.to_k.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.attentions.0.to_k.bias">
<dim>64</dim>
</port>
</output>
</layer>
<layer id="428" name="onnx::MatMul_601" type="Const" version="opset1">
<data element_type="f32" shape="64, 64" offset="658248" size="16384" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::MatMul_601" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::MatMul_601">
<dim>64</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="429" name="/encoder/mid_block/attentions.0/to_k/MatMul" type="MatMul" version="opset1">
<data transpose_a="true" transpose_b="false" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose_1, /encoder/mid_block/attentions.0/to_k/MatMul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/to_k/MatMul_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="430" name="/encoder/mid_block/attentions.0/to_k/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/to_k/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/to_k/Add_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="431" name="/encoder/mid_block/attentions.0/Shape_8" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_8" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_8_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="432" name="/encoder/mid_block/attentions.0/Constant_10" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_10" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_10_output_0" />
</output>
</layer>
<layer id="433" name="Constant_48116" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48116" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="434" name="/encoder/mid_block/attentions.0/Gather_8" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_10, /encoder/mid_block/attentions.0/Gather_8, Constant_48116" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_8_output_0" />
</output>
</layer>
<layer id="435" name="Constant_200" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_200" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_352">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="436" name="/encoder/mid_block/attentions.0/Unsqueeze_9" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_9, Constant_200" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_9_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="437" name="/encoder/mid_block/attentions.0/Shape_9" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_9" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_9_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="438" name="/encoder/mid_block/attentions.0/Constant_11" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_11" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_11_output_0" />
</output>
</layer>
<layer id="439" name="Constant_48120" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48120" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="440" name="/encoder/mid_block/attentions.0/Gather_9" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_11, /encoder/mid_block/attentions.0/Gather_9, Constant_48120" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_9_output_0" />
</output>
</layer>
<layer id="441" name="Constant_202" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_202" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_354">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="442" name="/encoder/mid_block/attentions.0/Unsqueeze_10" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_10, Constant_202" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_10_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="443" name="/encoder/mid_block/attentions.0/Constant_14" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_14" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_14_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="444" name="/encoder/mid_block/attentions.0/Shape_10" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_10" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_10_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="445" name="/encoder/mid_block/attentions.0/Constant_12" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_12" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_12_output_0" />
</output>
</layer>
<layer id="446" name="Constant_48124" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48124" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="447" name="/encoder/mid_block/attentions.0/Gather_10" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_12, /encoder/mid_block/attentions.0/Gather_10, Constant_48124" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_10_output_0" />
</output>
</layer>
<layer id="448" name="/encoder/mid_block/attentions.0/Constant_13" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_13" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_13_output_0" />
</output>
</layer>
<layer id="449" name="/encoder/mid_block/attentions.0/Div_1" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_13, /encoder/mid_block/attentions.0/Div_1" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Div_1_output_0" />
</output>
</layer>
<layer id="450" name="/encoder/mid_block/attentions.0/Cast_2" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_2" />
</rt_info>
<input>
<port id="0" precision="I64" />
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Cast_2_output_0" />
</output>
</layer>
<layer id="451" name="/encoder/mid_block/attentions.0/Cast_3" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_3" />
</rt_info>
<input>
<port id="0" precision="I64" />
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Cast_3_output_0" />
</output>
</layer>
<layer id="452" name="Constant_205" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_205" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_358">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="453" name="/encoder/mid_block/attentions.0/Unsqueeze_11" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_11, Constant_205" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_11_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="454" name="/encoder/mid_block/attentions.0/Concat_3" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_3, /encoder/mid_block/attentions.0/Constant_14" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="4" precision="I64" names="/encoder/mid_block/attentions.0/Concat_3_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="455" name="/encoder/mid_block/attentions.0/Reshape_3" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_3" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_3_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="456" name="Constant_48207" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="657960" size="32" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48207" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="457" name="/encoder/mid_block/attentions.0/Transpose_3" type="Transpose" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose_3" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Transpose_3_output_0">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="458" name="Constant_210" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_210" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_363">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="459" name="/encoder/mid_block/attentions.0/Unsqueeze_12" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_12, Constant_210" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_12_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="460" name="Constant_212" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_212" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_365">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="461" name="/encoder/mid_block/attentions.0/Unsqueeze_13" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_13, Constant_212" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_13_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="462" name="Constant_214" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_214" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_367">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="463" name="/encoder/mid_block/attentions.0/Unsqueeze_14" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_14, Constant_214" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_14_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="464" name="/encoder/mid_block/attentions.0/Concat_4" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_4" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Concat_4_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="465" name="/encoder/mid_block/attentions.0/Reshape_4" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_4" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_4_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="466" name="/encoder/mid_block/attentions.0/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/MatMul, /encoder/mid_block/attentions.0/Transpose_5" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/MatMul_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="467" name="/encoder/mid_block/attentions.0/Constant_23" type="Const" version="opset1">
<data element_type="f32" shape="" offset="674632" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_23" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/attentions.0/Constant_23_output_0" />
</output>
</layer>
<layer id="468" name="/encoder/mid_block/attentions.0/Mul_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Mul_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Mul_1_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="469" name="Constant_48476" type="Const" version="opset1">
<data element_type="f32" shape="" offset="78484" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48476" />
</rt_info>
<output>
<port id="0" precision="FP32" />
</output>
</layer>
<layer id="470" name="/encoder/mid_block/attentions.0/Shape_14" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_14" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_14_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="471" name="/encoder/mid_block/attentions.0/Constant_20" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_20" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_20_output_0" />
</output>
</layer>
<layer id="472" name="Constant_48458" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48458" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="473" name="/encoder/mid_block/attentions.0/Gather_14" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_20, /encoder/mid_block/attentions.0/Gather_14, Constant_48458" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_14_output_0" />
</output>
</layer>
<layer id="474" name="Constant_258" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_258" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_412">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="475" name="/encoder/mid_block/attentions.0/Unsqueeze_21" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_21, Constant_258" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_21_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="476" name="/encoder/mid_block/attentions.0/Shape_15" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_15" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_15_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="477" name="/encoder/mid_block/attentions.0/Constant_21" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_21" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_21_output_0" />
</output>
</layer>
<layer id="478" name="Constant_48462" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48462" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="479" name="/encoder/mid_block/attentions.0/Gather_15" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_21, /encoder/mid_block/attentions.0/Gather_15, Constant_48462" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_15_output_0" />
</output>
</layer>
<layer id="480" name="Constant_260" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_260" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_414">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="481" name="/encoder/mid_block/attentions.0/Unsqueeze_22" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_22, Constant_260" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_22_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="482" name="/encoder/mid_block/attentions.0/Shape_16" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_16" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_16_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="483" name="/encoder/mid_block/attentions.0/Constant_22" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_22" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_22_output_0" />
</output>
</layer>
<layer id="484" name="Constant_48466" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48466" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="485" name="/encoder/mid_block/attentions.0/Gather_16" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_22, /encoder/mid_block/attentions.0/Gather_16, Constant_48466" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_16_output_0" />
</output>
</layer>
<layer id="486" name="Constant_262" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_262" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_416">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="487" name="/encoder/mid_block/attentions.0/Unsqueeze_23" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_23, Constant_262" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_23_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="488" name="/encoder/mid_block/attentions.0/Concat_7" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_7" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Concat_7_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="489" name="/encoder/mid_block/attentions.0/ConstantOfShape" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/ConstantOfShape" />
</rt_info>
<input>
<port id="0" precision="FP32" />
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/ConstantOfShape_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="490" name="/encoder/mid_block/attentions.0/Constant_24" type="Const" version="opset1">
<data element_type="f32" shape="" offset="78484" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_24" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/attentions.0/Constant_24_output_0" />
</output>
</layer>
<layer id="491" name="/encoder/mid_block/attentions.0/Mul_2" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Mul_2" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Mul_2_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="492" name="/encoder/mid_block/attentions.0/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Add_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="493" name="/encoder/mid_block/attentions.0/Cast_6" type="Convert" version="opset1">
<data destination_type="f32" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_6" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/mid_block/attentions.0/Cast_6_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="494" name="/encoder/mid_block/attentions.0/Softmax" type="SoftMax" version="opset8">
<data axis="-1" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Softmax" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/mid_block/attentions.0/Softmax_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="495" name="/encoder/mid_block/attentions.0/Cast_7" type="Convert" version="opset1">
<data destination_type="f32" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_7" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/mid_block/attentions.0/Cast_7_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="496" name="encoder.mid_block.attentions.0.to_v.bias" type="Const" version="opset1">
<data element_type="f32" shape="64" offset="674636" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.attentions.0.to_v.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.attentions.0.to_v.bias">
<dim>64</dim>
</port>
</output>
</layer>
<layer id="497" name="onnx::MatMul_602" type="Const" version="opset1">
<data element_type="f32" shape="64, 64" offset="674892" size="16384" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::MatMul_602" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::MatMul_602">
<dim>64</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="498" name="/encoder/mid_block/attentions.0/to_v/MatMul" type="MatMul" version="opset1">
<data transpose_a="true" transpose_b="false" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose_1, /encoder/mid_block/attentions.0/to_v/MatMul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/to_v/MatMul_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="499" name="/encoder/mid_block/attentions.0/to_v/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/to_v/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/to_v/Add_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="500" name="/encoder/mid_block/attentions.0/Shape_11" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_11" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_11_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="501" name="/encoder/mid_block/attentions.0/Constant_15" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_15" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_15_output_0" />
</output>
</layer>
<layer id="502" name="Constant_48287" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48287" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="503" name="/encoder/mid_block/attentions.0/Gather_11" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_15, /encoder/mid_block/attentions.0/Gather_11, Constant_48287" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_11_output_0" />
</output>
</layer>
<layer id="504" name="Constant_231" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_231" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_384">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="505" name="/encoder/mid_block/attentions.0/Unsqueeze_15" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_15, Constant_231" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_15_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="506" name="/encoder/mid_block/attentions.0/Shape_12" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_12" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_12_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="507" name="/encoder/mid_block/attentions.0/Constant_16" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_16" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_16_output_0" />
</output>
</layer>
<layer id="508" name="Constant_48291" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48291" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="509" name="/encoder/mid_block/attentions.0/Gather_12" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_16, /encoder/mid_block/attentions.0/Gather_12, Constant_48291" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_12_output_0" />
</output>
</layer>
<layer id="510" name="Constant_233" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_233" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_386">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="511" name="/encoder/mid_block/attentions.0/Unsqueeze_16" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_16, Constant_233" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_16_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="512" name="/encoder/mid_block/attentions.0/Constant_19" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_19" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_19_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="513" name="/encoder/mid_block/attentions.0/Shape_13" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_13" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_13_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="514" name="/encoder/mid_block/attentions.0/Constant_17" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_17" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_17_output_0" />
</output>
</layer>
<layer id="515" name="Constant_48295" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48295" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="516" name="/encoder/mid_block/attentions.0/Gather_13" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_17, /encoder/mid_block/attentions.0/Gather_13, Constant_48295" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_13_output_0" />
</output>
</layer>
<layer id="517" name="/encoder/mid_block/attentions.0/Constant_18" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_18" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_18_output_0" />
</output>
</layer>
<layer id="518" name="/encoder/mid_block/attentions.0/Div_2" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_18, /encoder/mid_block/attentions.0/Div_2" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Div_2_output_0" />
</output>
</layer>
<layer id="519" name="/encoder/mid_block/attentions.0/Cast_4" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_4" />
</rt_info>
<input>
<port id="0" precision="I64" />
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Cast_4_output_0" />
</output>
</layer>
<layer id="520" name="/encoder/mid_block/attentions.0/Cast_5" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_5" />
</rt_info>
<input>
<port id="0" precision="I64" />
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Cast_5_output_0" />
</output>
</layer>
<layer id="521" name="Constant_236" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_236" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_390">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="522" name="/encoder/mid_block/attentions.0/Unsqueeze_17" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_17, Constant_236" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_17_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="523" name="/encoder/mid_block/attentions.0/Concat_5" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_5, /encoder/mid_block/attentions.0/Constant_19" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="4" precision="I64" names="/encoder/mid_block/attentions.0/Concat_5_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="524" name="/encoder/mid_block/attentions.0/Reshape_5" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_5" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_5_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="525" name="Constant_48378" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="657960" size="32" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48378" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="526" name="/encoder/mid_block/attentions.0/Transpose_4" type="Transpose" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose_4" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Transpose_4_output_0">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="527" name="Constant_241" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_241" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_395">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="528" name="/encoder/mid_block/attentions.0/Unsqueeze_18" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_18, Constant_241" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_18_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="529" name="Constant_243" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_243" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_397">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="530" name="/encoder/mid_block/attentions.0/Unsqueeze_19" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_19, Constant_243" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_19_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="531" name="Constant_245" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_245" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_399">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="532" name="/encoder/mid_block/attentions.0/Unsqueeze_20" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_20, Constant_245" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_20_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="533" name="/encoder/mid_block/attentions.0/Concat_6" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_6" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Concat_6_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="534" name="/encoder/mid_block/attentions.0/Reshape_6" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_6" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_6_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="535" name="/encoder/mid_block/attentions.0/MatMul_1" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="false" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/MatMul_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/MatMul_1_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="536" name="/encoder/mid_block/attentions.0/Shape_17" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_17" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_17_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="537" name="/encoder/mid_block/attentions.0/Constant_25" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_25" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_25_output_0" />
</output>
</layer>
<layer id="538" name="Constant_48506" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48506" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="539" name="/encoder/mid_block/attentions.0/Gather_17" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_25, /encoder/mid_block/attentions.0/Gather_17, Constant_48506" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_17_output_0" />
</output>
</layer>
<layer id="540" name="/encoder/mid_block/attentions.0/Constant_28" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_28" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_28_output_0" />
</output>
</layer>
<layer id="541" name="/encoder/mid_block/attentions.0/Div_3" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_28, /encoder/mid_block/attentions.0/Div_3" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64" />
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Div_3_output_0" />
</output>
</layer>
<layer id="542" name="/encoder/mid_block/attentions.0/Cast_8" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_8" />
</rt_info>
<input>
<port id="0" precision="I64" />
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Cast_8_output_0" />
</output>
</layer>
<layer id="543" name="/encoder/mid_block/attentions.0/Cast_9" type="Convert" version="opset1">
<data destination_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Cast_9" />
</rt_info>
<input>
<port id="0" precision="I64" />
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Cast_9_output_0" />
</output>
</layer>
<layer id="544" name="Constant_290" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_290" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_445">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="545" name="/encoder/mid_block/attentions.0/Unsqueeze_24" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_24, Constant_290" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_24_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="546" name="/encoder/mid_block/attentions.0/Constant_29" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_29" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_29_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="547" name="/encoder/mid_block/attentions.0/Shape_18" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_18" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_18_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="548" name="/encoder/mid_block/attentions.0/Constant_26" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_26" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_26_output_0" />
</output>
</layer>
<layer id="549" name="Constant_48510" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48510" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="550" name="/encoder/mid_block/attentions.0/Gather_18" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_26, /encoder/mid_block/attentions.0/Gather_18, Constant_48510" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_18_output_0" />
</output>
</layer>
<layer id="551" name="Constant_293" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_293" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_449">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="552" name="/encoder/mid_block/attentions.0/Unsqueeze_25" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_25, Constant_293" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_25_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="553" name="/encoder/mid_block/attentions.0/Shape_19" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_19" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_19_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="554" name="/encoder/mid_block/attentions.0/Constant_27" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_27" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_27_output_0" />
</output>
</layer>
<layer id="555" name="Constant_48514" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48514" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="556" name="/encoder/mid_block/attentions.0/Gather_19" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_27, /encoder/mid_block/attentions.0/Gather_19, Constant_48514" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_19_output_0" />
</output>
</layer>
<layer id="557" name="Constant_295" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_295" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_451">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="558" name="/encoder/mid_block/attentions.0/Unsqueeze_26" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_26, Constant_295" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_26_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="559" name="/encoder/mid_block/attentions.0/Concat_8" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_8, /encoder/mid_block/attentions.0/Constant_29" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="4" precision="I64" names="/encoder/mid_block/attentions.0/Concat_8_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="560" name="/encoder/mid_block/attentions.0/Reshape_7" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_7" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_7_output_0">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="561" name="Constant_48597" type="Const" version="opset1">
<data element_type="i64" shape="4" offset="657960" size="32" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48597" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="562" name="/encoder/mid_block/attentions.0/Transpose_6" type="Transpose" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose_6" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Transpose_6_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="563" name="Constant_300" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_300" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_456">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="564" name="/encoder/mid_block/attentions.0/Unsqueeze_27" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_27, Constant_300" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_27_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="565" name="Constant_302" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_302" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_458">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="566" name="/encoder/mid_block/attentions.0/Unsqueeze_28" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_28, Constant_302" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_28_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="567" name="Constant_304" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_304" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_460">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="568" name="/encoder/mid_block/attentions.0/Unsqueeze_29" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_29, Constant_304" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_29_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="569" name="/encoder/mid_block/attentions.0/Concat_9" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_9" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Concat_9_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="570" name="/encoder/mid_block/attentions.0/Reshape_8" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_8" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_8_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="571" name="onnx::MatMul_609" type="Const" version="opset1">
<data element_type="f32" shape="64, 64" offset="691276" size="16384" />
<rt_info>
<attribute name="fused_names" version="0" value="onnx::MatMul_609" />
</rt_info>
<output>
<port id="0" precision="FP32" names="onnx::MatMul_609">
<dim>64</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="572" name="/encoder/mid_block/attentions.0/to_out.0/MatMul" type="MatMul" version="opset1">
<data transpose_a="false" transpose_b="false" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/to_out.0/MatMul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/to_out.0/MatMul_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="573" name="/encoder/mid_block/attentions.0/to_out.0/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/to_out.0/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>64</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/to_out.0/Add_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="574" name="Constant_48677" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="707660" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48677" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="575" name="/encoder/mid_block/attentions.0/Transpose_7" type="Transpose" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose_7" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Transpose_7_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="576" name="Constant_47885" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="707660" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47885" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="577" name="/encoder/mid_block/attentions.0/Transpose" type="Transpose" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Transpose" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Transpose_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</output>
</layer>
<layer id="578" name="/encoder/mid_block/attentions.0/Shape_4" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Shape_4" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>64</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/attentions.0/Shape_4_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="579" name="/encoder/mid_block/attentions.0/Constant_4" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_4" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/attentions.0/Constant_4_output_0" />
</output>
</layer>
<layer id="580" name="Constant_47889" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_47889" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="581" name="/encoder/mid_block/attentions.0/Gather_4" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_4, /encoder/mid_block/attentions.0/Gather_4, Constant_47889" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/encoder/mid_block/attentions.0/Gather_4_output_0" />
</output>
</layer>
<layer id="582" name="Constant_311" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_311" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_468">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="583" name="/encoder/mid_block/attentions.0/Unsqueeze_30" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_30, Constant_311" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_30_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="584" name="Constant_313" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_313" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_470">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="585" name="/encoder/mid_block/attentions.0/Unsqueeze_31" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_31, Constant_313" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_31_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="586" name="Constant_315" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_315" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_472">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="587" name="/encoder/mid_block/attentions.0/Unsqueeze_32" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_32, Constant_315" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_32_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="588" name="Constant_317" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_317" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_474">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="589" name="/encoder/mid_block/attentions.0/Unsqueeze_33" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Unsqueeze_33, Constant_317" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/encoder/mid_block/attentions.0/Unsqueeze_33_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="590" name="/encoder/mid_block/attentions.0/Concat_10" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Concat_10" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="4" precision="I64" names="/encoder/mid_block/attentions.0/Concat_10_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="591" name="/encoder/mid_block/attentions.0/Reshape_9" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Reshape_9" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Reshape_9_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="592" name="/encoder/mid_block/attentions.0/Add_1" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Add_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Add_1_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="593" name="/encoder/mid_block/attentions.0/Constant_30" type="Const" version="opset1">
<data element_type="f32" shape="" offset="78384" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Constant_30" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/attentions.0/Constant_30_output_0" />
</output>
</layer>
<layer id="594" name="/encoder/mid_block/attentions.0/Div_4" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/attentions.0/Div_4" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/attentions.0/Div_4_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="595" name="/encoder/mid_block/resnets.1/norm1/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="4120" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/resnets.1/norm1/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="596" name="/encoder/mid_block/resnets.1/norm1/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm1/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="597" name="Constant_48716" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48716" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="598" name="MVN_48717" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_48717" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="599" name="/encoder/mid_block/resnets.1/norm1/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4144" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.1/norm1/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="600" name="Constant_48720" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48720" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="601" name="ShapeOf_48726" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48726" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="602" name="ShapeOf_48718" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48718" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="603" name="ShapeOf_48719" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48719" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="604" name="Constant_48721" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48721" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="605" name="Subtract_48722" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48721, Subtract_48722" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="606" name="Broadcast_48723" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_48723, Constant_48720" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="607" name="Concat_48727" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_48727, Constant_48720, ShapeOf_48726" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="608" name="Reshape_48728" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_48728" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="609" name="Multiply_48731" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_48731" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="610" name="/encoder/mid_block/resnets.1/norm1/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4272" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.1/norm1/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="611" name="Constant_48732" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48732" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="612" name="ShapeOf_48738" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48738" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="613" name="Constant_48733" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48733" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="614" name="Subtract_48734" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48733, Subtract_48734" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="615" name="Broadcast_48735" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_48735, Constant_48732" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="616" name="Concat_48739" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_48739, Constant_48732, ShapeOf_48738" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="617" name="Reshape_48740" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_48740" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="618" name="/encoder/mid_block/resnets.1/norm1/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm1/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="619" name="/encoder/mid_block/resnets.1/norm1/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/resnets.1/norm1/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="620" name="/encoder/mid_block/resnets.1/norm1/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm1/Reshape_1_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="621" name="/encoder/mid_block/resnets.1/norm1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm1/Mul_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="622" name="/encoder/mid_block/resnets.1/norm1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm1/Add_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="623" name="/encoder/mid_block/resnets.1/nonlinearity/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/nonlinearity/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/mid_block/resnets.1/nonlinearity/Sigmoid_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="624" name="/encoder/mid_block/resnets.1/nonlinearity/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/nonlinearity/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/nonlinearity/Mul_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="625" name="encoder.mid_block.resnets.1.conv1.weight" type="Const" version="opset1">
<data element_type="f32" shape="64, 64, 3, 3" offset="707684" size="147456" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.resnets.1.conv1.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.resnets.1.conv1.weight">
<dim>64</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="626" name="/encoder/mid_block/resnets.1/conv1/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/conv1/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="627" name="encoder.mid_block.resnets.1.conv1.bias" type="Const" version="opset1">
<data element_type="f32" shape="64" offset="855140" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.resnets.1.conv1.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.resnets.1.conv1.bias">
<dim>64</dim>
</port>
</output>
</layer>
<layer id="628" name="Constant_48759" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48759" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="629" name="ShapeOf_48765" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="123928" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48765" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="630" name="ShapeOf_48757" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48757" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="631" name="ShapeOf_48758" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48758" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="632" name="Constant_48760" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48760" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="633" name="Subtract_48761" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48760, Subtract_48761" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="634" name="Broadcast_48762" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_48762, Constant_48759" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="635" name="Concat_48766" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_48766, Constant_48759, ShapeOf_48765" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="636" name="Reshape_48767" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_48767" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="637" name="/encoder/mid_block/resnets.1/conv1/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/conv1/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/conv1/Conv_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="638" name="/encoder/mid_block/resnets.1/norm2/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="4120" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/mid_block/resnets.1/norm2/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="639" name="/encoder/mid_block/resnets.1/norm2/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm2/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="640" name="Constant_48777" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48777" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="641" name="MVN_48778" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_48778" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="642" name="/encoder/mid_block/resnets.1/norm2/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4144" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.1/norm2/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="643" name="Constant_48781" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48781" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="644" name="ShapeOf_48787" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48787" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="645" name="ShapeOf_48779" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48779" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="646" name="ShapeOf_48780" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48780" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="647" name="Constant_48782" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48782" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="648" name="Subtract_48783" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48782, Subtract_48783" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="649" name="Broadcast_48784" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_48784, Constant_48781" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="650" name="Concat_48788" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_48788, Constant_48781, ShapeOf_48787" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="651" name="Reshape_48789" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_48789" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="652" name="Multiply_48792" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_48792" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="653" name="/encoder/mid_block/resnets.1/norm2/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4272" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.1/norm2/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="654" name="Constant_48793" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48793" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="655" name="ShapeOf_48799" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48799" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="656" name="Constant_48794" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48794" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="657" name="Subtract_48795" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48794, Subtract_48795" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="658" name="Broadcast_48796" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_48796, Constant_48793" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="659" name="Concat_48800" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_48800, Constant_48793, ShapeOf_48799" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="660" name="Reshape_48801" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_48801" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="661" name="/encoder/mid_block/resnets.1/norm2/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm2/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="662" name="/encoder/mid_block/resnets.1/norm2/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/mid_block/resnets.1/norm2/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="663" name="/encoder/mid_block/resnets.1/norm2/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm2/Reshape_1_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="664" name="/encoder/mid_block/resnets.1/norm2/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm2/Mul_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="665" name="/encoder/mid_block/resnets.1/norm2/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/norm2/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/norm2/Add_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="666" name="/encoder/mid_block/resnets.1/nonlinearity_1/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/nonlinearity_1/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/mid_block/resnets.1/nonlinearity_1/Sigmoid_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="667" name="/encoder/mid_block/resnets.1/nonlinearity_1/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/nonlinearity_1/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/nonlinearity_1/Mul_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="668" name="encoder.mid_block.resnets.1.conv2.weight" type="Const" version="opset1">
<data element_type="f32" shape="64, 64, 3, 3" offset="855396" size="147456" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.resnets.1.conv2.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.resnets.1.conv2.weight">
<dim>64</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="669" name="/encoder/mid_block/resnets.1/conv2/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/conv2/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="670" name="encoder.mid_block.resnets.1.conv2.bias" type="Const" version="opset1">
<data element_type="f32" shape="64" offset="1002852" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.mid_block.resnets.1.conv2.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.mid_block.resnets.1.conv2.bias">
<dim>64</dim>
</port>
</output>
</layer>
<layer id="671" name="Constant_48820" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48820" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="672" name="ShapeOf_48826" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="123928" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48826" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="673" name="ShapeOf_48818" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48818" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="674" name="ShapeOf_48819" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48819" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="675" name="Constant_48821" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48821" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="676" name="Subtract_48822" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48821, Subtract_48822" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="677" name="Broadcast_48823" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_48823, Constant_48820" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="678" name="Concat_48827" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_48827, Constant_48820, ShapeOf_48826" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="679" name="Reshape_48828" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_48828" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>64</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="680" name="/encoder/mid_block/resnets.1/conv2/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/conv2/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/conv2/Conv_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="681" name="/encoder/mid_block/resnets.1/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/Add_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="682" name="/encoder/mid_block/resnets.1/Constant" type="Const" version="opset1">
<data element_type="f32" shape="" offset="78384" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/Constant" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/mid_block/resnets.1/Constant_output_0" />
</output>
</layer>
<layer id="683" name="/encoder/mid_block/resnets.1/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/mid_block/resnets.1/Div" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/encoder/mid_block/resnets.1/Div_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="684" name="/encoder/conv_norm_out/Constant" type="Const" version="opset1">
<data element_type="i64" shape="3" offset="4120" size="24" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/encoder/conv_norm_out/Constant_output_0">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="685" name="/encoder/conv_norm_out/Reshape" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/Reshape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/conv_norm_out/Reshape_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="686" name="Constant_48841" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48841" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="687" name="MVN_48842" type="MVN" version="opset6">
<data eps="9.9999999747524271e-07" normalize_variance="true" eps_mode="INSIDE_SQRT" />
<rt_info>
<attribute name="fused_names" version="0" value="MVN_48842" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="688" name="/encoder/conv_norm_out/Constant_1" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4144" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/Constant_1" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/conv_norm_out/Constant_1_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="689" name="Constant_48845" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48845" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="690" name="ShapeOf_48851" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48851" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="691" name="ShapeOf_48843" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48843" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="692" name="ShapeOf_48844" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48844" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="693" name="Constant_48846" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48846" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="694" name="Subtract_48847" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48846, Subtract_48847" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="695" name="Broadcast_48848" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_48848, Constant_48845" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="696" name="Concat_48852" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_48852, Constant_48845, ShapeOf_48851" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="697" name="Reshape_48853" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_48853" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="698" name="Multiply_48856" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_48856" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="699" name="/encoder/conv_norm_out/Constant_2" type="Const" version="opset1">
<data element_type="f32" shape="32" offset="4272" size="128" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/Constant_2" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/encoder/conv_norm_out/Constant_2_output_0">
<dim>32</dim>
</port>
</output>
</layer>
<layer id="700" name="Constant_48857" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48857" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="701" name="ShapeOf_48863" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4104" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48863" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="702" name="Constant_48858" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48858" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="703" name="Subtract_48859" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48858, Subtract_48859" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="704" name="Broadcast_48860" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_48860, Constant_48857" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="705" name="Concat_48864" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_48864, Constant_48857, ShapeOf_48863" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>3</dim>
</port>
</output>
</layer>
<layer id="706" name="Reshape_48865" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_48865" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>32</dim>
</port>
<port id="1" precision="I64">
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="707" name="/encoder/conv_norm_out/InstanceNormalization" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/InstanceNormalization" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>32</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/conv_norm_out/InstanceNormalization_output_0">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="708" name="/encoder/conv_norm_out/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/encoder/conv_norm_out/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="709" name="/encoder/conv_norm_out/Reshape_1" type="Reshape" version="opset1">
<data special_zero="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/Reshape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>32</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/conv_norm_out/Reshape_1_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="710" name="/encoder/conv_norm_out/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/conv_norm_out/Mul_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="711" name="/encoder/conv_norm_out/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_norm_out/Add" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>64</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/conv_norm_out/Add_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="712" name="/encoder/conv_act/Sigmoid" type="Sigmoid" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_act/Sigmoid" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/encoder/conv_act/Sigmoid_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="713" name="/encoder/conv_act/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_act/Mul" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/conv_act/Mul_output_0">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="714" name="encoder.conv_out.weight" type="Const" version="opset1">
<data element_type="f32" shape="8, 64, 3, 3" offset="1003108" size="18432" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.conv_out.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.conv_out.weight">
<dim>8</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</output>
</layer>
<layer id="715" name="/encoder/conv_out/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="1, 1" pads_end="1, 1" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_out/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>64</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>8</dim>
<dim>64</dim>
<dim>3</dim>
<dim>3</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="716" name="encoder.conv_out.bias" type="Const" version="opset1">
<data element_type="f32" shape="8" offset="1021540" size="32" />
<rt_info>
<attribute name="fused_names" version="0" value="encoder.conv_out.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="encoder.conv_out.bias">
<dim>8</dim>
</port>
</output>
</layer>
<layer id="717" name="Constant_48884" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48884" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="718" name="ShapeOf_48890" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="1021572" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48890" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="719" name="ShapeOf_48882" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48882" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="720" name="ShapeOf_48883" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48883" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="721" name="Constant_48885" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48885" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="722" name="Subtract_48886" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48885, Subtract_48886" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="723" name="Broadcast_48887" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_48887, Constant_48884" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="724" name="Concat_48891" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_48891, Constant_48884, ShapeOf_48890" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="725" name="Reshape_48892" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_48892" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>8</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>8</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="726" name="/encoder/conv_out/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/encoder/conv_out/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>8</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/encoder/conv_out/Conv_output_0">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="727" name="quant_conv.weight" type="Const" version="opset1">
<data element_type="f32" shape="8, 8, 1, 1" offset="1021580" size="256" />
<rt_info>
<attribute name="fused_names" version="0" value="quant_conv.weight" />
</rt_info>
<output>
<port id="0" precision="FP32" names="quant_conv.weight">
<dim>8</dim>
<dim>8</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="728" name="/quant_conv/Conv/WithoutBiases" type="Convolution" version="opset1">
<data strides="1, 1" dilations="1, 1" pads_begin="0, 0" pads_end="0, 0" auto_pad="explicit" />
<rt_info>
<attribute name="fused_names" version="0" value="/quant_conv/Conv/WithoutBiases" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>8</dim>
<dim>8</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="729" name="quant_conv.bias" type="Const" version="opset1">
<data element_type="f32" shape="8" offset="1021836" size="32" />
<rt_info>
<attribute name="fused_names" version="0" value="quant_conv.bias" />
</rt_info>
<output>
<port id="0" precision="FP32" names="quant_conv.bias">
<dim>8</dim>
</port>
</output>
</layer>
<layer id="730" name="Constant_48899" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48899" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="731" name="ShapeOf_48905" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="1021572" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48905" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="732" name="ShapeOf_48897" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48897" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="733" name="ShapeOf_48898" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48898" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="734" name="Constant_48900" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48900" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="735" name="Subtract_48901" type="Subtract" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48900, Subtract_48901" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="736" name="Broadcast_48902" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_48902, Constant_48899" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</output>
</layer>
<layer id="737" name="Concat_48906" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="Concat_48906, Constant_48899, ShapeOf_48905" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>2</dim>
</port>
</input>
<output>
<port id="3" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="738" name="Reshape_48907" type="Reshape" version="opset1">
<data special_zero="false" />
<rt_info>
<attribute name="fused_names" version="0" value="Reshape_48907" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>8</dim>
</port>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>1</dim>
<dim>8</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</output>
</layer>
<layer id="739" name="/quant_conv/Conv" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/quant_conv/Conv" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
<dim>8</dim>
<dim>1</dim>
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/quant_conv/Conv_output_0">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="740" name="/Constant_1" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_1" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_1_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="741" name="/Shape" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/Shape" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/Shape_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="742" name="/Constant" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="743" name="Constant_48913" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48913" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="744" name="/Gather" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant, /Gather, Constant_48913" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/Gather_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="745" name="/Constant_2" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_2" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_2_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="746" name="/Add" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/Add, /Constant_2" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/Add_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="747" name="/Constant_3" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_3" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_3_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="748" name="/Div" type="Divide" version="opset1">
<data auto_broadcast="numpy" m_pythondiv="true" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_3, /Div" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/Div_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="749" name="/Constant_4" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_4" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_4_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="750" name="/Mul" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_4, /Mul" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/Mul_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="751" name="Constant_48922" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48922" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="752" name="ShapeOf_48923" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48923" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="753" name="Broadcast_48924" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_48924, Constant_48922, ShapeOf_48923" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="754" name="/Slice" type="Slice" version="opset8">
<rt_info>
<attribute name="fused_names" version="0" value="/Constant, /Slice" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
<port id="4" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="5" precision="FP32" names="/Slice_output_0">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="755" name="/Constant_7" type="Const" version="opset1">
<data element_type="f32" shape="" offset="1021868" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_7" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/Constant_7_output_0" />
</output>
</layer>
<layer id="756" name="/Constant_6" type="Const" version="opset1">
<data element_type="f32" shape="" offset="1021872" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_6" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/Constant_6_output_0" />
</output>
</layer>
<layer id="757" name="/Constant_5" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_5" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_5_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="758" name="/Mul_1" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_5, /Mul_1" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/Mul_1_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="759" name="Constant_48975" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_48975" />
</rt_info>
<output>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="760" name="ShapeOf_48976" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_48976" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="761" name="Broadcast_48977" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Broadcast_48977, Constant_48975" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="762" name="/Slice_1" type="Slice" version="opset8">
<rt_info>
<attribute name="fused_names" version="0" value="/Slice_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>8</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
<port id="4" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="5" precision="FP32" names="/Slice_1_output_0">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="763" name="Maximum_49030" type="Maximum" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Maximum_49030" />
</rt_info>
<input>
<port id="0" precision="FP32" />
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="764" name="/Clip" type="Minimum" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/Clip" />
</rt_info>
<input>
<port id="0" precision="FP32" />
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/Clip_output_0">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="765" name="/Constant_8" type="Const" version="opset1">
<data element_type="f32" shape="" offset="1021876" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_8" />
</rt_info>
<output>
<port id="0" precision="FP32" names="/Constant_8_output_0" />
</output>
</layer>
<layer id="766" name="/Mul_2" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/Mul_2" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32" />
</input>
<output>
<port id="2" precision="FP32" names="/Mul_2_output_0">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="767" name="/Exp" type="Exp" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/Exp" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/Exp_output_0">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="768" name="Constant_49099" type="Const" version="opset1">
<data element_type="f32" shape="1" offset="78384" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_49099" />
</rt_info>
<output>
<port id="0" precision="FP32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="769" name="Constant_49061" type="Const" version="opset1">
<data element_type="f32" shape="" offset="78484" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_49061" />
</rt_info>
<output>
<port id="0" precision="FP32" />
</output>
</layer>
<layer id="770" name="/Shape_1" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/Shape_1" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/Shape_1_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="771" name="/Constant_9" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_9" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_9_output_0" />
</output>
</layer>
<layer id="772" name="Constant_49037" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_49037" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="773" name="/Gather_1" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_9, /Gather_1, Constant_49037" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/Gather_1_output_0" />
</output>
</layer>
<layer id="774" name="Constant_396" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_396" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_567">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="775" name="/Unsqueeze" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/Unsqueeze, Constant_396" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/Unsqueeze_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="776" name="/Shape_2" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/Shape_2" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/Shape_2_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="777" name="/Constant_10" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4096" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_10" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_10_output_0" />
</output>
</layer>
<layer id="778" name="Constant_49041" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_49041" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="779" name="/Gather_2" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_10, /Gather_2, Constant_49041" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/Gather_2_output_0" />
</output>
</layer>
<layer id="780" name="Constant_398" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_398" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_569">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="781" name="/Unsqueeze_1" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/Unsqueeze_1, Constant_398" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/Unsqueeze_1_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="782" name="/Shape_3" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/Shape_3" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/Shape_3_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="783" name="/Constant_11" type="Const" version="opset1">
<data element_type="i64" shape="" offset="4112" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_11" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_11_output_0" />
</output>
</layer>
<layer id="784" name="Constant_49045" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_49045" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="785" name="/Gather_3" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_11, /Gather_3, Constant_49045" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/Gather_3_output_0" />
</output>
</layer>
<layer id="786" name="Constant_400" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_400" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_571">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="787" name="/Unsqueeze_2" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/Unsqueeze_2, Constant_400" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/Unsqueeze_2_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="788" name="/Shape_4" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="/Shape_4" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64" names="/Shape_4_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="789" name="/Constant_12" type="Const" version="opset1">
<data element_type="i64" shape="" offset="641568" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_12" />
</rt_info>
<output>
<port id="0" precision="I64" names="/Constant_12_output_0" />
</output>
</layer>
<layer id="790" name="Constant_49049" type="Const" version="opset1">
<data element_type="i64" shape="" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_49049" />
</rt_info>
<output>
<port id="0" precision="I64" />
</output>
</layer>
<layer id="791" name="/Gather_4" type="Gather" version="opset8">
<data batch_dims="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/Constant_12, /Gather_4, Constant_49049" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="I64" />
<port id="2" precision="I64" />
</input>
<output>
<port id="3" precision="I64" names="/Gather_4_output_0" />
</output>
</layer>
<layer id="792" name="Constant_402" type="Const" version="opset1">
<data element_type="i64" shape="1" offset="78420" size="8" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_402" />
</rt_info>
<output>
<port id="0" precision="I64" names="onnx::Unsqueeze_573">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="793" name="/Unsqueeze_3" type="Unsqueeze" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="/Unsqueeze_3, Constant_402" />
</rt_info>
<input>
<port id="0" precision="I64" />
<port id="1" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="I64" names="/Unsqueeze_3_output_0">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="794" name="/Concat" type="Concat" version="opset1">
<data axis="0" />
<rt_info>
<attribute name="fused_names" version="0" value="/Concat" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>1</dim>
</port>
<port id="1" precision="I64">
<dim>1</dim>
</port>
<port id="2" precision="I64">
<dim>1</dim>
</port>
<port id="3" precision="I64">
<dim>1</dim>
</port>
</input>
<output>
<port id="4" precision="I64" names="/Concat_output_0">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="795" name="/ConstantOfShape" type="Broadcast" version="opset3">
<data mode="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/ConstantOfShape" />
</rt_info>
<input>
<port id="0" precision="FP32" />
<port id="1" precision="I64">
<dim>4</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/ConstantOfShape_output_0">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="796" name="ShapeOf_49081" type="ShapeOf" version="opset3">
<data output_type="i64" />
<rt_info>
<attribute name="fused_names" version="0" value="ShapeOf_49081" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="I64">
<dim>4</dim>
</port>
</output>
</layer>
<layer id="797" name="Constant_49082" type="Const" version="opset1">
<data element_type="f32" shape="1" offset="78484" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_49082" />
</rt_info>
<output>
<port id="0" precision="FP32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="798" name="Constant_49083" type="Const" version="opset1">
<data element_type="f32" shape="1" offset="78384" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_49083" />
</rt_info>
<output>
<port id="0" precision="FP32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="799" name="RandomUniform_49084" type="RandomUniform" version="opset8">
<data output_type="f32" op_seed="6327" global_seed="0" />
<rt_info>
<attribute name="fused_names" version="0" value="RandomUniform_49084" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
</port>
<port id="2" precision="FP32">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="800" name="Log_49093" type="Log" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="Log_49093" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="801" name="Constant_49091" type="Const" version="opset1">
<data element_type="f32" shape="1" offset="1021880" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_49091" />
</rt_info>
<output>
<port id="0" precision="FP32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="802" name="Multiply_49094" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_49094" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="803" name="Sqrt_49095" type="Sqrt" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="Sqrt_49095" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="804" name="RandomUniform_49089" type="RandomUniform" version="opset8">
<data output_type="f32" op_seed="9590" global_seed="0" />
<rt_info>
<attribute name="fused_names" version="0" value="RandomUniform_49089" />
</rt_info>
<input>
<port id="0" precision="I64">
<dim>4</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
</port>
<port id="2" precision="FP32">
<dim>1</dim>
</port>
</input>
<output>
<port id="3" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="805" name="Constant_49090" type="Const" version="opset1">
<data element_type="f32" shape="1" offset="1021884" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_49090" />
</rt_info>
<output>
<port id="0" precision="FP32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="806" name="Multiply_49096" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_49096" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="807" name="Multiply_49097" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_49097" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="808" name="Cos_49098" type="Cos" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="Cos_49098" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="809" name="Multiply_49101" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_49101" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="810" name="Multiply_49102" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="Multiply_49102" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="811" name="Constant_49100" type="Const" version="opset1">
<data element_type="f32" shape="1" offset="78484" size="4" />
<rt_info>
<attribute name="fused_names" version="0" value="Constant_49100" />
</rt_info>
<output>
<port id="0" precision="FP32">
<dim>1</dim>
</port>
</output>
</layer>
<layer id="812" name="/RandomNormalLike" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/RandomNormalLike" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/RandomNormalLike_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="813" name="/Cast" type="Convert" version="opset1">
<data destination_type="f32" />
<rt_info>
<attribute name="fused_names" version="0" value="/Cast" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="1" precision="FP32" names="/Cast_output_0">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="814" name="/Mul_3" type="Multiply" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="/Mul_3" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="/Mul_3_output_0">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="815" name="latent_sample" type="Add" version="opset1">
<data auto_broadcast="numpy" />
<rt_info>
<attribute name="fused_names" version="0" value="latent_sample" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
<port id="1" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
<output>
<port id="2" precision="FP32" names="latent_sample">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</output>
</layer>
<layer id="816" name="latent_sample/sink_port_0" type="Result" version="opset1">
<rt_info>
<attribute name="fused_names" version="0" value="latent_sample/sink_port_0" />
</rt_info>
<input>
<port id="0" precision="FP32">
<dim>-1</dim>
<dim>4</dim>
<dim>-1</dim>
<dim>-1</dim>
</port>
</input>
</layer>
</layers>
<edges>
<edge from-layer="0" from-port="0" to-layer="4" to-port="0" />
<edge from-layer="1" from-port="0" to-layer="573" to-port="0" />
<edge from-layer="2" from-port="0" to-layer="391" to-port="0" />
<edge from-layer="3" from-port="0" to-layer="4" to-port="1" />
<edge from-layer="4" from-port="2" to-layer="8" to-port="0" />
<edge from-layer="4" from-port="2" to-layer="15" to-port="0" />
<edge from-layer="5" from-port="0" to-layer="14" to-port="0" />
<edge from-layer="6" from-port="0" to-layer="12" to-port="0" />
<edge from-layer="6" from-port="0" to-layer="13" to-port="0" />
<edge from-layer="7" from-port="0" to-layer="13" to-port="1" />
<edge from-layer="8" from-port="1" to-layer="9" to-port="0" />
<edge from-layer="9" from-port="1" to-layer="11" to-port="0" />
<edge from-layer="10" from-port="0" to-layer="11" to-port="1" />
<edge from-layer="11" from-port="2" to-layer="12" to-port="1" />
<edge from-layer="12" from-port="2" to-layer="13" to-port="2" />
<edge from-layer="13" from-port="3" to-layer="14" to-port="1" />
<edge from-layer="14" from-port="2" to-layer="15" to-port="1" />
<edge from-layer="15" from-port="2" to-layer="17" to-port="0" />
<edge from-layer="15" from-port="2" to-layer="104" to-port="0" />
<edge from-layer="15" from-port="2" to-layer="40" to-port="0" />
<edge from-layer="16" from-port="0" to-layer="17" to-port="1" />
<edge from-layer="17" from-port="2" to-layer="19" to-port="0" />
<edge from-layer="18" from-port="0" to-layer="19" to-port="1" />
<edge from-layer="19" from-port="2" to-layer="23" to-port="0" />
<edge from-layer="19" from-port="2" to-layer="30" to-port="0" />
<edge from-layer="20" from-port="0" to-layer="29" to-port="0" />
<edge from-layer="21" from-port="0" to-layer="27" to-port="0" />
<edge from-layer="21" from-port="0" to-layer="28" to-port="0" />
<edge from-layer="22" from-port="0" to-layer="28" to-port="1" />
<edge from-layer="23" from-port="1" to-layer="24" to-port="0" />
<edge from-layer="24" from-port="1" to-layer="26" to-port="0" />
<edge from-layer="24" from-port="1" to-layer="35" to-port="0" />
<edge from-layer="25" from-port="0" to-layer="26" to-port="1" />
<edge from-layer="26" from-port="2" to-layer="27" to-port="1" />
<edge from-layer="27" from-port="2" to-layer="28" to-port="2" />
<edge from-layer="28" from-port="3" to-layer="29" to-port="1" />
<edge from-layer="29" from-port="2" to-layer="30" to-port="1" />
<edge from-layer="30" from-port="2" to-layer="39" to-port="0" />
<edge from-layer="31" from-port="0" to-layer="38" to-port="0" />
<edge from-layer="32" from-port="0" to-layer="36" to-port="0" />
<edge from-layer="32" from-port="0" to-layer="37" to-port="0" />
<edge from-layer="33" from-port="0" to-layer="37" to-port="1" />
<edge from-layer="34" from-port="0" to-layer="35" to-port="1" />
<edge from-layer="35" from-port="2" to-layer="36" to-port="1" />
<edge from-layer="36" from-port="2" to-layer="37" to-port="2" />
<edge from-layer="37" from-port="3" to-layer="38" to-port="1" />
<edge from-layer="38" from-port="2" to-layer="39" to-port="1" />
<edge from-layer="39" from-port="2" to-layer="41" to-port="0" />
<edge from-layer="40" from-port="1" to-layer="41" to-port="1" />
<edge from-layer="41" from-port="2" to-layer="43" to-port="0" />
<edge from-layer="42" from-port="0" to-layer="87" to-port="1" />
<edge from-layer="42" from-port="0" to-layer="180" to-port="1" />
<edge from-layer="42" from-port="0" to-layer="43" to-port="1" />
<edge from-layer="43" from-port="2" to-layer="45" to-port="0" />
<edge from-layer="44" from-port="0" to-layer="88" to-port="1" />
<edge from-layer="44" from-port="0" to-layer="45" to-port="1" />
<edge from-layer="44" from-port="0" to-layer="181" to-port="1" />
<edge from-layer="45" from-port="2" to-layer="46" to-port="0" />
<edge from-layer="45" from-port="2" to-layer="47" to-port="0" />
<edge from-layer="46" from-port="1" to-layer="47" to-port="1" />
<edge from-layer="47" from-port="2" to-layer="49" to-port="0" />
<edge from-layer="48" from-port="0" to-layer="49" to-port="1" />
<edge from-layer="49" from-port="2" to-layer="60" to-port="0" />
<edge from-layer="49" from-port="2" to-layer="53" to-port="0" />
<edge from-layer="50" from-port="0" to-layer="59" to-port="0" />
<edge from-layer="51" from-port="0" to-layer="58" to-port="0" />
<edge from-layer="51" from-port="0" to-layer="57" to-port="0" />
<edge from-layer="52" from-port="0" to-layer="58" to-port="1" />
<edge from-layer="53" from-port="1" to-layer="54" to-port="0" />
<edge from-layer="54" from-port="1" to-layer="56" to-port="0" />
<edge from-layer="55" from-port="0" to-layer="56" to-port="1" />
<edge from-layer="56" from-port="2" to-layer="57" to-port="1" />
<edge from-layer="57" from-port="2" to-layer="58" to-port="2" />
<edge from-layer="58" from-port="3" to-layer="59" to-port="1" />
<edge from-layer="59" from-port="2" to-layer="60" to-port="1" />
<edge from-layer="60" from-port="2" to-layer="62" to-port="0" />
<edge from-layer="60" from-port="2" to-layer="85" to-port="0" />
<edge from-layer="61" from-port="0" to-layer="62" to-port="1" />
<edge from-layer="62" from-port="2" to-layer="64" to-port="0" />
<edge from-layer="63" from-port="0" to-layer="64" to-port="1" />
<edge from-layer="64" from-port="2" to-layer="75" to-port="0" />
<edge from-layer="64" from-port="2" to-layer="68" to-port="0" />
<edge from-layer="65" from-port="0" to-layer="74" to-port="0" />
<edge from-layer="66" from-port="0" to-layer="72" to-port="0" />
<edge from-layer="66" from-port="0" to-layer="73" to-port="0" />
<edge from-layer="67" from-port="0" to-layer="73" to-port="1" />
<edge from-layer="68" from-port="1" to-layer="69" to-port="0" />
<edge from-layer="69" from-port="1" to-layer="71" to-port="0" />
<edge from-layer="69" from-port="1" to-layer="80" to-port="0" />
<edge from-layer="70" from-port="0" to-layer="71" to-port="1" />
<edge from-layer="71" from-port="2" to-layer="72" to-port="1" />
<edge from-layer="72" from-port="2" to-layer="73" to-port="2" />
<edge from-layer="73" from-port="3" to-layer="74" to-port="1" />
<edge from-layer="74" from-port="2" to-layer="75" to-port="1" />
<edge from-layer="75" from-port="2" to-layer="84" to-port="0" />
<edge from-layer="76" from-port="0" to-layer="83" to-port="0" />
<edge from-layer="77" from-port="0" to-layer="82" to-port="0" />
<edge from-layer="77" from-port="0" to-layer="81" to-port="0" />
<edge from-layer="78" from-port="0" to-layer="82" to-port="1" />
<edge from-layer="79" from-port="0" to-layer="80" to-port="1" />
<edge from-layer="80" from-port="2" to-layer="81" to-port="1" />
<edge from-layer="81" from-port="2" to-layer="82" to-port="2" />
<edge from-layer="82" from-port="3" to-layer="83" to-port="1" />
<edge from-layer="83" from-port="2" to-layer="84" to-port="1" />
<edge from-layer="84" from-port="2" to-layer="86" to-port="0" />
<edge from-layer="85" from-port="1" to-layer="86" to-port="1" />
<edge from-layer="86" from-port="2" to-layer="87" to-port="0" />
<edge from-layer="87" from-port="2" to-layer="88" to-port="0" />
<edge from-layer="88" from-port="2" to-layer="89" to-port="0" />
<edge from-layer="88" from-port="2" to-layer="90" to-port="0" />
<edge from-layer="89" from-port="1" to-layer="90" to-port="1" />
<edge from-layer="90" from-port="2" to-layer="92" to-port="0" />
<edge from-layer="91" from-port="0" to-layer="92" to-port="1" />
<edge from-layer="92" from-port="2" to-layer="96" to-port="0" />
<edge from-layer="92" from-port="2" to-layer="103" to-port="0" />
<edge from-layer="93" from-port="0" to-layer="102" to-port="0" />
<edge from-layer="94" from-port="0" to-layer="100" to-port="0" />
<edge from-layer="94" from-port="0" to-layer="101" to-port="0" />
<edge from-layer="95" from-port="0" to-layer="101" to-port="1" />
<edge from-layer="96" from-port="1" to-layer="97" to-port="0" />
<edge from-layer="97" from-port="1" to-layer="99" to-port="0" />
<edge from-layer="98" from-port="0" to-layer="99" to-port="1" />
<edge from-layer="99" from-port="2" to-layer="100" to-port="1" />
<edge from-layer="100" from-port="2" to-layer="101" to-port="2" />
<edge from-layer="101" from-port="3" to-layer="102" to-port="1" />
<edge from-layer="102" from-port="2" to-layer="103" to-port="1" />
<edge from-layer="103" from-port="2" to-layer="104" to-port="1" />
<edge from-layer="104" from-port="2" to-layer="106" to-port="0" />
<edge from-layer="105" from-port="0" to-layer="106" to-port="1" />
<edge from-layer="106" from-port="2" to-layer="127" to-port="0" />
<edge from-layer="107" from-port="0" to-layer="111" to-port="0" />
<edge from-layer="108" from-port="0" to-layer="110" to-port="0" />
<edge from-layer="109" from-port="0" to-layer="110" to-port="1" />
<edge from-layer="110" from-port="2" to-layer="111" to-port="1" />
<edge from-layer="111" from-port="2" to-layer="113" to-port="0" />
<edge from-layer="112" from-port="0" to-layer="113" to-port="1" />
<edge from-layer="113" from-port="2" to-layer="118" to-port="0" />
<edge from-layer="114" from-port="0" to-layer="118" to-port="1" />
<edge from-layer="115" from-port="0" to-layer="118" to-port="2" />
<edge from-layer="116" from-port="0" to-layer="118" to-port="3" />
<edge from-layer="117" from-port="0" to-layer="118" to-port="4" />
<edge from-layer="118" from-port="5" to-layer="120" to-port="0" />
<edge from-layer="119" from-port="0" to-layer="120" to-port="1" />
<edge from-layer="120" from-port="2" to-layer="122" to-port="0" />
<edge from-layer="121" from-port="0" to-layer="122" to-port="1" />
<edge from-layer="122" from-port="2" to-layer="123" to-port="0" />
<edge from-layer="123" from-port="1" to-layer="125" to-port="0" />
<edge from-layer="124" from-port="0" to-layer="125" to-port="1" />
<edge from-layer="125" from-port="3" to-layer="127" to-port="2" />
<edge from-layer="125" from-port="2" to-layer="127" to-port="1" />
<edge from-layer="126" from-port="0" to-layer="127" to-port="3" />
<edge from-layer="127" from-port="4" to-layer="129" to-port="0" />
<edge from-layer="128" from-port="0" to-layer="129" to-port="1" />
<edge from-layer="129" from-port="2" to-layer="133" to-port="0" />
<edge from-layer="129" from-port="2" to-layer="140" to-port="0" />
<edge from-layer="130" from-port="0" to-layer="139" to-port="0" />
<edge from-layer="131" from-port="0" to-layer="137" to-port="0" />
<edge from-layer="131" from-port="0" to-layer="138" to-port="0" />
<edge from-layer="132" from-port="0" to-layer="138" to-port="1" />
<edge from-layer="133" from-port="1" to-layer="134" to-port="0" />
<edge from-layer="134" from-port="1" to-layer="136" to-port="0" />
<edge from-layer="135" from-port="0" to-layer="136" to-port="1" />
<edge from-layer="136" from-port="2" to-layer="137" to-port="1" />
<edge from-layer="137" from-port="2" to-layer="138" to-port="2" />
<edge from-layer="138" from-port="3" to-layer="139" to-port="1" />
<edge from-layer="139" from-port="2" to-layer="140" to-port="1" />
<edge from-layer="140" from-port="2" to-layer="142" to-port="0" />
<edge from-layer="140" from-port="2" to-layer="178" to-port="0" />
<edge from-layer="140" from-port="2" to-layer="155" to-port="0" />
<edge from-layer="141" from-port="0" to-layer="142" to-port="1" />
<edge from-layer="142" from-port="2" to-layer="153" to-port="0" />
<edge from-layer="142" from-port="2" to-layer="146" to-port="0" />
<edge from-layer="143" from-port="0" to-layer="152" to-port="0" />
<edge from-layer="144" from-port="0" to-layer="151" to-port="0" />
<edge from-layer="144" from-port="0" to-layer="150" to-port="0" />
<edge from-layer="145" from-port="0" to-layer="151" to-port="1" />
<edge from-layer="146" from-port="1" to-layer="147" to-port="0" />
<edge from-layer="147" from-port="1" to-layer="149" to-port="0" />
<edge from-layer="148" from-port="0" to-layer="149" to-port="1" />
<edge from-layer="149" from-port="2" to-layer="150" to-port="1" />
<edge from-layer="150" from-port="2" to-layer="151" to-port="2" />
<edge from-layer="151" from-port="3" to-layer="152" to-port="1" />
<edge from-layer="152" from-port="2" to-layer="153" to-port="1" />
<edge from-layer="153" from-port="2" to-layer="242" to-port="0" />
<edge from-layer="154" from-port="0" to-layer="155" to-port="1" />
<edge from-layer="155" from-port="2" to-layer="157" to-port="0" />
<edge from-layer="156" from-port="0" to-layer="157" to-port="1" />
<edge from-layer="157" from-port="2" to-layer="168" to-port="0" />
<edge from-layer="157" from-port="2" to-layer="161" to-port="0" />
<edge from-layer="158" from-port="0" to-layer="167" to-port="0" />
<edge from-layer="159" from-port="0" to-layer="166" to-port="0" />
<edge from-layer="159" from-port="0" to-layer="165" to-port="0" />
<edge from-layer="160" from-port="0" to-layer="166" to-port="1" />
<edge from-layer="161" from-port="1" to-layer="162" to-port="0" />
<edge from-layer="162" from-port="1" to-layer="173" to-port="0" />
<edge from-layer="162" from-port="1" to-layer="164" to-port="0" />
<edge from-layer="163" from-port="0" to-layer="164" to-port="1" />
<edge from-layer="164" from-port="2" to-layer="165" to-port="1" />
<edge from-layer="165" from-port="2" to-layer="166" to-port="2" />
<edge from-layer="166" from-port="3" to-layer="167" to-port="1" />
<edge from-layer="167" from-port="2" to-layer="168" to-port="1" />
<edge from-layer="168" from-port="2" to-layer="177" to-port="0" />
<edge from-layer="169" from-port="0" to-layer="176" to-port="0" />
<edge from-layer="170" from-port="0" to-layer="175" to-port="0" />
<edge from-layer="170" from-port="0" to-layer="174" to-port="0" />
<edge from-layer="171" from-port="0" to-layer="175" to-port="1" />
<edge from-layer="172" from-port="0" to-layer="173" to-port="1" />
<edge from-layer="173" from-port="2" to-layer="174" to-port="1" />
<edge from-layer="174" from-port="2" to-layer="175" to-port="2" />
<edge from-layer="175" from-port="3" to-layer="176" to-port="1" />
<edge from-layer="176" from-port="2" to-layer="177" to-port="1" />
<edge from-layer="177" from-port="2" to-layer="179" to-port="0" />
<edge from-layer="178" from-port="1" to-layer="179" to-port="1" />
<edge from-layer="179" from-port="2" to-layer="180" to-port="0" />
<edge from-layer="180" from-port="2" to-layer="181" to-port="0" />
<edge from-layer="181" from-port="2" to-layer="182" to-port="0" />
<edge from-layer="181" from-port="2" to-layer="183" to-port="0" />
<edge from-layer="182" from-port="1" to-layer="183" to-port="1" />
<edge from-layer="183" from-port="2" to-layer="185" to-port="0" />
<edge from-layer="184" from-port="0" to-layer="185" to-port="1" />
<edge from-layer="185" from-port="2" to-layer="189" to-port="0" />
<edge from-layer="185" from-port="2" to-layer="196" to-port="0" />
<edge from-layer="186" from-port="0" to-layer="195" to-port="0" />
<edge from-layer="187" from-port="0" to-layer="193" to-port="0" />
<edge from-layer="187" from-port="0" to-layer="194" to-port="0" />
<edge from-layer="188" from-port="0" to-layer="194" to-port="1" />
<edge from-layer="189" from-port="1" to-layer="190" to-port="0" />
<edge from-layer="190" from-port="1" to-layer="192" to-port="0" />
<edge from-layer="191" from-port="0" to-layer="192" to-port="1" />
<edge from-layer="192" from-port="2" to-layer="193" to-port="1" />
<edge from-layer="193" from-port="2" to-layer="194" to-port="2" />
<edge from-layer="194" from-port="3" to-layer="195" to-port="1" />
<edge from-layer="195" from-port="2" to-layer="196" to-port="1" />
<edge from-layer="196" from-port="2" to-layer="198" to-port="0" />
<edge from-layer="196" from-port="2" to-layer="221" to-port="0" />
<edge from-layer="197" from-port="0" to-layer="198" to-port="1" />
<edge from-layer="198" from-port="2" to-layer="200" to-port="0" />
<edge from-layer="199" from-port="0" to-layer="200" to-port="1" />
<edge from-layer="200" from-port="2" to-layer="204" to-port="0" />
<edge from-layer="200" from-port="2" to-layer="211" to-port="0" />
<edge from-layer="201" from-port="0" to-layer="210" to-port="0" />
<edge from-layer="202" from-port="0" to-layer="209" to-port="0" />
<edge from-layer="202" from-port="0" to-layer="208" to-port="0" />
<edge from-layer="203" from-port="0" to-layer="209" to-port="1" />
<edge from-layer="204" from-port="1" to-layer="205" to-port="0" />
<edge from-layer="205" from-port="1" to-layer="207" to-port="0" />
<edge from-layer="205" from-port="1" to-layer="216" to-port="0" />
<edge from-layer="206" from-port="0" to-layer="207" to-port="1" />
<edge from-layer="207" from-port="2" to-layer="208" to-port="1" />
<edge from-layer="208" from-port="2" to-layer="209" to-port="2" />
<edge from-layer="209" from-port="3" to-layer="210" to-port="1" />
<edge from-layer="210" from-port="2" to-layer="211" to-port="1" />
<edge from-layer="211" from-port="2" to-layer="220" to-port="0" />
<edge from-layer="212" from-port="0" to-layer="219" to-port="0" />
<edge from-layer="213" from-port="0" to-layer="217" to-port="0" />
<edge from-layer="213" from-port="0" to-layer="218" to-port="0" />
<edge from-layer="214" from-port="0" to-layer="218" to-port="1" />
<edge from-layer="215" from-port="0" to-layer="216" to-port="1" />
<edge from-layer="216" from-port="2" to-layer="217" to-port="1" />
<edge from-layer="217" from-port="2" to-layer="218" to-port="2" />
<edge from-layer="218" from-port="3" to-layer="219" to-port="1" />
<edge from-layer="219" from-port="2" to-layer="220" to-port="1" />
<edge from-layer="220" from-port="2" to-layer="222" to-port="0" />
<edge from-layer="221" from-port="1" to-layer="222" to-port="1" />
<edge from-layer="222" from-port="2" to-layer="224" to-port="0" />
<edge from-layer="223" from-port="0" to-layer="224" to-port="1" />
<edge from-layer="223" from-port="0" to-layer="314" to-port="1" />
<edge from-layer="223" from-port="0" to-layer="621" to-port="1" />
<edge from-layer="223" from-port="0" to-layer="664" to-port="1" />
<edge from-layer="223" from-port="0" to-layer="710" to-port="1" />
<edge from-layer="223" from-port="0" to-layer="271" to-port="1" />
<edge from-layer="224" from-port="2" to-layer="226" to-port="0" />
<edge from-layer="225" from-port="0" to-layer="272" to-port="1" />
<edge from-layer="225" from-port="0" to-layer="315" to-port="1" />
<edge from-layer="225" from-port="0" to-layer="226" to-port="1" />
<edge from-layer="225" from-port="0" to-layer="622" to-port="1" />
<edge from-layer="225" from-port="0" to-layer="665" to-port="1" />
<edge from-layer="225" from-port="0" to-layer="711" to-port="1" />
<edge from-layer="226" from-port="2" to-layer="228" to-port="0" />
<edge from-layer="226" from-port="2" to-layer="227" to-port="0" />
<edge from-layer="227" from-port="1" to-layer="228" to-port="1" />
<edge from-layer="228" from-port="2" to-layer="230" to-port="0" />
<edge from-layer="229" from-port="0" to-layer="230" to-port="1" />
<edge from-layer="230" from-port="2" to-layer="241" to-port="0" />
<edge from-layer="230" from-port="2" to-layer="234" to-port="0" />
<edge from-layer="231" from-port="0" to-layer="240" to-port="0" />
<edge from-layer="232" from-port="0" to-layer="239" to-port="0" />
<edge from-layer="232" from-port="0" to-layer="238" to-port="0" />
<edge from-layer="233" from-port="0" to-layer="239" to-port="1" />
<edge from-layer="234" from-port="1" to-layer="235" to-port="0" />
<edge from-layer="235" from-port="1" to-layer="237" to-port="0" />
<edge from-layer="236" from-port="0" to-layer="237" to-port="1" />
<edge from-layer="237" from-port="2" to-layer="238" to-port="1" />
<edge from-layer="238" from-port="2" to-layer="239" to-port="2" />
<edge from-layer="239" from-port="3" to-layer="240" to-port="1" />
<edge from-layer="240" from-port="2" to-layer="241" to-port="1" />
<edge from-layer="241" from-port="2" to-layer="242" to-port="1" />
<edge from-layer="242" from-port="2" to-layer="244" to-port="0" />
<edge from-layer="243" from-port="0" to-layer="244" to-port="1" />
<edge from-layer="244" from-port="2" to-layer="331" to-port="0" />
<edge from-layer="244" from-port="2" to-layer="269" to-port="0" />
<edge from-layer="244" from-port="2" to-layer="246" to-port="0" />
<edge from-layer="245" from-port="0" to-layer="246" to-port="1" />
<edge from-layer="246" from-port="2" to-layer="248" to-port="0" />
<edge from-layer="247" from-port="0" to-layer="248" to-port="1" />
<edge from-layer="248" from-port="2" to-layer="259" to-port="0" />
<edge from-layer="248" from-port="2" to-layer="252" to-port="0" />
<edge from-layer="249" from-port="0" to-layer="258" to-port="0" />
<edge from-layer="250" from-port="0" to-layer="257" to-port="0" />
<edge from-layer="250" from-port="0" to-layer="256" to-port="0" />
<edge from-layer="251" from-port="0" to-layer="257" to-port="1" />
<edge from-layer="252" from-port="1" to-layer="253" to-port="0" />
<edge from-layer="253" from-port="1" to-layer="264" to-port="0" />
<edge from-layer="253" from-port="1" to-layer="255" to-port="0" />
<edge from-layer="254" from-port="0" to-layer="255" to-port="1" />
<edge from-layer="255" from-port="2" to-layer="256" to-port="1" />
<edge from-layer="256" from-port="2" to-layer="257" to-port="2" />
<edge from-layer="257" from-port="3" to-layer="258" to-port="1" />
<edge from-layer="258" from-port="2" to-layer="259" to-port="1" />
<edge from-layer="259" from-port="2" to-layer="268" to-port="0" />
<edge from-layer="260" from-port="0" to-layer="267" to-port="0" />
<edge from-layer="261" from-port="0" to-layer="266" to-port="0" />
<edge from-layer="261" from-port="0" to-layer="265" to-port="0" />
<edge from-layer="262" from-port="0" to-layer="266" to-port="1" />
<edge from-layer="263" from-port="0" to-layer="264" to-port="1" />
<edge from-layer="264" from-port="2" to-layer="265" to-port="1" />
<edge from-layer="265" from-port="2" to-layer="266" to-port="2" />
<edge from-layer="266" from-port="3" to-layer="267" to-port="1" />
<edge from-layer="267" from-port="2" to-layer="268" to-port="1" />
<edge from-layer="268" from-port="2" to-layer="270" to-port="0" />
<edge from-layer="269" from-port="1" to-layer="270" to-port="1" />
<edge from-layer="270" from-port="2" to-layer="271" to-port="0" />
<edge from-layer="271" from-port="2" to-layer="272" to-port="0" />
<edge from-layer="272" from-port="2" to-layer="273" to-port="0" />
<edge from-layer="272" from-port="2" to-layer="274" to-port="0" />
<edge from-layer="273" from-port="1" to-layer="274" to-port="1" />
<edge from-layer="274" from-port="2" to-layer="276" to-port="0" />
<edge from-layer="275" from-port="0" to-layer="276" to-port="1" />
<edge from-layer="276" from-port="2" to-layer="280" to-port="0" />
<edge from-layer="276" from-port="2" to-layer="287" to-port="0" />
<edge from-layer="277" from-port="0" to-layer="286" to-port="0" />
<edge from-layer="278" from-port="0" to-layer="285" to-port="0" />
<edge from-layer="278" from-port="0" to-layer="284" to-port="0" />
<edge from-layer="279" from-port="0" to-layer="285" to-port="1" />
<edge from-layer="280" from-port="1" to-layer="281" to-port="0" />
<edge from-layer="281" from-port="1" to-layer="283" to-port="0" />
<edge from-layer="282" from-port="0" to-layer="283" to-port="1" />
<edge from-layer="283" from-port="2" to-layer="284" to-port="1" />
<edge from-layer="284" from-port="2" to-layer="285" to-port="2" />
<edge from-layer="285" from-port="3" to-layer="286" to-port="1" />
<edge from-layer="286" from-port="2" to-layer="287" to-port="1" />
<edge from-layer="287" from-port="2" to-layer="312" to-port="0" />
<edge from-layer="287" from-port="2" to-layer="289" to-port="0" />
<edge from-layer="288" from-port="0" to-layer="289" to-port="1" />
<edge from-layer="289" from-port="2" to-layer="291" to-port="0" />
<edge from-layer="290" from-port="0" to-layer="291" to-port="1" />
<edge from-layer="291" from-port="2" to-layer="302" to-port="0" />
<edge from-layer="291" from-port="2" to-layer="295" to-port="0" />
<edge from-layer="292" from-port="0" to-layer="301" to-port="0" />
<edge from-layer="293" from-port="0" to-layer="300" to-port="0" />
<edge from-layer="293" from-port="0" to-layer="299" to-port="0" />
<edge from-layer="294" from-port="0" to-layer="300" to-port="1" />
<edge from-layer="295" from-port="1" to-layer="296" to-port="0" />
<edge from-layer="296" from-port="1" to-layer="298" to-port="0" />
<edge from-layer="296" from-port="1" to-layer="307" to-port="0" />
<edge from-layer="297" from-port="0" to-layer="298" to-port="1" />
<edge from-layer="298" from-port="2" to-layer="299" to-port="1" />
<edge from-layer="299" from-port="2" to-layer="300" to-port="2" />
<edge from-layer="300" from-port="3" to-layer="301" to-port="1" />
<edge from-layer="301" from-port="2" to-layer="302" to-port="1" />
<edge from-layer="302" from-port="2" to-layer="311" to-port="0" />
<edge from-layer="303" from-port="0" to-layer="310" to-port="0" />
<edge from-layer="304" from-port="0" to-layer="309" to-port="0" />
<edge from-layer="304" from-port="0" to-layer="308" to-port="0" />
<edge from-layer="305" from-port="0" to-layer="309" to-port="1" />
<edge from-layer="306" from-port="0" to-layer="307" to-port="1" />
<edge from-layer="307" from-port="2" to-layer="308" to-port="1" />
<edge from-layer="308" from-port="2" to-layer="309" to-port="2" />
<edge from-layer="309" from-port="3" to-layer="310" to-port="1" />
<edge from-layer="310" from-port="2" to-layer="311" to-port="1" />
<edge from-layer="311" from-port="2" to-layer="313" to-port="0" />
<edge from-layer="312" from-port="1" to-layer="313" to-port="1" />
<edge from-layer="313" from-port="2" to-layer="314" to-port="0" />
<edge from-layer="314" from-port="2" to-layer="315" to-port="0" />
<edge from-layer="315" from-port="2" to-layer="317" to-port="0" />
<edge from-layer="315" from-port="2" to-layer="316" to-port="0" />
<edge from-layer="316" from-port="1" to-layer="317" to-port="1" />
<edge from-layer="317" from-port="2" to-layer="319" to-port="0" />
<edge from-layer="318" from-port="0" to-layer="319" to-port="1" />
<edge from-layer="319" from-port="2" to-layer="323" to-port="0" />
<edge from-layer="319" from-port="2" to-layer="330" to-port="0" />
<edge from-layer="320" from-port="0" to-layer="329" to-port="0" />
<edge from-layer="321" from-port="0" to-layer="327" to-port="0" />
<edge from-layer="321" from-port="0" to-layer="328" to-port="0" />
<edge from-layer="322" from-port="0" to-layer="328" to-port="1" />
<edge from-layer="323" from-port="1" to-layer="324" to-port="0" />
<edge from-layer="324" from-port="1" to-layer="326" to-port="0" />
<edge from-layer="325" from-port="0" to-layer="326" to-port="1" />
<edge from-layer="326" from-port="2" to-layer="327" to-port="1" />
<edge from-layer="327" from-port="2" to-layer="328" to-port="2" />
<edge from-layer="328" from-port="3" to-layer="329" to-port="1" />
<edge from-layer="329" from-port="2" to-layer="330" to-port="1" />
<edge from-layer="330" from-port="2" to-layer="331" to-port="1" />
<edge from-layer="331" from-port="2" to-layer="333" to-port="0" />
<edge from-layer="332" from-port="0" to-layer="333" to-port="1" />
<edge from-layer="333" from-port="2" to-layer="334" to-port="0" />
<edge from-layer="333" from-port="2" to-layer="592" to-port="1" />
<edge from-layer="333" from-port="2" to-layer="358" to-port="0" />
<edge from-layer="333" from-port="2" to-layer="350" to-port="0" />
<edge from-layer="333" from-port="2" to-layer="346" to-port="0" />
<edge from-layer="333" from-port="2" to-layer="340" to-port="0" />
<edge from-layer="334" from-port="1" to-layer="337" to-port="0" />
<edge from-layer="335" from-port="0" to-layer="337" to-port="1" />
<edge from-layer="336" from-port="0" to-layer="337" to-port="2" />
<edge from-layer="337" from-port="3" to-layer="339" to-port="0" />
<edge from-layer="338" from-port="0" to-layer="339" to-port="1" />
<edge from-layer="339" from-port="2" to-layer="357" to-port="0" />
<edge from-layer="340" from-port="1" to-layer="343" to-port="0" />
<edge from-layer="341" from-port="0" to-layer="343" to-port="1" />
<edge from-layer="342" from-port="0" to-layer="343" to-port="2" />
<edge from-layer="343" from-port="3" to-layer="585" to-port="0" />
<edge from-layer="343" from-port="3" to-layer="345" to-port="0" />
<edge from-layer="344" from-port="0" to-layer="345" to-port="1" />
<edge from-layer="345" from-port="2" to-layer="357" to-port="1" />
<edge from-layer="346" from-port="1" to-layer="349" to-port="0" />
<edge from-layer="347" from-port="0" to-layer="349" to-port="1" />
<edge from-layer="348" from-port="0" to-layer="349" to-port="2" />
<edge from-layer="349" from-port="3" to-layer="354" to-port="0" />
<edge from-layer="349" from-port="3" to-layer="587" to-port="0" />
<edge from-layer="350" from-port="1" to-layer="353" to-port="0" />
<edge from-layer="351" from-port="0" to-layer="353" to-port="1" />
<edge from-layer="352" from-port="0" to-layer="353" to-port="2" />
<edge from-layer="353" from-port="3" to-layer="589" to-port="0" />
<edge from-layer="353" from-port="3" to-layer="354" to-port="1" />
<edge from-layer="354" from-port="2" to-layer="356" to-port="0" />
<edge from-layer="355" from-port="0" to-layer="356" to-port="1" />
<edge from-layer="356" from-port="2" to-layer="357" to-port="2" />
<edge from-layer="357" from-port="3" to-layer="358" to-port="1" />
<edge from-layer="358" from-port="2" to-layer="383" to-port="0" />
<edge from-layer="358" from-port="2" to-layer="360" to-port="0" />
<edge from-layer="358" from-port="2" to-layer="577" to-port="0" />
<edge from-layer="359" from-port="0" to-layer="360" to-port="1" />
<edge from-layer="360" from-port="2" to-layer="362" to-port="0" />
<edge from-layer="361" from-port="0" to-layer="362" to-port="1" />
<edge from-layer="362" from-port="2" to-layer="366" to-port="0" />
<edge from-layer="362" from-port="2" to-layer="373" to-port="0" />
<edge from-layer="363" from-port="0" to-layer="372" to-port="0" />
<edge from-layer="364" from-port="0" to-layer="370" to-port="0" />
<edge from-layer="364" from-port="0" to-layer="371" to-port="0" />
<edge from-layer="365" from-port="0" to-layer="371" to-port="1" />
<edge from-layer="366" from-port="1" to-layer="367" to-port="0" />
<edge from-layer="367" from-port="1" to-layer="378" to-port="0" />
<edge from-layer="367" from-port="1" to-layer="369" to-port="0" />
<edge from-layer="368" from-port="0" to-layer="369" to-port="1" />
<edge from-layer="369" from-port="2" to-layer="370" to-port="1" />
<edge from-layer="370" from-port="2" to-layer="371" to-port="2" />
<edge from-layer="371" from-port="3" to-layer="372" to-port="1" />
<edge from-layer="372" from-port="2" to-layer="373" to-port="1" />
<edge from-layer="373" from-port="2" to-layer="382" to-port="0" />
<edge from-layer="374" from-port="0" to-layer="381" to-port="0" />
<edge from-layer="375" from-port="0" to-layer="379" to-port="0" />
<edge from-layer="375" from-port="0" to-layer="380" to-port="0" />
<edge from-layer="376" from-port="0" to-layer="380" to-port="1" />
<edge from-layer="377" from-port="0" to-layer="378" to-port="1" />
<edge from-layer="378" from-port="2" to-layer="379" to-port="1" />
<edge from-layer="379" from-port="2" to-layer="380" to-port="2" />
<edge from-layer="380" from-port="3" to-layer="381" to-port="1" />
<edge from-layer="381" from-port="2" to-layer="382" to-port="1" />
<edge from-layer="382" from-port="2" to-layer="384" to-port="0" />
<edge from-layer="383" from-port="1" to-layer="384" to-port="1" />
<edge from-layer="384" from-port="2" to-layer="386" to-port="0" />
<edge from-layer="385" from-port="0" to-layer="386" to-port="1" />
<edge from-layer="386" from-port="2" to-layer="388" to-port="0" />
<edge from-layer="387" from-port="0" to-layer="388" to-port="1" />
<edge from-layer="388" from-port="2" to-layer="429" to-port="0" />
<edge from-layer="388" from-port="2" to-layer="498" to-port="0" />
<edge from-layer="388" from-port="2" to-layer="390" to-port="0" />
<edge from-layer="389" from-port="0" to-layer="390" to-port="1" />
<edge from-layer="390" from-port="2" to-layer="391" to-port="1" />
<edge from-layer="391" from-port="2" to-layer="405" to-port="0" />
<edge from-layer="391" from-port="2" to-layer="392" to-port="0" />
<edge from-layer="391" from-port="2" to-layer="398" to-port="0" />
<edge from-layer="391" from-port="2" to-layer="416" to-port="0" />
<edge from-layer="392" from-port="1" to-layer="395" to-port="0" />
<edge from-layer="393" from-port="0" to-layer="395" to-port="1" />
<edge from-layer="394" from-port="0" to-layer="395" to-port="2" />
<edge from-layer="395" from-port="3" to-layer="397" to-port="0" />
<edge from-layer="395" from-port="3" to-layer="420" to-port="0" />
<edge from-layer="396" from-port="0" to-layer="397" to-port="1" />
<edge from-layer="397" from-port="2" to-layer="415" to-port="0" />
<edge from-layer="398" from-port="1" to-layer="401" to-port="0" />
<edge from-layer="399" from-port="0" to-layer="401" to-port="1" />
<edge from-layer="400" from-port="0" to-layer="401" to-port="2" />
<edge from-layer="401" from-port="3" to-layer="403" to-port="0" />
<edge from-layer="401" from-port="3" to-layer="422" to-port="0" />
<edge from-layer="402" from-port="0" to-layer="403" to-port="1" />
<edge from-layer="403" from-port="2" to-layer="415" to-port="1" />
<edge from-layer="404" from-port="0" to-layer="415" to-port="2" />
<edge from-layer="405" from-port="1" to-layer="408" to-port="0" />
<edge from-layer="406" from-port="0" to-layer="408" to-port="1" />
<edge from-layer="407" from-port="0" to-layer="408" to-port="2" />
<edge from-layer="408" from-port="3" to-layer="410" to-port="0" />
<edge from-layer="409" from-port="0" to-layer="410" to-port="1" />
<edge from-layer="410" from-port="2" to-layer="411" to-port="0" />
<edge from-layer="411" from-port="1" to-layer="412" to-port="0" />
<edge from-layer="412" from-port="1" to-layer="424" to-port="0" />
<edge from-layer="412" from-port="1" to-layer="414" to-port="0" />
<edge from-layer="413" from-port="0" to-layer="414" to-port="1" />
<edge from-layer="414" from-port="2" to-layer="415" to-port="3" />
<edge from-layer="415" from-port="4" to-layer="416" to-port="1" />
<edge from-layer="416" from-port="2" to-layer="418" to-port="0" />
<edge from-layer="417" from-port="0" to-layer="418" to-port="1" />
<edge from-layer="418" from-port="2" to-layer="426" to-port="0" />
<edge from-layer="419" from-port="0" to-layer="420" to-port="1" />
<edge from-layer="420" from-port="2" to-layer="425" to-port="0" />
<edge from-layer="421" from-port="0" to-layer="422" to-port="1" />
<edge from-layer="422" from-port="2" to-layer="425" to-port="1" />
<edge from-layer="423" from-port="0" to-layer="424" to-port="1" />
<edge from-layer="424" from-port="2" to-layer="425" to-port="2" />
<edge from-layer="425" from-port="3" to-layer="426" to-port="1" />
<edge from-layer="426" from-port="2" to-layer="476" to-port="0" />
<edge from-layer="426" from-port="2" to-layer="470" to-port="0" />
<edge from-layer="426" from-port="2" to-layer="466" to-port="0" />
<edge from-layer="427" from-port="0" to-layer="430" to-port="0" />
<edge from-layer="428" from-port="0" to-layer="429" to-port="1" />
<edge from-layer="429" from-port="2" to-layer="430" to-port="1" />
<edge from-layer="430" from-port="2" to-layer="444" to-port="0" />
<edge from-layer="430" from-port="2" to-layer="431" to-port="0" />
<edge from-layer="430" from-port="2" to-layer="437" to-port="0" />
<edge from-layer="430" from-port="2" to-layer="455" to-port="0" />
<edge from-layer="431" from-port="1" to-layer="434" to-port="0" />
<edge from-layer="432" from-port="0" to-layer="434" to-port="1" />
<edge from-layer="433" from-port="0" to-layer="434" to-port="2" />
<edge from-layer="434" from-port="3" to-layer="459" to-port="0" />
<edge from-layer="434" from-port="3" to-layer="436" to-port="0" />
<edge from-layer="435" from-port="0" to-layer="436" to-port="1" />
<edge from-layer="436" from-port="2" to-layer="454" to-port="0" />
<edge from-layer="437" from-port="1" to-layer="440" to-port="0" />
<edge from-layer="438" from-port="0" to-layer="440" to-port="1" />
<edge from-layer="439" from-port="0" to-layer="440" to-port="2" />
<edge from-layer="440" from-port="3" to-layer="461" to-port="0" />
<edge from-layer="440" from-port="3" to-layer="442" to-port="0" />
<edge from-layer="441" from-port="0" to-layer="442" to-port="1" />
<edge from-layer="442" from-port="2" to-layer="454" to-port="1" />
<edge from-layer="443" from-port="0" to-layer="454" to-port="2" />
<edge from-layer="444" from-port="1" to-layer="447" to-port="0" />
<edge from-layer="445" from-port="0" to-layer="447" to-port="1" />
<edge from-layer="446" from-port="0" to-layer="447" to-port="2" />
<edge from-layer="447" from-port="3" to-layer="449" to-port="0" />
<edge from-layer="448" from-port="0" to-layer="449" to-port="1" />
<edge from-layer="449" from-port="2" to-layer="450" to-port="0" />
<edge from-layer="450" from-port="1" to-layer="451" to-port="0" />
<edge from-layer="451" from-port="1" to-layer="453" to-port="0" />
<edge from-layer="451" from-port="1" to-layer="463" to-port="0" />
<edge from-layer="452" from-port="0" to-layer="453" to-port="1" />
<edge from-layer="453" from-port="2" to-layer="454" to-port="3" />
<edge from-layer="454" from-port="4" to-layer="455" to-port="1" />
<edge from-layer="455" from-port="2" to-layer="457" to-port="0" />
<edge from-layer="456" from-port="0" to-layer="457" to-port="1" />
<edge from-layer="457" from-port="2" to-layer="465" to-port="0" />
<edge from-layer="458" from-port="0" to-layer="459" to-port="1" />
<edge from-layer="459" from-port="2" to-layer="464" to-port="0" />
<edge from-layer="460" from-port="0" to-layer="461" to-port="1" />
<edge from-layer="461" from-port="2" to-layer="464" to-port="1" />
<edge from-layer="462" from-port="0" to-layer="463" to-port="1" />
<edge from-layer="463" from-port="2" to-layer="464" to-port="2" />
<edge from-layer="464" from-port="3" to-layer="465" to-port="1" />
<edge from-layer="465" from-port="2" to-layer="482" to-port="0" />
<edge from-layer="465" from-port="2" to-layer="466" to-port="1" />
<edge from-layer="466" from-port="2" to-layer="468" to-port="0" />
<edge from-layer="467" from-port="0" to-layer="468" to-port="1" />
<edge from-layer="468" from-port="2" to-layer="492" to-port="0" />
<edge from-layer="469" from-port="0" to-layer="489" to-port="0" />
<edge from-layer="470" from-port="1" to-layer="473" to-port="0" />
<edge from-layer="471" from-port="0" to-layer="473" to-port="1" />
<edge from-layer="472" from-port="0" to-layer="473" to-port="2" />
<edge from-layer="473" from-port="3" to-layer="475" to-port="0" />
<edge from-layer="474" from-port="0" to-layer="475" to-port="1" />
<edge from-layer="475" from-port="2" to-layer="488" to-port="0" />
<edge from-layer="476" from-port="1" to-layer="479" to-port="0" />
<edge from-layer="477" from-port="0" to-layer="479" to-port="1" />
<edge from-layer="478" from-port="0" to-layer="479" to-port="2" />
<edge from-layer="479" from-port="3" to-layer="481" to-port="0" />
<edge from-layer="480" from-port="0" to-layer="481" to-port="1" />
<edge from-layer="481" from-port="2" to-layer="488" to-port="1" />
<edge from-layer="482" from-port="1" to-layer="485" to-port="0" />
<edge from-layer="483" from-port="0" to-layer="485" to-port="1" />
<edge from-layer="484" from-port="0" to-layer="485" to-port="2" />
<edge from-layer="485" from-port="3" to-layer="487" to-port="0" />
<edge from-layer="486" from-port="0" to-layer="487" to-port="1" />
<edge from-layer="487" from-port="2" to-layer="488" to-port="2" />
<edge from-layer="488" from-port="3" to-layer="489" to-port="1" />
<edge from-layer="489" from-port="2" to-layer="491" to-port="0" />
<edge from-layer="490" from-port="0" to-layer="491" to-port="1" />
<edge from-layer="491" from-port="2" to-layer="492" to-port="1" />
<edge from-layer="492" from-port="2" to-layer="493" to-port="0" />
<edge from-layer="493" from-port="1" to-layer="494" to-port="0" />
<edge from-layer="494" from-port="1" to-layer="495" to-port="0" />
<edge from-layer="495" from-port="1" to-layer="535" to-port="0" />
<edge from-layer="496" from-port="0" to-layer="499" to-port="0" />
<edge from-layer="497" from-port="0" to-layer="498" to-port="1" />
<edge from-layer="498" from-port="2" to-layer="499" to-port="1" />
<edge from-layer="499" from-port="2" to-layer="500" to-port="0" />
<edge from-layer="499" from-port="2" to-layer="524" to-port="0" />
<edge from-layer="499" from-port="2" to-layer="513" to-port="0" />
<edge from-layer="499" from-port="2" to-layer="506" to-port="0" />
<edge from-layer="500" from-port="1" to-layer="503" to-port="0" />
<edge from-layer="501" from-port="0" to-layer="503" to-port="1" />
<edge from-layer="502" from-port="0" to-layer="503" to-port="2" />
<edge from-layer="503" from-port="3" to-layer="505" to-port="0" />
<edge from-layer="503" from-port="3" to-layer="528" to-port="0" />
<edge from-layer="504" from-port="0" to-layer="505" to-port="1" />
<edge from-layer="505" from-port="2" to-layer="523" to-port="0" />
<edge from-layer="506" from-port="1" to-layer="509" to-port="0" />
<edge from-layer="507" from-port="0" to-layer="509" to-port="1" />
<edge from-layer="508" from-port="0" to-layer="509" to-port="2" />
<edge from-layer="509" from-port="3" to-layer="511" to-port="0" />
<edge from-layer="509" from-port="3" to-layer="530" to-port="0" />
<edge from-layer="510" from-port="0" to-layer="511" to-port="1" />
<edge from-layer="511" from-port="2" to-layer="523" to-port="1" />
<edge from-layer="512" from-port="0" to-layer="523" to-port="2" />
<edge from-layer="513" from-port="1" to-layer="516" to-port="0" />
<edge from-layer="514" from-port="0" to-layer="516" to-port="1" />
<edge from-layer="515" from-port="0" to-layer="516" to-port="2" />
<edge from-layer="516" from-port="3" to-layer="518" to-port="0" />
<edge from-layer="517" from-port="0" to-layer="518" to-port="1" />
<edge from-layer="518" from-port="2" to-layer="519" to-port="0" />
<edge from-layer="519" from-port="1" to-layer="520" to-port="0" />
<edge from-layer="520" from-port="1" to-layer="532" to-port="0" />
<edge from-layer="520" from-port="1" to-layer="522" to-port="0" />
<edge from-layer="521" from-port="0" to-layer="522" to-port="1" />
<edge from-layer="522" from-port="2" to-layer="523" to-port="3" />
<edge from-layer="523" from-port="4" to-layer="524" to-port="1" />
<edge from-layer="524" from-port="2" to-layer="526" to-port="0" />
<edge from-layer="525" from-port="0" to-layer="526" to-port="1" />
<edge from-layer="526" from-port="2" to-layer="534" to-port="0" />
<edge from-layer="527" from-port="0" to-layer="528" to-port="1" />
<edge from-layer="528" from-port="2" to-layer="533" to-port="0" />
<edge from-layer="529" from-port="0" to-layer="530" to-port="1" />
<edge from-layer="530" from-port="2" to-layer="533" to-port="1" />
<edge from-layer="531" from-port="0" to-layer="532" to-port="1" />
<edge from-layer="532" from-port="2" to-layer="533" to-port="2" />
<edge from-layer="533" from-port="3" to-layer="534" to-port="1" />
<edge from-layer="534" from-port="2" to-layer="535" to-port="1" />
<edge from-layer="535" from-port="2" to-layer="536" to-port="0" />
<edge from-layer="535" from-port="2" to-layer="547" to-port="0" />
<edge from-layer="535" from-port="2" to-layer="553" to-port="0" />
<edge from-layer="535" from-port="2" to-layer="560" to-port="0" />
<edge from-layer="536" from-port="1" to-layer="539" to-port="0" />
<edge from-layer="537" from-port="0" to-layer="539" to-port="1" />
<edge from-layer="538" from-port="0" to-layer="539" to-port="2" />
<edge from-layer="539" from-port="3" to-layer="541" to-port="0" />
<edge from-layer="540" from-port="0" to-layer="541" to-port="1" />
<edge from-layer="541" from-port="2" to-layer="542" to-port="0" />
<edge from-layer="542" from-port="1" to-layer="543" to-port="0" />
<edge from-layer="543" from-port="1" to-layer="545" to-port="0" />
<edge from-layer="543" from-port="1" to-layer="564" to-port="0" />
<edge from-layer="544" from-port="0" to-layer="545" to-port="1" />
<edge from-layer="545" from-port="2" to-layer="559" to-port="0" />
<edge from-layer="546" from-port="0" to-layer="559" to-port="1" />
<edge from-layer="547" from-port="1" to-layer="550" to-port="0" />
<edge from-layer="548" from-port="0" to-layer="550" to-port="1" />
<edge from-layer="549" from-port="0" to-layer="550" to-port="2" />
<edge from-layer="550" from-port="3" to-layer="566" to-port="0" />
<edge from-layer="550" from-port="3" to-layer="552" to-port="0" />
<edge from-layer="551" from-port="0" to-layer="552" to-port="1" />
<edge from-layer="552" from-port="2" to-layer="559" to-port="2" />
<edge from-layer="553" from-port="1" to-layer="556" to-port="0" />
<edge from-layer="554" from-port="0" to-layer="556" to-port="1" />
<edge from-layer="555" from-port="0" to-layer="556" to-port="2" />
<edge from-layer="556" from-port="3" to-layer="558" to-port="0" />
<edge from-layer="556" from-port="3" to-layer="568" to-port="0" />
<edge from-layer="557" from-port="0" to-layer="558" to-port="1" />
<edge from-layer="558" from-port="2" to-layer="559" to-port="3" />
<edge from-layer="559" from-port="4" to-layer="560" to-port="1" />
<edge from-layer="560" from-port="2" to-layer="562" to-port="0" />
<edge from-layer="561" from-port="0" to-layer="562" to-port="1" />
<edge from-layer="562" from-port="2" to-layer="570" to-port="0" />
<edge from-layer="563" from-port="0" to-layer="564" to-port="1" />
<edge from-layer="564" from-port="2" to-layer="569" to-port="0" />
<edge from-layer="565" from-port="0" to-layer="566" to-port="1" />
<edge from-layer="566" from-port="2" to-layer="569" to-port="1" />
<edge from-layer="567" from-port="0" to-layer="568" to-port="1" />
<edge from-layer="568" from-port="2" to-layer="569" to-port="2" />
<edge from-layer="569" from-port="3" to-layer="570" to-port="1" />
<edge from-layer="570" from-port="2" to-layer="572" to-port="0" />
<edge from-layer="571" from-port="0" to-layer="572" to-port="1" />
<edge from-layer="572" from-port="2" to-layer="573" to-port="1" />
<edge from-layer="573" from-port="2" to-layer="575" to-port="0" />
<edge from-layer="574" from-port="0" to-layer="575" to-port="1" />
<edge from-layer="575" from-port="2" to-layer="591" to-port="0" />
<edge from-layer="576" from-port="0" to-layer="577" to-port="1" />
<edge from-layer="577" from-port="2" to-layer="578" to-port="0" />
<edge from-layer="578" from-port="1" to-layer="581" to-port="0" />
<edge from-layer="579" from-port="0" to-layer="581" to-port="1" />
<edge from-layer="580" from-port="0" to-layer="581" to-port="2" />
<edge from-layer="581" from-port="3" to-layer="583" to-port="0" />
<edge from-layer="582" from-port="0" to-layer="583" to-port="1" />
<edge from-layer="583" from-port="2" to-layer="590" to-port="0" />
<edge from-layer="584" from-port="0" to-layer="585" to-port="1" />
<edge from-layer="585" from-port="2" to-layer="590" to-port="1" />
<edge from-layer="586" from-port="0" to-layer="587" to-port="1" />
<edge from-layer="587" from-port="2" to-layer="590" to-port="2" />
<edge from-layer="588" from-port="0" to-layer="589" to-port="1" />
<edge from-layer="589" from-port="2" to-layer="590" to-port="3" />
<edge from-layer="590" from-port="4" to-layer="591" to-port="1" />
<edge from-layer="591" from-port="2" to-layer="592" to-port="0" />
<edge from-layer="592" from-port="2" to-layer="594" to-port="0" />
<edge from-layer="593" from-port="0" to-layer="594" to-port="1" />
<edge from-layer="594" from-port="2" to-layer="596" to-port="0" />
<edge from-layer="594" from-port="2" to-layer="619" to-port="0" />
<edge from-layer="594" from-port="2" to-layer="681" to-port="0" />
<edge from-layer="595" from-port="0" to-layer="596" to-port="1" />
<edge from-layer="596" from-port="2" to-layer="598" to-port="0" />
<edge from-layer="597" from-port="0" to-layer="598" to-port="1" />
<edge from-layer="598" from-port="2" to-layer="609" to-port="0" />
<edge from-layer="598" from-port="2" to-layer="602" to-port="0" />
<edge from-layer="599" from-port="0" to-layer="608" to-port="0" />
<edge from-layer="600" from-port="0" to-layer="606" to-port="0" />
<edge from-layer="600" from-port="0" to-layer="607" to-port="0" />
<edge from-layer="601" from-port="0" to-layer="607" to-port="1" />
<edge from-layer="602" from-port="1" to-layer="603" to-port="0" />
<edge from-layer="603" from-port="1" to-layer="614" to-port="0" />
<edge from-layer="603" from-port="1" to-layer="605" to-port="0" />
<edge from-layer="604" from-port="0" to-layer="605" to-port="1" />
<edge from-layer="605" from-port="2" to-layer="606" to-port="1" />
<edge from-layer="606" from-port="2" to-layer="607" to-port="2" />
<edge from-layer="607" from-port="3" to-layer="608" to-port="1" />
<edge from-layer="608" from-port="2" to-layer="609" to-port="1" />
<edge from-layer="609" from-port="2" to-layer="618" to-port="0" />
<edge from-layer="610" from-port="0" to-layer="617" to-port="0" />
<edge from-layer="611" from-port="0" to-layer="615" to-port="0" />
<edge from-layer="611" from-port="0" to-layer="616" to-port="0" />
<edge from-layer="612" from-port="0" to-layer="616" to-port="1" />
<edge from-layer="613" from-port="0" to-layer="614" to-port="1" />
<edge from-layer="614" from-port="2" to-layer="615" to-port="1" />
<edge from-layer="615" from-port="2" to-layer="616" to-port="2" />
<edge from-layer="616" from-port="3" to-layer="617" to-port="1" />
<edge from-layer="617" from-port="2" to-layer="618" to-port="1" />
<edge from-layer="618" from-port="2" to-layer="620" to-port="0" />
<edge from-layer="619" from-port="1" to-layer="620" to-port="1" />
<edge from-layer="620" from-port="2" to-layer="621" to-port="0" />
<edge from-layer="621" from-port="2" to-layer="622" to-port="0" />
<edge from-layer="622" from-port="2" to-layer="623" to-port="0" />
<edge from-layer="622" from-port="2" to-layer="624" to-port="0" />
<edge from-layer="623" from-port="1" to-layer="624" to-port="1" />
<edge from-layer="624" from-port="2" to-layer="626" to-port="0" />
<edge from-layer="625" from-port="0" to-layer="626" to-port="1" />
<edge from-layer="626" from-port="2" to-layer="630" to-port="0" />
<edge from-layer="626" from-port="2" to-layer="637" to-port="0" />
<edge from-layer="627" from-port="0" to-layer="636" to-port="0" />
<edge from-layer="628" from-port="0" to-layer="634" to-port="0" />
<edge from-layer="628" from-port="0" to-layer="635" to-port="0" />
<edge from-layer="629" from-port="0" to-layer="635" to-port="1" />
<edge from-layer="630" from-port="1" to-layer="631" to-port="0" />
<edge from-layer="631" from-port="1" to-layer="633" to-port="0" />
<edge from-layer="632" from-port="0" to-layer="633" to-port="1" />
<edge from-layer="633" from-port="2" to-layer="634" to-port="1" />
<edge from-layer="634" from-port="2" to-layer="635" to-port="2" />
<edge from-layer="635" from-port="3" to-layer="636" to-port="1" />
<edge from-layer="636" from-port="2" to-layer="637" to-port="1" />
<edge from-layer="637" from-port="2" to-layer="639" to-port="0" />
<edge from-layer="637" from-port="2" to-layer="662" to-port="0" />
<edge from-layer="638" from-port="0" to-layer="639" to-port="1" />
<edge from-layer="639" from-port="2" to-layer="641" to-port="0" />
<edge from-layer="640" from-port="0" to-layer="641" to-port="1" />
<edge from-layer="641" from-port="2" to-layer="652" to-port="0" />
<edge from-layer="641" from-port="2" to-layer="645" to-port="0" />
<edge from-layer="642" from-port="0" to-layer="651" to-port="0" />
<edge from-layer="643" from-port="0" to-layer="650" to-port="0" />
<edge from-layer="643" from-port="0" to-layer="649" to-port="0" />
<edge from-layer="644" from-port="0" to-layer="650" to-port="1" />
<edge from-layer="645" from-port="1" to-layer="646" to-port="0" />
<edge from-layer="646" from-port="1" to-layer="657" to-port="0" />
<edge from-layer="646" from-port="1" to-layer="648" to-port="0" />
<edge from-layer="647" from-port="0" to-layer="648" to-port="1" />
<edge from-layer="648" from-port="2" to-layer="649" to-port="1" />
<edge from-layer="649" from-port="2" to-layer="650" to-port="2" />
<edge from-layer="650" from-port="3" to-layer="651" to-port="1" />
<edge from-layer="651" from-port="2" to-layer="652" to-port="1" />
<edge from-layer="652" from-port="2" to-layer="661" to-port="0" />
<edge from-layer="653" from-port="0" to-layer="660" to-port="0" />
<edge from-layer="654" from-port="0" to-layer="659" to-port="0" />
<edge from-layer="654" from-port="0" to-layer="658" to-port="0" />
<edge from-layer="655" from-port="0" to-layer="659" to-port="1" />
<edge from-layer="656" from-port="0" to-layer="657" to-port="1" />
<edge from-layer="657" from-port="2" to-layer="658" to-port="1" />
<edge from-layer="658" from-port="2" to-layer="659" to-port="2" />
<edge from-layer="659" from-port="3" to-layer="660" to-port="1" />
<edge from-layer="660" from-port="2" to-layer="661" to-port="1" />
<edge from-layer="661" from-port="2" to-layer="663" to-port="0" />
<edge from-layer="662" from-port="1" to-layer="663" to-port="1" />
<edge from-layer="663" from-port="2" to-layer="664" to-port="0" />
<edge from-layer="664" from-port="2" to-layer="665" to-port="0" />
<edge from-layer="665" from-port="2" to-layer="666" to-port="0" />
<edge from-layer="665" from-port="2" to-layer="667" to-port="0" />
<edge from-layer="666" from-port="1" to-layer="667" to-port="1" />
<edge from-layer="667" from-port="2" to-layer="669" to-port="0" />
<edge from-layer="668" from-port="0" to-layer="669" to-port="1" />
<edge from-layer="669" from-port="2" to-layer="680" to-port="0" />
<edge from-layer="669" from-port="2" to-layer="673" to-port="0" />
<edge from-layer="670" from-port="0" to-layer="679" to-port="0" />
<edge from-layer="671" from-port="0" to-layer="677" to-port="0" />
<edge from-layer="671" from-port="0" to-layer="678" to-port="0" />
<edge from-layer="672" from-port="0" to-layer="678" to-port="1" />
<edge from-layer="673" from-port="1" to-layer="674" to-port="0" />
<edge from-layer="674" from-port="1" to-layer="676" to-port="0" />
<edge from-layer="675" from-port="0" to-layer="676" to-port="1" />
<edge from-layer="676" from-port="2" to-layer="677" to-port="1" />
<edge from-layer="677" from-port="2" to-layer="678" to-port="2" />
<edge from-layer="678" from-port="3" to-layer="679" to-port="1" />
<edge from-layer="679" from-port="2" to-layer="680" to-port="1" />
<edge from-layer="680" from-port="2" to-layer="681" to-port="1" />
<edge from-layer="681" from-port="2" to-layer="683" to-port="0" />
<edge from-layer="682" from-port="0" to-layer="683" to-port="1" />
<edge from-layer="683" from-port="2" to-layer="685" to-port="0" />
<edge from-layer="683" from-port="2" to-layer="708" to-port="0" />
<edge from-layer="684" from-port="0" to-layer="685" to-port="1" />
<edge from-layer="685" from-port="2" to-layer="687" to-port="0" />
<edge from-layer="686" from-port="0" to-layer="687" to-port="1" />
<edge from-layer="687" from-port="2" to-layer="698" to-port="0" />
<edge from-layer="687" from-port="2" to-layer="691" to-port="0" />
<edge from-layer="688" from-port="0" to-layer="697" to-port="0" />
<edge from-layer="689" from-port="0" to-layer="696" to-port="0" />
<edge from-layer="689" from-port="0" to-layer="695" to-port="0" />
<edge from-layer="690" from-port="0" to-layer="696" to-port="1" />
<edge from-layer="691" from-port="1" to-layer="692" to-port="0" />
<edge from-layer="692" from-port="1" to-layer="703" to-port="0" />
<edge from-layer="692" from-port="1" to-layer="694" to-port="0" />
<edge from-layer="693" from-port="0" to-layer="694" to-port="1" />
<edge from-layer="694" from-port="2" to-layer="695" to-port="1" />
<edge from-layer="695" from-port="2" to-layer="696" to-port="2" />
<edge from-layer="696" from-port="3" to-layer="697" to-port="1" />
<edge from-layer="697" from-port="2" to-layer="698" to-port="1" />
<edge from-layer="698" from-port="2" to-layer="707" to-port="0" />
<edge from-layer="699" from-port="0" to-layer="706" to-port="0" />
<edge from-layer="700" from-port="0" to-layer="705" to-port="0" />
<edge from-layer="700" from-port="0" to-layer="704" to-port="0" />
<edge from-layer="701" from-port="0" to-layer="705" to-port="1" />
<edge from-layer="702" from-port="0" to-layer="703" to-port="1" />
<edge from-layer="703" from-port="2" to-layer="704" to-port="1" />
<edge from-layer="704" from-port="2" to-layer="705" to-port="2" />
<edge from-layer="705" from-port="3" to-layer="706" to-port="1" />
<edge from-layer="706" from-port="2" to-layer="707" to-port="1" />
<edge from-layer="707" from-port="2" to-layer="709" to-port="0" />
<edge from-layer="708" from-port="1" to-layer="709" to-port="1" />
<edge from-layer="709" from-port="2" to-layer="710" to-port="0" />
<edge from-layer="710" from-port="2" to-layer="711" to-port="0" />
<edge from-layer="711" from-port="2" to-layer="712" to-port="0" />
<edge from-layer="711" from-port="2" to-layer="713" to-port="0" />
<edge from-layer="712" from-port="1" to-layer="713" to-port="1" />
<edge from-layer="713" from-port="2" to-layer="715" to-port="0" />
<edge from-layer="714" from-port="0" to-layer="715" to-port="1" />
<edge from-layer="715" from-port="2" to-layer="719" to-port="0" />
<edge from-layer="715" from-port="2" to-layer="726" to-port="0" />
<edge from-layer="716" from-port="0" to-layer="725" to-port="0" />
<edge from-layer="717" from-port="0" to-layer="724" to-port="0" />
<edge from-layer="717" from-port="0" to-layer="723" to-port="0" />
<edge from-layer="718" from-port="0" to-layer="724" to-port="1" />
<edge from-layer="719" from-port="1" to-layer="720" to-port="0" />
<edge from-layer="720" from-port="1" to-layer="722" to-port="0" />
<edge from-layer="721" from-port="0" to-layer="722" to-port="1" />
<edge from-layer="722" from-port="2" to-layer="723" to-port="1" />
<edge from-layer="723" from-port="2" to-layer="724" to-port="2" />
<edge from-layer="724" from-port="3" to-layer="725" to-port="1" />
<edge from-layer="725" from-port="2" to-layer="726" to-port="1" />
<edge from-layer="726" from-port="2" to-layer="728" to-port="0" />
<edge from-layer="727" from-port="0" to-layer="728" to-port="1" />
<edge from-layer="728" from-port="2" to-layer="732" to-port="0" />
<edge from-layer="728" from-port="2" to-layer="739" to-port="0" />
<edge from-layer="729" from-port="0" to-layer="738" to-port="0" />
<edge from-layer="730" from-port="0" to-layer="736" to-port="0" />
<edge from-layer="730" from-port="0" to-layer="737" to-port="0" />
<edge from-layer="731" from-port="0" to-layer="737" to-port="1" />
<edge from-layer="732" from-port="1" to-layer="733" to-port="0" />
<edge from-layer="733" from-port="1" to-layer="735" to-port="0" />
<edge from-layer="734" from-port="0" to-layer="735" to-port="1" />
<edge from-layer="735" from-port="2" to-layer="736" to-port="1" />
<edge from-layer="736" from-port="2" to-layer="737" to-port="2" />
<edge from-layer="737" from-port="3" to-layer="738" to-port="1" />
<edge from-layer="738" from-port="2" to-layer="739" to-port="1" />
<edge from-layer="739" from-port="2" to-layer="741" to-port="0" />
<edge from-layer="739" from-port="2" to-layer="762" to-port="0" />
<edge from-layer="739" from-port="2" to-layer="754" to-port="0" />
<edge from-layer="740" from-port="0" to-layer="754" to-port="1" />
<edge from-layer="741" from-port="1" to-layer="744" to-port="0" />
<edge from-layer="742" from-port="0" to-layer="754" to-port="4" />
<edge from-layer="742" from-port="0" to-layer="762" to-port="4" />
<edge from-layer="742" from-port="0" to-layer="744" to-port="1" />
<edge from-layer="743" from-port="0" to-layer="744" to-port="2" />
<edge from-layer="744" from-port="3" to-layer="746" to-port="0" />
<edge from-layer="745" from-port="0" to-layer="746" to-port="1" />
<edge from-layer="746" from-port="2" to-layer="748" to-port="0" />
<edge from-layer="747" from-port="0" to-layer="748" to-port="1" />
<edge from-layer="748" from-port="2" to-layer="758" to-port="0" />
<edge from-layer="748" from-port="2" to-layer="750" to-port="0" />
<edge from-layer="749" from-port="0" to-layer="750" to-port="1" />
<edge from-layer="750" from-port="2" to-layer="754" to-port="2" />
<edge from-layer="750" from-port="2" to-layer="760" to-port="0" />
<edge from-layer="750" from-port="2" to-layer="762" to-port="1" />
<edge from-layer="751" from-port="0" to-layer="753" to-port="0" />
<edge from-layer="752" from-port="0" to-layer="753" to-port="1" />
<edge from-layer="753" from-port="2" to-layer="754" to-port="3" />
<edge from-layer="754" from-port="5" to-layer="815" to-port="0" />
<edge from-layer="754" from-port="5" to-layer="788" to-port="0" />
<edge from-layer="754" from-port="5" to-layer="782" to-port="0" />
<edge from-layer="754" from-port="5" to-layer="776" to-port="0" />
<edge from-layer="754" from-port="5" to-layer="770" to-port="0" />
<edge from-layer="755" from-port="0" to-layer="764" to-port="0" />
<edge from-layer="756" from-port="0" to-layer="763" to-port="0" />
<edge from-layer="757" from-port="0" to-layer="758" to-port="1" />
<edge from-layer="758" from-port="2" to-layer="762" to-port="2" />
<edge from-layer="759" from-port="0" to-layer="761" to-port="0" />
<edge from-layer="760" from-port="1" to-layer="761" to-port="1" />
<edge from-layer="761" from-port="2" to-layer="762" to-port="3" />
<edge from-layer="762" from-port="5" to-layer="763" to-port="1" />
<edge from-layer="763" from-port="2" to-layer="764" to-port="1" />
<edge from-layer="764" from-port="2" to-layer="766" to-port="0" />
<edge from-layer="765" from-port="0" to-layer="766" to-port="1" />
<edge from-layer="766" from-port="2" to-layer="767" to-port="0" />
<edge from-layer="767" from-port="1" to-layer="814" to-port="0" />
<edge from-layer="768" from-port="0" to-layer="810" to-port="0" />
<edge from-layer="769" from-port="0" to-layer="795" to-port="0" />
<edge from-layer="770" from-port="1" to-layer="773" to-port="0" />
<edge from-layer="771" from-port="0" to-layer="773" to-port="1" />
<edge from-layer="772" from-port="0" to-layer="773" to-port="2" />
<edge from-layer="773" from-port="3" to-layer="775" to-port="0" />
<edge from-layer="774" from-port="0" to-layer="775" to-port="1" />
<edge from-layer="775" from-port="2" to-layer="794" to-port="0" />
<edge from-layer="776" from-port="1" to-layer="779" to-port="0" />
<edge from-layer="777" from-port="0" to-layer="779" to-port="1" />
<edge from-layer="778" from-port="0" to-layer="779" to-port="2" />
<edge from-layer="779" from-port="3" to-layer="781" to-port="0" />
<edge from-layer="780" from-port="0" to-layer="781" to-port="1" />
<edge from-layer="781" from-port="2" to-layer="794" to-port="1" />
<edge from-layer="782" from-port="1" to-layer="785" to-port="0" />
<edge from-layer="783" from-port="0" to-layer="785" to-port="1" />
<edge from-layer="784" from-port="0" to-layer="785" to-port="2" />
<edge from-layer="785" from-port="3" to-layer="787" to-port="0" />
<edge from-layer="786" from-port="0" to-layer="787" to-port="1" />
<edge from-layer="787" from-port="2" to-layer="794" to-port="2" />
<edge from-layer="788" from-port="1" to-layer="791" to-port="0" />
<edge from-layer="789" from-port="0" to-layer="791" to-port="1" />
<edge from-layer="790" from-port="0" to-layer="791" to-port="2" />
<edge from-layer="791" from-port="3" to-layer="793" to-port="0" />
<edge from-layer="792" from-port="0" to-layer="793" to-port="1" />
<edge from-layer="793" from-port="2" to-layer="794" to-port="3" />
<edge from-layer="794" from-port="4" to-layer="795" to-port="1" />
<edge from-layer="795" from-port="2" to-layer="796" to-port="0" />
<edge from-layer="796" from-port="1" to-layer="799" to-port="0" />
<edge from-layer="796" from-port="1" to-layer="804" to-port="0" />
<edge from-layer="797" from-port="0" to-layer="804" to-port="1" />
<edge from-layer="797" from-port="0" to-layer="799" to-port="1" />
<edge from-layer="798" from-port="0" to-layer="804" to-port="2" />
<edge from-layer="798" from-port="0" to-layer="799" to-port="2" />
<edge from-layer="799" from-port="3" to-layer="800" to-port="0" />
<edge from-layer="800" from-port="1" to-layer="802" to-port="0" />
<edge from-layer="801" from-port="0" to-layer="802" to-port="1" />
<edge from-layer="802" from-port="2" to-layer="803" to-port="0" />
<edge from-layer="803" from-port="1" to-layer="809" to-port="0" />
<edge from-layer="804" from-port="3" to-layer="806" to-port="0" />
<edge from-layer="804" from-port="3" to-layer="807" to-port="1" />
<edge from-layer="805" from-port="0" to-layer="806" to-port="1" />
<edge from-layer="806" from-port="2" to-layer="807" to-port="0" />
<edge from-layer="807" from-port="2" to-layer="808" to-port="0" />
<edge from-layer="808" from-port="1" to-layer="809" to-port="1" />
<edge from-layer="809" from-port="2" to-layer="810" to-port="1" />
<edge from-layer="810" from-port="2" to-layer="812" to-port="0" />
<edge from-layer="811" from-port="0" to-layer="812" to-port="1" />
<edge from-layer="812" from-port="2" to-layer="813" to-port="0" />
<edge from-layer="813" from-port="1" to-layer="814" to-port="1" />
<edge from-layer="814" from-port="2" to-layer="815" to-port="1" />
<edge from-layer="815" from-port="2" to-layer="816" to-port="0" />
</edges>
<rt_info />
</net>