Fix bugs with the vww example (validated)

This commit is contained in:
meenchen
2022-12-11 16:56:18 -05:00
parent 64fd217076
commit fcc1eba7c7
4 changed files with 21 additions and 7 deletions


@@ -166,10 +166,13 @@ class PatchResizer:
else:
layer_info["is_patch"] = False
# We need to cut off the link between patch blocks and normal inference blocks,
# We need to:
# 1. cut off the link between patch blocks and normal inference blocks,
# 2. set the lifetime of the input tensor of the first layer in the second stage to start from the beginning,
# so the memory buffers can be allocated successfully
if PatchLayers > 0:
self.layer[PatchLayers].params["input_idx"] = (
str(self.layer[PatchLayers].params["input_idx"]) + "_start_normal_infernece_block"
)
self.layer[PatchLayers].input_tensors[0].graph_idx = self.layer[PatchLayers].params["input_idx"]
self.layer[PatchLayers].params["is_start_of_normal_inference_block"] = True