Skip to content

Commit

Permalink
Handle dynamic shapes
Browse files Browse the repository at this point in the history
  • Loading branch information
ksxyhtqwlq committed Mar 19, 2024
1 parent bc59bcb commit c4ff9a7
Showing 1 changed file with 2 additions and 4 deletions.
6 changes: 2 additions & 4 deletions frontend/guard_tracker.py
Original file line number Diff line number Diff line change
Expand Up @@ -1744,7 +1744,7 @@ def set_if_inplace_return() -> None:
"flatten_parameters", "numel", "children",
"named_parameters", "_weights_have_changed",
"check_forward_args", "permute_hidden", "_check_input_dim",
"parameters"):
"parameters", "_has_torch_function_unary"):
return
if hasattr(func, "__module__"
) and func.__module__ == 'torch.autograd.profiler':
Expand Down Expand Up @@ -1945,10 +1945,8 @@ def BINARY_SUBSCR(self, inst: Instruction) -> None:
obj1 = get_value_stack_from_top(self.frame, 1)
obj2 = get_value_stack_from_top(self.frame, 0)
if torch.is_tensor(obj1):
if torch.is_tensor(obj2):
if torch.is_tensor(obj2) and obj2.dtype == torch.bool:
raise ValueError("dynamic shape in tensor")
if dyn.contains(obj2):
raise ValueError("dynamic shape in dyn scalar")
self.call_function(operator.getitem, [obj1, obj2], {})

def unary_operation(self, func: Callable[..., Any]) -> None:
Expand Down

0 comments on commit c4ff9a7

Please sign in to comment.