Cherry pick #2235 to rls2.1 (#2236)
* cherry pick #2235

* Update linear_fusion.py

* Update linear_fusion.py

* Update linear_fusion.py
jianan-gu authored Nov 6, 2023
1 parent 64b0681 commit be34996
Showing 1 changed file with 12 additions and 8 deletions.
linear_fusion.py
@@ -27,7 +27,7 @@ def __init__(self, module, tpp=False, woq=False):
         self.linear = module
 
     def forward(self, x):
-        if self.tpp:
+        if self.tpp and not self.linear.tpp_fallback:
             x = x.to(self.dtype).contiguous()
             return torch.ops.torch_ipex.tpp_linear_silu(
                 x,
@@ -45,7 +45,7 @@ def __init__(self, module, tpp=False, woq=False):
         self.linear = module
 
     def forward(self, x):
-        if self.tpp:
+        if self.tpp and not self.linear.tpp_fallback:
             x = x.to(self.dtype).contiguous()
             return torch.ops.torch_ipex.tpp_linear_relu(
                 x,
@@ -63,7 +63,7 @@ def __init__(self, module, tpp=False, woq=False):
         self.linear = module
 
     def forward(self, x, y):
-        if self.tpp:
+        if self.tpp and not self.linear.tpp_fallback:
             x = x.to(self.dtype).contiguous()
             y = y.to(self.dtype).contiguous()
             return torch.ops.torch_ipex.tpp_linear_mul(
@@ -83,7 +83,7 @@ def __init__(self, module, tpp=False, woq=False):
         self.linear = module
 
     def forward(self, x, y):
-        if self.tpp:
+        if self.tpp and not self.linear.tpp_fallback:
             x = x.to(self.dtype).contiguous()
             y = y.to(self.dtype).contiguous()
             return torch.ops.torch_ipex.tpp_linear_add(
@@ -114,7 +114,7 @@ def __init__(self, module, tpp=False, woq=False):
         self.linear = module
 
     def forward(self, x, y, z):
-        if self.tpp:
+        if self.tpp and not self.linear.tpp_fallback:
             x = x.to(self.dtype).contiguous()
             y = y.to(self.dtype).contiguous()
             z = z.to(self.dtype).contiguous()
@@ -147,7 +147,7 @@ def __init__(self, module, tpp=False, woq=False):
         self.linear = module
 
     def forward(self, x):
-        if self.tpp:
+        if self.tpp and not self.linear.tpp_fallback:
             x = x.to(self.dtype).contiguous()
             return torch.ops.torch_ipex.tpp_linear_gelu(
                 x,
@@ -186,7 +186,7 @@ def __init__(self, module, tpp=False, woq=False):
         self.gelu = nn.GELU()
 
     def forward(self, x):
-        if self.tpp:
+        if self.tpp and not self.linear.tpp_fallback:
             x = x.to(self.dtype).contiguous()
             return torch.ops.torch_ipex.tpp_linear_gelu(
                 x,
@@ -320,7 +320,11 @@ def __init__(self, module_s, module_m, tpp=False, woq=False):
         self.dtype = module_s.weight.dtype if self.tpp else None
 
     def forward(self, x):
-        if self.tpp:
+        if (
+            self.tpp
+            and not self.linear_s.tpp_fallback
+            and not self.linear_m.tpp_fallback
+        ):
             x = x.to(self.dtype).contiguous()
             x1 = torch.ops.torch_ipex.tpp_linear_silu(
                 x,
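
For context, every hunk above applies the same guard: the fused forward only dispatches to the TPP torch_ipex kernel when TPP is enabled and the wrapped linear has not set its tpp_fallback flag, so layers the TPP backend could not handle take the regular eager path instead. Below is a minimal sketch of that pattern, assuming an illustrative class name, a simplified argument list for the fused op, and an eager fallback branch that this diff does not show.

import torch
import torch.nn as nn


class _SketchLinearSilu(nn.Module):
    # Illustrative stand-in for the fused linear + SiLU wrapper in linear_fusion.py.
    def __init__(self, module, tpp=False, woq=False):
        super().__init__()
        self.tpp = tpp
        self.woq = woq
        self.linear = module  # wrapped nn.Linear; assumed to carry a tpp_fallback attribute
        self.dtype = module.weight.dtype if tpp else None

    def forward(self, x):
        # Take the fused TPP kernel only when TPP is on and prepacking did not fall back.
        if self.tpp and not self.linear.tpp_fallback:
            x = x.to(self.dtype).contiguous()
            # Argument list is an assumption here; the real calls appear in the hunks above.
            return torch.ops.torch_ipex.tpp_linear_silu(
                x,
                self.linear.weight,
                self.linear.bias,
                self.linear.out_features,
            )
        # Assumed eager fallback; this branch is outside the diff.
        return nn.functional.silu(self.linear(x))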
