|
|
""" |
|
|
2025.3.17 |
|
|
2025.3.19 |
|
|
4.50.0 |
|
|
0.15.2 |
|
|
__UNSLOTH_VERSIONING__ |
|
|
""" |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import os
import importlib.util

# Enable Unsloth Studio features only when the package is installed and not
# explicitly disabled via the UNSLOTH_STUDIO_DISABLED environment variable.
if importlib.util.find_spec("unsloth_studio") is None:
    UNSLOTH_STUDIO_ENABLED = False
else:
    UNSLOTH_STUDIO_ENABLED = os.environ.get("UNSLOTH_STUDIO_DISABLED", "0") == "0"

from typing import List, Dict, Tuple, Optional, Any, Callable
import math

# Configuration options for torch.compile's Inductor backend.
torch_compile_options = {
    'epilogue_fusion'   : True,
    'max_autotune'      : False,
    'shape_padding'     : True,
    'trace.enabled'     : False,
    'triton.cudagraphs' : False,
}
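# A minimal usage sketch (assumption, not part of the generated file): a dict like
# torch_compile_options is normally forwarded to torch.compile via its `options`
# argument, e.g.
#
#     compiled_fn = torch.compile(
#         some_function,              # hypothetical function to compile
#         fullgraph = False,
#         options   = torch_compile_options,
#     )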
|
|
from torch import Tensor
import torch
import torch.nn as nn
from torch.nn import functional as F
from transformers.models.gemma3.modeling_gemma3 import (List, Optional, Tuple, nn)
|
|
|
|
|
def forward(self, input: Tensor, output_size: Optional[List[int]] = None) -> Tensor:
    # Patched ConvTranspose2d.forward: mirrors torch.nn.ConvTranspose2d.forward,
    # but casts the result back to the input dtype.
    if self.padding_mode != "zeros":
        raise ValueError(
            "Only `zeros` padding mode is supported for ConvTranspose2d"
        )

    assert isinstance(self.padding, tuple)

    # Extra one-sided padding needed so the output matches the requested output_size.
    num_spatial_dims = 2
    output_padding = self._output_padding(
        input,
        output_size,
        self.stride,
        self.padding,
        self.kernel_size,
        num_spatial_dims,
        self.dilation,
    )

    return F.conv_transpose2d(
        input,
        self.weight,
        self.bias,
        self.stride,
        self.padding,
        output_padding,
        self.groups,
        self.dilation,
    ).to(input.dtype)
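# A minimal usage sketch (assumption, not part of the generated file): the patched
# forward above can be bound onto an nn.ConvTranspose2d instance so it picks up the
# dtype-preserving behaviour. The monkey-patching below is purely illustrative.
if __name__ == "__main__":
    import types

    conv = nn.ConvTranspose2d(4, 8, kernel_size = 3, stride = 2)
    conv.forward = types.MethodType(forward, conv)  # bind the patched forward

    x = torch.randn(1, 4, 16, 16)
    y = conv(x)
    assert y.dtype == x.dtype  # output stays in the input dtype
    print(tuple(y.shape))      # e.g. (1, 8, 33, 33)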
|
|
|