
Commit df80243
Update cache files [skip ci]
ivy-dev-bot committed Jul 19, 2024
1 parent c260163 commit df80243
Showing 758 changed files with 103,986 additions and 322,905 deletions.
1 change: 1 addition & 0 deletions .gitattributes
@@ -0,0 +1 @@
+ivy/compiler/_cache/*.pkl filter=lfs diff=lfs merge=lfs -text
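This is the standard Git LFS routing rule: the compiler's cached .pkl files are stored as LFS pointers instead of ordinary blobs, keeping the large generated caches out of regular object storage. Running `git lfs track "ivy/compiler/_cache/*.pkl"` generates exactly this line.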
@@ -47,11 +47,9 @@ def forward(self, input):
"""
return F.batch_norm(
input,
-            (
-                self.running_mean
-                if not self.training or self.track_running_stats
-                else None
-            ),
+            self.running_mean
+            if not self.training or self.track_running_stats
+            else None,
self.running_var if not self.training or self.track_running_stats else None,
self.weight,
self.bias,
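The hunk above only drops the redundant formatting parentheses around the conditional; the argument logic is unchanged: the running buffers are passed whenever the module is in eval mode or track_running_stats is set, and None otherwise, which makes batch_norm fall back to per-batch statistics. A minimal sketch of that selection rule (the helper name is illustrative, not from the codebase):

    def select_stats(training, track_running_stats, running_mean, running_var):
        # Buffers are consumed in eval mode, or in training when stats are
        # tracked; None triggers per-batch statistics inside batch_norm.
        use_buffers = not training or track_running_stats
        return (
            running_mean if use_buffers else None,
            running_var if use_buffers else None,
        )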
@@ -0,0 +1,23 @@
+import ivy.functional.frontends.torch.nn.functional as F
+
+import typing
+
+from .Translated__MaxPoolNd import Translated__MaxPoolNd
+
+
+class Translated_MaxPool2d(Translated__MaxPoolNd):
+    kernel_size: typing.Any
+    stride: typing.Any
+    padding: typing.Any
+    dilation: typing.Any
+
+    def forward(self, input):
+        return F.max_pool2d(
+            input,
+            self.kernel_size,
+            self.stride,
+            self.padding,
+            self.dilation,
+            ceil_mode=self.ceil_mode,
+            return_indices=self.return_indices,
+        )
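A quick sanity check of the translated module, assuming the ivy torch frontend mirrors torch.nn.MaxPool2d semantics (the tensor construction is illustrative):

    import ivy

    pool = Translated_MaxPool2d(kernel_size=2, stride=2)
    x = ivy.random_normal(shape=(1, 3, 8, 8))  # (N, C, H, W)
    y = pool.forward(x)  # expected spatial size 4 x 4
    # with return_indices=True the call would return (values, indices), as in torch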
@@ -0,0 +1,38 @@
+import ivy.functional.frontends.torch.nn as nn
+
+import typing
+
+
+class Translated__MaxPoolNd(nn.Module):
+    __constants__ = [
+        "kernel_size",
+        "stride",
+        "padding",
+        "dilation",
+        "return_indices",
+        "ceil_mode",
+    ]
+    return_indices: typing.Any
+    ceil_mode: typing.Any
+
+    def __init__(
+        self,
+        kernel_size,
+        stride=None,
+        padding=0,
+        dilation=1,
+        return_indices=False,
+        ceil_mode=False,
+    ):
+        super().__init__()
+        self.kernel_size = kernel_size
+        self.stride = stride if stride is not None else kernel_size
+        self.padding = padding
+        self.dilation = dilation
+        self.return_indices = return_indices
+        self.ceil_mode = ceil_mode
+
+    def extra_repr(self):
+        return "kernel_size={kernel_size}, stride={stride}, padding={padding}, dilation={dilation}, ceil_mode={ceil_mode}".format(
+            **self.__dict__
+        )
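Note the stride fallback in __init__: exactly as in torch.nn, omitting stride gives a non-overlapping pool.

    pool = Translated_MaxPool2d(3)  # no stride given
    assert pool.stride == 3         # stride defaults to kernel_size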
@@ -1,8 +1,8 @@
from .ivy__BatchNorm import ivy__BatchNorm
-from .ivy__helpers import ivy_dim
+from .ivy__helpers import ivy_dim_frnt_


class ivy_BatchNorm2d(ivy__BatchNorm):
def _check_input_dim(self, input):
-        if ivy_dim(input) != 4:
-            raise ValueError(f"expected 4D input (got {ivy_dim(input)}D input)")
+        if ivy_dim_frnt_(input) != 4:
+            raise ValueError(f"expected 4D input (got {ivy_dim_frnt_(input)}D input)")
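The rename is mechanical: helper names now carry the frontend _frnt_ suffix, and behaviour is unchanged. BatchNorm2d still accepts only 4-D (N, C, H, W) input; illustratively, assuming construction mirrors torch.nn.BatchNorm2d:

    bn = ivy_BatchNorm2d(3)
    bn._check_input_dim(ivy.ones((2, 3, 8, 8)))  # OK: 4-D input
    bn._check_input_dim(ivy.ones((3, 8, 8)))     # raises ValueError: 3D input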
@@ -1,6 +1,6 @@
from .ivy__NormBase import ivy__NormBase
-from .ivy__helpers import ivy_add_
-from .ivy__helpers import ivy_batch_norm
+from .ivy__helpers import ivy_add__frnt_
+from .ivy__helpers import ivy_batch_norm_frnt


class ivy__BatchNorm(ivy__NormBase):
@@ -27,7 +27,7 @@ def forward(self, input):
exponential_average_factor = self.momentum
if self.training and self.track_running_stats:
if self.num_batches_tracked is not None:
-                self.num_batches_tracked = ivy_add_(self.num_batches_tracked, 1)
+                ivy_add__frnt_(self.num_batches_tracked, 1)
if self.momentum is None:
exponential_average_factor = 1.0 / float(self.num_batches_tracked)
else:
@@ -45,13 +45,11 @@ def forward(self, input):
passed when the update should occur (i.e. in training mode when they are tracked), or when buffer stats are
used for normalization (i.e. in eval mode when buffers are not None).
"""
-        normalized, self.running_mean, self.running_var = ivy_batch_norm(
+        normalized, self.running_mean, self.running_var = ivy_batch_norm_frnt(
input,
-            (
-                self.running_mean
-                if not self.training or self.track_running_stats
-                else None
-            ),
+            self.running_mean
+            if not self.training or self.track_running_stats
+            else None,
self.running_var if not self.training or self.track_running_stats else None,
self.weight,
self.bias,
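Two substantive edits in this file besides the suffix renames: the batch counter is now bumped in place by ivy_add__frnt_ rather than reassigned, and the same redundant parentheses are dropped from the buffer-selection conditional. The momentum handling visible in the first hunk is worth spelling out; with momentum=None the layer switches to a cumulative moving average:

    def exponential_average_factor(momentum, num_batches_tracked):
        # momentum=None -> cumulative average: the update weight decays as
        # 1/n, so every batch seen so far contributes equally.
        if momentum is None:
            return 1.0 / float(num_batches_tracked)
        return momentum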
@@ -4,17 +4,17 @@

import typing

-from .ivy__helpers import ivy_add
-from .ivy__helpers import ivy_device
-from .ivy__helpers import ivy_empty
-from .ivy__helpers import ivy_fill_
-from .ivy__helpers import ivy_ones
+from .ivy__helpers import ivy_add_frnt_
+from .ivy__helpers import ivy_device_frnt
+from .ivy__helpers import ivy_empty_frnt
+from .ivy__helpers import ivy_fill__frnt_
from .ivy__helpers import ivy_ones_
-from .ivy__helpers import ivy_split
-from .ivy__helpers import ivy_tensor
-from .ivy__helpers import ivy_zero_
-from .ivy__helpers import ivy_zeros
+from .ivy__helpers import ivy_ones_frnt
+from .ivy__helpers import ivy_split_frnt_
+from .ivy__helpers import ivy_tensor_frnt
+from .ivy__helpers import ivy_zero__frnt_
from .ivy__helpers import ivy_zeros_
+from .ivy__helpers import ivy_zeros_frnt


class ivy__NormBase(ivy.Module):
@@ -55,23 +55,23 @@ def __init__(
self.affine = affine
self.track_running_stats = track_running_stats
if self.affine:
-            self.weight = ivy.Array(ivy_empty(num_features, **factory_kwargs))
-            self.bias = ivy.Array(ivy_empty(num_features, **factory_kwargs))
+            self.weight = ivy.Array(ivy_empty_frnt(num_features, **factory_kwargs))
+            self.bias = ivy.Array(ivy_empty_frnt(num_features, **factory_kwargs))
else:
self.register_parameter("weight", None)
self.register_parameter("bias", None)
if self.track_running_stats:
self.register_buffer(
"running_mean", ivy_zeros(num_features, **factory_kwargs)
"running_mean", ivy_zeros_frnt(num_features, **factory_kwargs)
)
self.register_buffer(
"running_var", ivy_ones(num_features, **factory_kwargs)
"running_var", ivy_ones_frnt(num_features, **factory_kwargs)
)
self.running_mean: typing.Any
self.running_var: typing.Any
self.register_buffer(
"num_batches_tracked",
-            ivy_tensor(
+            ivy_tensor_frnt(
0,
dtype=ivy.int64,
**{k: v for k, v in factory_kwargs.items() if k != "dtype"},
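The dict comprehension closing this hunk strips dtype from factory_kwargs, so num_batches_tracked is always created as an int64 counter while device and other factory options still propagate:

    factory_kwargs = {"device": "cpu", "dtype": "float32"}
    passthrough = {k: v for k, v in factory_kwargs.items() if k != "dtype"}
    assert passthrough == {"device": "cpu"}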
@@ -86,9 +86,9 @@ def __init__(

def reset_running_stats(self):
if self.track_running_stats:
-            ivy_zero_(self.running_mean)
-            ivy_fill_(self.running_var, 1)
-            ivy_zero_(self.num_batches_tracked)
+            ivy_zero__frnt_(self.running_mean)
+            ivy_fill__frnt_(self.running_var, 1)
+            ivy_zero__frnt_(self.num_batches_tracked)

def reset_parameters(self):
self.reset_running_stats()
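As in torch, the trailing-underscore helpers mutate their argument in place, so after a reset the module reads mean 0, variance 1, and a zeroed batch counter (construction shown for illustration only):

    bn = ivy_BatchNorm2d(8)
    bn.reset_running_stats()
    # running_mean -> zeros(8), running_var -> ones(8), num_batches_tracked -> 0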
@@ -121,8 +121,8 @@ def _load_from_state_dict(
state_dict[num_batches_tracked_key] = (
self.num_batches_tracked
if self.num_batches_tracked is not None
-            and self.num_batches_tracked.device != ivy_device("meta")
-            else ivy_tensor(0, dtype=ivy.int64)
+            and self.num_batches_tracked.device != ivy_device_frnt("meta")
+            else ivy_tensor_frnt(0, dtype=ivy.int64)
)
super()._load_from_state_dict(
state_dict,
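The conditional above backfills checkpoints that predate the num_batches_tracked buffer: the live buffer is reused when it exists off the meta device, otherwise a fresh int64 zero is written into the loaded state. Schematically (function and parameter names are hypothetical):

    def backfill_counter(existing, device_of, make_zero):
        # Reuse the live buffer unless it is absent or a "meta" placeholder.
        if existing is not None and device_of(existing) != "meta":
            return existing
        return make_zero()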
@@ -192,7 +192,7 @@ def __repr__(self):
extra_lines = []
extra_repr = self._extra_repr()
if extra_repr:
-            extra_lines = ivy_split(extra_repr, "\n")
+            extra_lines = ivy_split_frnt_(extra_repr, "\n")
child_lines = []
for key, module in self._module_dict.items():
mod_str = repr(module)
@@ -297,7 +297,7 @@ def _named_members(
if v is None or id(v) in memo:
continue
if remove_duplicate:
-                ivy_add(memo, id(v))
+                ivy_add_frnt_(memo, id(v))
name = module_prefix + ("." if module_prefix else "") + k
yield name, v
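The memo set is the usual shared-parameter guard: members are keyed by id(), so a tensor reachable through two module paths is yielded once. The plain-Python equivalent of the loop above:

    def dedup(members):
        # members: iterable of (name, value) pairs, possibly with repeats
        memo = set()
        for name, value in members:
            if value is None or id(value) in memo:
                continue
            memo.add(id(value))
            yield name, value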

@@ -378,7 +378,7 @@ def get_parameter(self, target):
def get_submodule(self, target):
if target == "":
return self
-        atoms: typing.Any = ivy_split(target, ".")
+        atoms: typing.Any = ivy_split_frnt_(target, ".")
mod: typing.Any = self
for item in atoms:
if not hasattr(mod, item):
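get_submodule resolves a dotted path one attribute at a time, failing fast when an intermediate module is missing; a target such as "features.0.conv" (hypothetical) walks three levels:

    atoms = "features.0.conv".split(".")
    # -> ["features", "0", "conv"]; each atom is then looked up with
    # getattr on the module reached so far.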
@@ -415,7 +415,7 @@ def named_children(self):
memo = set()
for name, module in self._module_dict.items():
if module is not None and id(module) not in memo:
-                ivy_add(memo, id(module))
+                ivy_add_frnt_(memo, id(module))
yield name, module

def named_modules(self, memo=None, prefix="", remove_duplicate=True):
@@ -427,7 +427,7 @@ def named_modules(self, memo=None, prefix="", remove_duplicate=True):
memo = set()
if id(self) not in memo:
if remove_duplicate:
-                ivy_add(memo, id(self))
+                ivy_add_frnt_(memo, id(self))
yield prefix, self
for name, module in self._module_dict.items():
if module is None:
