Source code for towhee.models.layers.activations.hardmish

# Copyright 2021 Ross Wightman . All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This code is modified by Zilliz.

from torch import nn

def hard_mish(x, inplace: bool = False):
    if inplace:
        return x.mul_(0.5 * (x + 2).clamp(min=0, max=2))
    else:
        return 0.5 * x * (x + 2).clamp(min=0, max=2)


class HardMish(nn.Module):
    """
    Hard Mish

    Experimental, based on notes by Mish author Diganta Misra at
    https://github.com/digantamisra98/H-Mish/blob/0da20d4bc58e696b6803f2523c58d3c8a82782d0/README.md

    Args:
        inplace (`bool`):
            Whether to use the inplace version.

    Returns:
        (`torch.Tensor`)
            Output tensor after activation.
    """

    def __init__(self, inplace: bool = False):
        super().__init__()
        self.inplace = inplace

    def forward(self, x):
        return hard_mish(x, self.inplace)
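

# --- Usage sketch (not part of the original source) -------------------------
# A minimal example, assuming PyTorch is installed and this module is
# importable as towhee.models.layers.activations.hardmish. It applies the
# activation through both the HardMish module and the hard_mish function and
# checks that the two forms agree.
#
# import torch
#
# from towhee.models.layers.activations.hardmish import HardMish, hard_mish
#
# x = torch.randn(2, 3)
#
# # Module form, usable inside nn.Sequential or any custom nn.Module.
# act = HardMish(inplace=False)
# y_module = act(x)
#
# # Functional form: 0.5 * x * clamp(x + 2, min=0, max=2).
# y_functional = hard_mish(x)
#
# assert torch.allclose(y_module, y_functional)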