from functools import partial
from typing import Any, Optional

import torch
import torch.nn as nn
import torch.nn.init as init

from ..transforms._presets import ImageClassification
from ..utils import _log_api_usage_once
from ._api import register_model, Weights, WeightsEnum
from ._meta import _IMAGENET_CATEGORIES
from ._utils import _ovewrite_named_param, handle_legacy_interface


__all__ = ["SqueezeNet", "SqueezeNet1_0_Weights", "SqueezeNet1_1_Weights", "squeezenet1_0", "squeezenet1_1"]


class Fire(nn.Module):
    def __init__(self, inplanes: int, squeeze_planes: int, expand1x1_planes: int, expand3x3_planes: int) -> None:
        super().__init__()
        self.inplanes = inplanes
        # 1x1 "squeeze" convolution reduces the channel count before the expand stage
        self.squeeze = nn.Conv2d(inplanes, squeeze_planes, kernel_size=1)
        self.squeeze_activation = nn.ReLU(inplace=True)
        # parallel 1x1 and 3x3 "expand" convolutions whose outputs are concatenated
        self.expand1x1 = nn.Conv2d(squeeze_planes, expand1x1_planes, kernel_size=1)
        self.expand1x1_activation = nn.ReLU(inplace=True)
        self.expand3x3 = nn.Conv2d(squeeze_planes, expand3x3_planes, kernel_size=3, padding=1)
        self.expand3x3_activation = nn.ReLU(inplace=True)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = self.squeeze_activation(self.squeeze(x))
        return torch.cat(
            [self.expand1x1_activation(self.expand1x1(x)), self.expand3x3_activation(self.expand3x3(x))], 1
        )


class SqueezeNet(nn.Module):
    def __init__(self, version: str = "1_0", num_classes: int = 1000, dropout: float = 0.5) -> None:
        super().__init__()
        _log_api_usage_once(self)
        self.num_classes = num_classes
        if version == "1_0":
            self.features = nn.Sequential(
                nn.Conv2d(3, 96, kernel_size=7, stride=2),
                nn.ReLU(inplace=True),
                nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=True),
                Fire(96, 16, 64, 64),
                Fire(128, 16, 64, 64),
                Fire(128, 32, 128, 128),
                nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=True),
                Fire(256, 32, 128, 128),
                Fire(256, 48, 192, 192),
                Fire(384, 48, 192, 192),
                Fire(384, 64, 256, 256),
                nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=True),
                Fire(512, 64, 256, 256),
            )
        elif version == "1_1":
            self.features = nn.Sequential(
                nn.Conv2d(3, 64, kernel_size=3, stride=2),
                nn.ReLU(inplace=True),
                nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=True),
                Fire(64, 16, 64, 64),
                Fire(128, 16, 64, 64),
                nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=True),
                Fire(128, 32, 128, 128),
                Fire(256, 32, 128, 128),
                nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=True),
                Fire(256, 48, 192, 192),
                Fire(384, 48, 192, 192),
                Fire(384, 64, 256, 256),
                Fire(512, 64, 256, 256),
            )
        else:
            raise ValueError(f"Unsupported SqueezeNet version {version}: 1_0 or 1_1 expected")

        # The final convolution is initialized differently from the rest
        final_conv = nn.Conv2d(512, self.num_classes, kernel_size=1)
        self.classifier = nn.Sequential(
            nn.Dropout(p=dropout), final_conv, nn.ReLU(inplace=True), nn.AdaptiveAvgPool2d((1, 1))
        )

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                if m is final_conv:
                    init.normal_(m.weight, mean=0.0, std=0.01)
                else:
                    init.kaiming_uniform_(m.weight)
                if m.bias is not None:
                    init.constant_(m.bias, 0)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = self.features(x)
        x = self.classifier(x)
        return torch.flatten(x, 1)


def _squeezenet(
    version: str,
    weights: Optional[WeightsEnum],
    progress: bool,
    **kwargs: Any,
) -> SqueezeNet:
    if weights is not None:
        _ovewrite_named_param(kwargs, "num_classes", len(weights.meta["categories"]))

    model = SqueezeNet(version, **kwargs)

    if weights is not None:
        model.load_state_dict(weights.get_state_dict(progress=progress, check_hash=True))

    return model


_COMMON_META = {
    "categories": _IMAGENET_CATEGORIES,
    "recipe": "https://github.com/pytorch/vision/pull/49#issuecomment-277560717",
    "_docs": """These weights reproduce closely the results of the paper using a simple training recipe.""",
}


class SqueezeNet1_0_Weights(WeightsEnum):
    IMAGENET1K_V1 = Weights(
        url="https://download.pytorch.org/models/squeezenet1_0-b66bff10.pth",
        transforms=partial(ImageClassification, crop_size=224),
        meta={
            **_COMMON_META,
            "min_size": (21, 21),
            "num_params": 1248424,
            "_metrics": {
                "ImageNet-1K": {
                    "acc@1": 58.092,
                    "acc@5": 80.420,
                }
            },
            "_ops": 0.819,
            "_file_size": 4.778,
        },
    )
    DEFAULT = IMAGENET1K_V1


class SqueezeNet1_1_Weights(WeightsEnum):
    IMAGENET1K_V1 = Weights(
        url="https://download.pytorch.org/models/squeezenet1_1-b8a52dc0.pth",
        transforms=partial(ImageClassification, crop_size=224),
        meta={
            **_COMMON_META,
            "min_size": (17, 17),
            "num_params": 1235496,
            "_metrics": {
                "ImageNet-1K": {
                    "acc@1": 58.178,
                    "acc@5": 80.624,
                }
            },
            "_ops": 0.349,
            "_file_size": 4.729,
        },
    )
    DEFAULT = IMAGENET1K_V1


@register_model()
@handle_legacy_interface(weights=("pretrained", SqueezeNet1_0_Weights.IMAGENET1K_V1))
def squeezenet1_0(
    *, weights: Optional[SqueezeNet1_0_Weights] = None, progress: bool = True, **kwargs: Any
) -> SqueezeNet:
    """SqueezeNet model architecture from the `SqueezeNet: AlexNet-level
    accuracy with 50x fewer parameters and <0.5MB model size
    <https://arxiv.org/abs/1602.07360>`_ paper.

    Args:
        weights (:class:`~torchvision.models.SqueezeNet1_0_Weights`, optional): The
            pretrained weights to use. See
            :class:`~torchvision.models.SqueezeNet1_0_Weights` below for
            more details, and possible values. By default, no pre-trained
            weights are used.
        progress (bool, optional): If True, displays a progress bar of the
            download to stderr. Default is True.
        **kwargs: parameters passed to the ``torchvision.models.squeezenet.SqueezeNet``
            base class. Please refer to the `source code
            <https://github.com/pytorch/vision/blob/main/torchvision/models/squeezenet.py>`_
            for more details about this class.

    .. autoclass:: torchvision.models.SqueezeNet1_0_Weights
        :members:
    """
    weights = SqueezeNet1_0_Weights.verify(weights)
    return _squeezenet("1_0", weights, progress, **kwargs)


@register_model()
@handle_legacy_interface(weights=("pretrained", SqueezeNet1_1_Weights.IMAGENET1K_V1))
def squeezenet1_1(
    *, weights: Optional[SqueezeNet1_1_Weights] = None, progress: bool = True, **kwargs: Any
) -> SqueezeNet:
    """SqueezeNet 1.1 model from the `official SqueezeNet repo
    <https://github.com/DeepScale/SqueezeNet/tree/master/SqueezeNet_v1.1>`_.

    SqueezeNet 1.1 has 2.4x less computation and slightly fewer parameters
    than SqueezeNet 1.0, without sacrificing accuracy.

    Args:
        weights (:class:`~torchvision.models.SqueezeNet1_1_Weights`, optional): The
            pretrained weights to use. See
            :class:`~torchvision.models.SqueezeNet1_1_Weights` below for
            more details, and possible values. By default, no pre-trained
            weights are used.
        progress (bool, optional): If True, displays a progress bar of the
            download to stderr. Default is True.
        **kwargs: parameters passed to the ``torchvision.models.squeezenet.SqueezeNet``
            base class. Please refer to the `source code
            <https://github.com/pytorch/vision/blob/main/torchvision/models/squeezenet.py>`_
            for more details about this class.

    .. autoclass:: torchvision.models.SqueezeNet1_1_Weights
        :members:
    """
    weights = SqueezeNet1_1_Weights.verify(weights)
    return _squeezenet("1_1", weights, progress, **kwargs)
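

# ----------------------------------------------------------------------------
# Usage sketch (not part of the upstream torchvision sources): a minimal,
# hedged example of how the builders and weight enums defined above are meant
# to be used. In practice the same names are imported from the public
# ``torchvision.models`` namespace rather than from this file, and the module's
# relative imports mean it cannot be run directly; the guard below simply keeps
# the example inert on import. ``weights.transforms()`` instantiates the
# ``ImageClassification`` preprocessing preset referenced in the ``Weights``
# entries above.
# ----------------------------------------------------------------------------
if __name__ == "__main__":
    weights = SqueezeNet1_1_Weights.IMAGENET1K_V1
    model = squeezenet1_1(weights=weights)
    model.eval()

    preprocess = weights.transforms()   # resize, center-crop, rescale, normalize
    image = torch.rand(3, 320, 480)     # stand-in for a decoded RGB image tensor
    batch = preprocess(image).unsqueeze(0)

    with torch.inference_mode():
        logits = model(batch)           # shape: (1, 1000)
        class_id = int(logits.argmax(dim=1))
        print(weights.meta["categories"][class_id])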