"""AutoFeatureExtractor class."""

import importlib
import json
import os
import warnings
from collections import OrderedDict
from typing import Dict, Optional, Union

from ...configuration_utils import PretrainedConfig
from ...dynamic_module_utils import get_class_from_dynamic_module, resolve_trust_remote_code
from ...feature_extraction_utils import FeatureExtractionMixin
from ...utils import CONFIG_NAME, FEATURE_EXTRACTOR_NAME, get_file_from_repo, logging
from .auto_factory import _LazyAutoMapping
from .configuration_auto import (
    CONFIG_MAPPING_NAMES,
    AutoConfig,
    model_type_to_module_name,
    replace_list_option_in_docstrings,
)


logger = logging.get_logger(__name__)

FEATURE_EXTRACTOR_MAPPING_NAMES = OrderedDict(
    [
        ("audio-spectrogram-transformer", "ASTFeatureExtractor"),
        ("beit", "BeitFeatureExtractor"),
        ("chinese_clip", "ChineseCLIPFeatureExtractor"),
        ("clap", "ClapFeatureExtractor"),
        ("clip", "CLIPFeatureExtractor"),
        ("clipseg", "ViTFeatureExtractor"),
        ("clvp", "ClvpFeatureExtractor"),
        ("conditional_detr", "ConditionalDetrFeatureExtractor"),
        ("convnext", "ConvNextFeatureExtractor"),
        ("cvt", "ConvNextFeatureExtractor"),
        ("data2vec-audio", "Wav2Vec2FeatureExtractor"),
        ("data2vec-vision", "BeitFeatureExtractor"),
        ("deformable_detr", "DeformableDetrFeatureExtractor"),
        ("deit", "DeiTFeatureExtractor"),
        ("detr", "DetrFeatureExtractor"),
        ("dinat", "ViTFeatureExtractor"),
        ("donut-swin", "DonutFeatureExtractor"),
        ("dpt", "DPTFeatureExtractor"),
        ("encodec", "EncodecFeatureExtractor"),
        ("flava", "FlavaFeatureExtractor"),
        ("glpn", "GLPNFeatureExtractor"),
        ("groupvit", "CLIPFeatureExtractor"),
        ("hubert", "Wav2Vec2FeatureExtractor"),
        ("imagegpt", "ImageGPTFeatureExtractor"),
        ("layoutlmv2", "LayoutLMv2FeatureExtractor"),
        ("layoutlmv3", "LayoutLMv3FeatureExtractor"),
        ("levit", "LevitFeatureExtractor"),
        ("maskformer", "MaskFormerFeatureExtractor"),
        ("mctct", "MCTCTFeatureExtractor"),
        ("mobilenet_v1", "MobileNetV1FeatureExtractor"),
        ("mobilenet_v2", "MobileNetV2FeatureExtractor"),
        ("mobilevit", "MobileViTFeatureExtractor"),
        ("nat", "ViTFeatureExtractor"),
        ("owlvit", "OwlViTFeatureExtractor"),
        ("perceiver", "PerceiverFeatureExtractor"),
        ("poolformer", "PoolFormerFeatureExtractor"),
        ("pop2piano", "Pop2PianoFeatureExtractor"),
        ("regnet", "ConvNextFeatureExtractor"),
        ("resnet", "ConvNextFeatureExtractor"),
        ("seamless_m4t", "SeamlessM4TFeatureExtractor"),
        ("seamless_m4t_v2", "SeamlessM4TFeatureExtractor"),
        ("segformer", "SegformerFeatureExtractor"),
        ("sew", "Wav2Vec2FeatureExtractor"),
        ("sew-d", "Wav2Vec2FeatureExtractor"),
        ("speech_to_text", "Speech2TextFeatureExtractor"),
        ("speecht5", "SpeechT5FeatureExtractor"),
        ("swiftformer", "ViTFeatureExtractor"),
        ("swin", "ViTFeatureExtractor"),
        ("swinv2", "ViTFeatureExtractor"),
        ("table-transformer", "DetrFeatureExtractor"),
        ("timesformer", "VideoMAEFeatureExtractor"),
        ("tvlt", "TvltFeatureExtractor"),
        ("unispeech", "Wav2Vec2FeatureExtractor"),
        ("unispeech-sat", "Wav2Vec2FeatureExtractor"),
        ("univnet", "UnivNetFeatureExtractor"),
        ("van", "ConvNextFeatureExtractor"),
        ("videomae", "VideoMAEFeatureExtractor"),
        ("vilt", "ViltFeatureExtractor"),
        ("vit", "ViTFeatureExtractor"),
        ("vit_mae", "ViTFeatureExtractor"),
        ("vit_msn", "ViTFeatureExtractor"),
        ("wav2vec2", "Wav2Vec2FeatureExtractor"),
        ("wav2vec2-bert", "Wav2Vec2FeatureExtractor"),
        ("wav2vec2-conformer", "Wav2Vec2FeatureExtractor"),
        ("wavlm", "Wav2Vec2FeatureExtractor"),
        ("whisper", "WhisperFeatureExtractor"),
        ("xclip", "CLIPFeatureExtractor"),
        ("yolos", "YolosFeatureExtractor"),
    ]
)

FEATURE_EXTRACTOR_MAPPING = _LazyAutoMapping(CONFIG_MAPPING_NAMES, FEATURE_EXTRACTOR_MAPPING_NAMES)
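# Illustrative note (editor's addition, not in the original module): `_LazyAutoMapping` resolves entries lazily,
# so a configuration class can be used as a key and the matching feature extractor class is only imported on
# first access, e.g.
#
#   from transformers import Wav2Vec2Config
#   FEATURE_EXTRACTOR_MAPPING[Wav2Vec2Config]  # -> Wav2Vec2FeatureExtractor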
def feature_extractor_class_from_name(class_name: str):
    for module_name, extractors in FEATURE_EXTRACTOR_MAPPING_NAMES.items():
        if class_name in extractors:
            module_name = model_type_to_module_name(module_name)

            module = importlib.import_module(f".{module_name}", "transformers.models")
            try:
                return getattr(module, class_name)
            except AttributeError:
                continue

    # Also check feature extractors that were registered dynamically through `AutoFeatureExtractor.register`.
    for _, extractor in FEATURE_EXTRACTOR_MAPPING._extra_content.items():
        if getattr(extractor, "__name__", None) == class_name:
            return extractor

    # We did not find the class, but maybe it's because a dep is missing. In that case, the class will be in the main
    # init and we return the proper dummy to get an appropriate error message.
    main_module = importlib.import_module("transformers")
    if hasattr(main_module, class_name):
        return getattr(main_module, class_name)

    return None


def get_feature_extractor_config(
    pretrained_model_name_or_path: Union[str, os.PathLike],
    cache_dir: Optional[Union[str, os.PathLike]] = None,
    force_download: bool = False,
    resume_download: Optional[bool] = None,
    proxies: Optional[Dict[str, str]] = None,
    token: Optional[Union[bool, str]] = None,
    revision: Optional[str] = None,
    local_files_only: bool = False,
    **kwargs,
):
    r"""
    Loads the feature extractor configuration from a pretrained model's feature extractor configuration file.

    Args:
        pretrained_model_name_or_path (`str` or `os.PathLike`):
            This can be either:

            - a string, the *model id* of a pretrained model configuration hosted inside a model repo on
              huggingface.co.
            - a path to a *directory* containing a configuration file saved using the
              [`~feature_extraction_utils.FeatureExtractionMixin.save_pretrained`] method, e.g., `./my_model_directory/`.

        cache_dir (`str` or `os.PathLike`, *optional*):
            Path to a directory in which a downloaded pretrained model configuration should be cached if the standard
            cache should not be used.
        force_download (`bool`, *optional*, defaults to `False`):
            Whether or not to force to (re-)download the configuration files and override the cached versions if they
            exist.
        resume_download:
            Deprecated and ignored. All downloads are now resumed by default when possible.
            Will be removed in v5 of Transformers.
        proxies (`Dict[str, str]`, *optional*):
            A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
            'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
        token (`str` or *bool*, *optional*):
            The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
            when running `huggingface-cli login` (stored in `~/.huggingface`).
        revision (`str`, *optional*, defaults to `"main"`):
            The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
            git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
            identifier allowed by git.
        local_files_only (`bool`, *optional*, defaults to `False`):
            If `True`, will only try to load the feature extractor configuration from local files.

    <Tip>

    Passing `token=True` is required when you want to use a private model.

    </Tip>

    Returns:
        `Dict`: The configuration of the feature extractor.

    Examples:

    ```python
    # Download configuration from huggingface.co and cache.
    feature_extractor_config = get_feature_extractor_config("facebook/wav2vec2-base-960h")
    # This model does not have a feature extractor config so the result will be an empty dict.
    feature_extractor_config = get_feature_extractor_config("FacebookAI/xlm-roberta-base")

    # Save a pretrained feature extractor locally and you can reload its config
    from transformers import AutoFeatureExtractor

    feature_extractor = AutoFeatureExtractor.from_pretrained("facebook/wav2vec2-base-960h")
    feature_extractor.save_pretrained("feature-extraction-test")
    feature_extractor_config = get_feature_extractor_config("feature-extraction-test")
    ```"""
    use_auth_token = kwargs.pop("use_auth_token", None)
    if use_auth_token is not None:
        warnings.warn(
            "The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers. Please use "
            "`token` instead.",
            FutureWarning,
        )
        if token is not None:
            raise ValueError("`token` and `use_auth_token` are both specified. Please set only the argument `token`.")
        token = use_auth_token

    resolved_config_file = get_file_from_repo(
        pretrained_model_name_or_path,
        FEATURE_EXTRACTOR_NAME,
        cache_dir=cache_dir,
        force_download=force_download,
        resume_download=resume_download,
        proxies=proxies,
        token=token,
        revision=revision,
        local_files_only=local_files_only,
    )
    if resolved_config_file is None:
        logger.info(
            "Could not locate the feature extractor configuration file, will try to use the model config instead."
        )
        return {}

    with open(resolved_config_file, encoding="utf-8") as reader:
        return json.load(reader)


class AutoFeatureExtractor:
    r"""
    This is a generic feature extractor class that will be instantiated as one of the feature extractor classes of the
    library when created with the [`AutoFeatureExtractor.from_pretrained`] class method.

    This class cannot be instantiated directly using `__init__()` (throws an error).
    """

    def __init__(self):
        raise EnvironmentError(
            "AutoFeatureExtractor is designed to be instantiated "
            "using the `AutoFeatureExtractor.from_pretrained(pretrained_model_name_or_path)` method."
        )

    @classmethod
    @replace_list_option_in_docstrings(FEATURE_EXTRACTOR_MAPPING_NAMES)
    def from_pretrained(cls, pretrained_model_name_or_path, **kwargs):
        r"""
        Instantiate one of the feature extractor classes of the library from a pretrained model vocabulary.

        The feature extractor class to instantiate is selected based on the `model_type` property of the config object
        (either passed as an argument or loaded from `pretrained_model_name_or_path` if possible), or when it's
        missing, by falling back to using pattern matching on `pretrained_model_name_or_path`:

        List options

        Params:
            pretrained_model_name_or_path (`str` or `os.PathLike`):
                This can be either:

                - a string, the *model id* of a pretrained feature_extractor hosted inside a model repo on
                  huggingface.co.
                - a path to a *directory* containing a feature extractor file saved using the
                  [`~feature_extraction_utils.FeatureExtractionMixin.save_pretrained`] method, e.g.,
                  `./my_model_directory/`.
                - a path or url to a saved feature extractor JSON *file*, e.g.,
                  `./my_model_directory/preprocessor_config.json`.
            cache_dir (`str` or `os.PathLike`, *optional*):
                Path to a directory in which a downloaded pretrained model feature extractor should be cached if the
                standard cache should not be used.
            force_download (`bool`, *optional*, defaults to `False`):
                Whether or not to force to (re-)download the feature extractor files and override the cached versions
                if they exist.
            resume_download:
                Deprecated and ignored. All downloads are now resumed by default when possible.
                Will be removed in v5 of Transformers.
            proxies (`Dict[str, str]`, *optional*):
                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
                'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
            token (`str` or *bool*, *optional*):
                The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
                when running `huggingface-cli login` (stored in `~/.huggingface`).
            revision (`str`, *optional*, defaults to `"main"`):
                The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
                identifier allowed by git.
            return_unused_kwargs (`bool`, *optional*, defaults to `False`):
                If `False`, then this function returns just the final feature extractor object. If `True`, then this
                functions returns a `Tuple(feature_extractor, unused_kwargs)` where *unused_kwargs* is a dictionary
                consisting of the key/value pairs whose keys are not feature extractor attributes: i.e., the part of
                `kwargs` which has not been used to update `feature_extractor` and is otherwise ignored.
            trust_remote_code (`bool`, *optional*, defaults to `False`):
                Whether or not to allow for custom models defined on the Hub in their own modeling files. This option
                should only be set to `True` for repositories you trust and in which you have read the code, as it will
                execute code present on the Hub on your local machine.
            kwargs (`Dict[str, Any]`, *optional*):
                The values in kwargs of any keys which are feature extractor attributes will be used to override the
                loaded values. Behavior concerning key/value pairs whose keys are *not* feature extractor attributes is
                controlled by the `return_unused_kwargs` keyword parameter.

        <Tip>

        Passing `token=True` is required when you want to use a private model.

        </Tip>

        Examples:

        ```python
        >>> from transformers import AutoFeatureExtractor

        >>> # Download feature extractor from huggingface.co and cache.
        >>> feature_extractor = AutoFeatureExtractor.from_pretrained("facebook/wav2vec2-base-960h")

        >>> # If feature extractor files are in a directory (e.g. feature extractor was saved using *save_pretrained('./test/saved_model/')*)
        >>> # feature_extractor = AutoFeatureExtractor.from_pretrained("./test/saved_model/")
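        >>> # Illustrative addition (not in the original docstring): a repo that ships its own feature extractor
        >>> # code can be loaded by opting in to remote code execution; the repo id below is hypothetical.
        >>> # feature_extractor = AutoFeatureExtractor.from_pretrained("hub-user/model-with-custom-extractor", trust_remote_code=True)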
        ```"""
        use_auth_token = kwargs.pop("use_auth_token", None)
        if use_auth_token is not None:
            warnings.warn(
                "The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers. Please use "
                "`token` instead.",
                FutureWarning,
            )
            if kwargs.get("token", None) is not None:
                raise ValueError(
                    "`token` and `use_auth_token` are both specified. Please set only the argument `token`."
                )
            kwargs["token"] = use_auth_token

        config = kwargs.pop("config", None)
        trust_remote_code = kwargs.pop("trust_remote_code", None)
        kwargs["_from_auto"] = True

        config_dict, _ = FeatureExtractionMixin.get_feature_extractor_dict(pretrained_model_name_or_path, **kwargs)
        feature_extractor_class = config_dict.get("feature_extractor_type", None)
        feature_extractor_auto_map = None
        if "AutoFeatureExtractor" in config_dict.get("auto_map", {}):
            feature_extractor_auto_map = config_dict["auto_map"]["AutoFeatureExtractor"]

        # If we don't find the feature extractor class in the feature extractor config, let's try the model config.
        if feature_extractor_class is None and feature_extractor_auto_map is None:
            if not isinstance(config, PretrainedConfig):
                config = AutoConfig.from_pretrained(pretrained_model_name_or_path, **kwargs)
            # It could be in `config.feature_extractor_type`
            feature_extractor_class = getattr(config, "feature_extractor_type", None)
            if hasattr(config, "auto_map") and "AutoFeatureExtractor" in config.auto_map:
                feature_extractor_auto_map = config.auto_map["AutoFeatureExtractor"]

        if feature_extractor_class is not None:
            feature_extractor_class = feature_extractor_class_from_name(feature_extractor_class)

        has_remote_code = feature_extractor_auto_map is not None
        has_local_code = feature_extractor_class is not None or type(config) in FEATURE_EXTRACTOR_MAPPING
        trust_remote_code = resolve_trust_remote_code(
            trust_remote_code, pretrained_model_name_or_path, has_local_code, has_remote_code
        )

        if has_remote_code and trust_remote_code:
            feature_extractor_class = get_class_from_dynamic_module(
                feature_extractor_auto_map, pretrained_model_name_or_path, **kwargs
            )
            _ = kwargs.pop("code_revision", None)
            if os.path.isdir(pretrained_model_name_or_path):
                feature_extractor_class.register_for_auto_class()
            return feature_extractor_class.from_dict(config_dict, **kwargs)
        elif feature_extractor_class is not None:
            return feature_extractor_class.from_dict(config_dict, **kwargs)
        # Last try: we use the FEATURE_EXTRACTOR_MAPPING.
        elif type(config) in FEATURE_EXTRACTOR_MAPPING:
            feature_extractor_class = FEATURE_EXTRACTOR_MAPPING[type(config)]
            return feature_extractor_class.from_dict(config_dict, **kwargs)

        raise ValueError(
            f"Unrecognized feature extractor in {pretrained_model_name_or_path}. Should have a "
            f"`feature_extractor_type` key in its {FEATURE_EXTRACTOR_NAME} of {CONFIG_NAME}, or one of the following "
            f"`model_type` keys in its {CONFIG_NAME}: {', '.join(c for c in FEATURE_EXTRACTOR_MAPPING_NAMES.keys())}"
        )

    @staticmethod
    def register(config_class, feature_extractor_class, exist_ok=False):
        """
        Register a new feature extractor for this class.

        Args:
            config_class ([`PretrainedConfig`]):
                The configuration corresponding to the model to register.
            feature_extractor_class ([`FeatureExtractorMixin`]): The feature extractor to register.
        """
        FEATURE_EXTRACTOR_MAPPING.register(config_class, feature_extractor_class, exist_ok=exist_ok)
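# Usage sketch (editor's illustration, not part of the original module): registering a custom feature extractor
# so that `AutoFeatureExtractor` can resolve it. `CustomConfig`, `CustomFeatureExtractor`, and
# `./custom_model_directory` are hypothetical names introduced for the example.
#
#   from transformers import AutoConfig, AutoFeatureExtractor, FeatureExtractionMixin, PretrainedConfig
#
#   class CustomConfig(PretrainedConfig):
#       model_type = "custom-model"
#
#   class CustomFeatureExtractor(FeatureExtractionMixin):
#       pass
#
#   AutoConfig.register("custom-model", CustomConfig)
#   AutoFeatureExtractor.register(CustomConfig, CustomFeatureExtractor)
#
#   feature_extractor = AutoFeatureExtractor.from_pretrained("./custom_model_directory")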