"""Tracing.

This module contains functionality to support the JIT's tracing frontend, notably:
    * torch.jit.trace
    * torch.jit.trace_module

This is not intended to be imported directly; please use the exposed
functionalities in `torch.jit`.
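
A minimal usage sketch (the function and shapes below are illustrative only)::

    import torch

    def scale(x):
        return x * 2

    traced = torch.jit.trace(scale, (torch.rand(3),))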
    N)Enum)AnyCallableOptionalTypeVar)	ParamSpec)_get_model_id_qualified_nameget_callable_argument_namesis_scripting)function)_CachedForwardscriptScriptModule)_enabled
_python_cu)Module)default_tolerancesRT)	covariantP   c                    s    fdd}|S )Nc                    s   t  }|s
tdd}| d k r%|j}|std|d7 }| d k s|j}| D ]\}}t|tjrF| |u rF|dkrB|  S d  S q,dS )Nzfailed to inspect framer   r   zfailed to get frameself )	inspectcurrentframeRuntimeErrorf_backf_localsitems
isinstancetorchTensor)varframeir   kv	frames_up d/var/www/html/construction_image-detection-poc/venv/lib/python3.10/site-packages/torch/jit/_trace.py_get_interpreter_name_for_var-   s    zI_create_interpreter_name_lookup_fn.<locals>._get_interpreter_name_for_varr*   )r)   r,   r*   r(   r+   "_create_interpreter_name_lookup_fn,   s   r-   Fc                 C   sh   | j dd}t| }t }| D ]\}}t||v rq|t| |r+|||< q| ||< q|S )NT	keep_vars)
state_dicttypesetr   idadddetach)moduler/   r0   filtered_dictseen_idsr&   r'   r*   r*   r+   _unique_state_dictC   s   

r9   c                       s6   e Zd Z				d fdd	ZdejfddZ  ZS )	ONNXTracedModuleTFc                    s,   t    || _|| _|| _|| _|| _d S N)super__init__innerstrict_force_outplace_return_inputs_return_inputs_states)r   r>   r?   force_outplacereturn_inputsreturn_inputs_states	__class__r*   r+   r=   V   s   

zONNXTracedModule.__init__argsc                    s   t |\ ttdd }g g g  fdd}tj|| t jj	\}}j
r>|d d fS jrJ|d d fS |d fS )NTr.   c                     s   g }t tD ]}t| | tjstd|| |  qt| }jr2t	dd | D  j
r=t|  j|  j
rPd |fd< t\}}t|dkr`|d S t	|S )NzExpected Tensor argumentc                 s   s    | ]
}|j tjd V  qdS )memory_formatN)cloner!   preserve_format).0xr*   r*   r+   	<genexpr>}   s    z<ONNXTracedModule.forward.<locals>.wrapper.<locals>.<genexpr>r   r   )rangelenr    r!   r"   r   append
_unflattenrA   tuplerB   r>   _flatten)rH   in_argsr%   trace_inputsout_vars_in_descin_varsinputs_statesouts
ret_inputsr   r*   r+   wrapperr   s&   
z)ONNXTracedModule.forward.<locals>.wrapperr   )rU   listr9   valuesr!   _C_create_graph_by_tracingr-   r?   r@   rA   rB   )r   rH   module_stater`   graph_outr*   rZ   r+   forwardh   s$   zONNXTracedModule.forward)TFFF)__name__
__module____qualname__r=   r!   r"   rh   __classcell__r*   r*   rF   r+   r:   U   s    r:   c                    s$    fdd t jdd  dd| S )Nc                    sd   | d u rd S t | tjr+|  j| jrd ntjd| j}| j	d ur) |j	|_	|S | jtjdS )NrI   )
r    r!   r"   r5   rK   	is_mkldnnrL   requires_grad_requires_gradgrad)ar'   clone_inputr*   r+   rs      s   
z"_clone_inputs.<locals>.clone_inputc                 S   s   t | tjS r;   r    r!   r"   rN   r*   r*   r+   <lambda>   s    z_clone_inputs.<locals>.<lambda>tensors)condition_msg)r   _nested_map)rH   r*   rr   r+   _clone_inputs   s   
rz   PYTORCH_JIT_TIMEPYTORCH_JIT_DISABLEPYTORCH_JIT_STATSc                 c   s    t s|r
tj sd V  d S tj }tjjdd}tjjdd}|| zd V  W || |  t|  d| d|	| d d S || |  t|  d| d|	| d w )NT)enable_timing z time: z ms)
	_JIT_TIMEr!   cudais_availablecurrent_streamEventrecord_eventsynchronizeprintelapsed_time)
trace_namenametimestreamstartendr*   r*   r+   _time   s    


$
"r   c           
         s   t tjjstdt t t |ts|f} r!t	 }d fdd	}tj
j|dd ||dd\}}j| sBJ W d	   n1 sLw   Y   rX| ||dd
\}}	t|| t||	 d	S )aB  
    Verify that a JIT compiled model has the same behavior as its uncompiled version along with its backwards pass.

    If your model returns multiple outputs,
    you must also specify a `loss_fn` to produce a loss for which
    the backwards will be computed.

    This function has side-effects (e.g., it executes your model / saves and loads
    parameters), so don't expect the model to come out exactly the same as what
    you passed in.

    Args:
        model (compiled torch.nn.Module or function): the module/function to be
            verified.  The module/function definition MUST have been decorated with
            `@torch.jit.compile`.
        args (tuple or Tensor): the positional arguments to pass to the
            compiled function/module to be verified.  A non-tuple is assumed to
            be a single positional argument to be passed to the model.
        loss_fn (function, optional): the loss function to be applied to
            the output of the model, before backwards is invoked.  By default,
            we assume that a model returns a single result, and we :func:`torch.sum`
            before calling backwards; if this is inappropriate, you can pass your
            own loss function.  Note that if a model returns a tuple of results,
            these are passed as separate positional arguments to `loss_fn`.
        devices (iterable of device IDs, optional): the GPU devices which the
            compiled module will be run on.  This determines the RNG state we
            must save when running both compiled and uncompiled versions of the model.
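
    Example (a minimal sketch; it assumes the legacy ``@torch.jit.compile``
    decorator named above is available, and relies on the default
    ``torch.sum`` loss for a single-output model)::

        import torch

        @torch.jit.compile
        def doubler(x):
            return x * 2

        torch.jit.verify(doubler, (torch.randn(4, requires_grad=True),))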
    """
    if not isinstance(model, torch._C.CompiledFunction):  # type: ignore[attr-defined]
        raise TypeError(
            "Cannot verify an uncompiled module.  Add @torch.jit.compile to compile it"
        )
    is_module = isinstance(model, Module)

    if not isinstance(args, tuple):
        args = (args,)

    saved_state = copy.deepcopy(model.state_dict()) if is_module else None

    def run_fwd_bwd(args, force_trace=False, assert_compiled=False):
        params = list(model.parameters()) if is_module else []
        in_vars, _ = _flatten((args, params))
        compiled_fn = model
        if force_trace:
            # Reset the trace and compile it from scratch.
            compiled_fn.clear_cache()
        if assert_compiled:
            hits = compiled_fn.hits
        out = model(*args)
        if assert_compiled and compiled_fn.hits == hits:  # type: ignore[possibly-undefined]
            raise RuntimeError("failed to use the compiled function")
        if not isinstance(out, tuple):
            out = (out,)
        if loss_fn == torch.sum and len(out) != 1:
            raise ValueError(
                f"Model returns {len(out)} outputs, but default loss function "
                "(torch.sum) can only handle a single output"
            )
        out_vars, _ = _flatten(out)
        saved_outs = [
            v.detach().clone(memory_format=torch.preserve_format) for v in out_vars
        ]
        loss = loss_fn(*out)
        grads = torch.autograd.grad([loss], in_vars)
        saved_grads = [
            v.detach().clone(memory_format=torch.preserve_format) for v in grads
        ]
        return (saved_outs, saved_grads)

    with torch.random.fork_rng(devices, _caller="torch.jit.verify"):
        uncompiled_outs, uncompiled_grads = run_fwd_bwd(args, force_trace=True)
        assert model.has_trace_for(*args)

    if is_module:
        model.load_state_dict(saved_state)  # type: ignore[possibly-undefined]
    compiled_outs, compiled_grads = run_fwd_bwd(args, assert_compiled=True)

    _verify_equal(uncompiled_outs, compiled_outs)
    _verify_equal(uncompiled_grads, compiled_grads)


def _verify_equal(xs, ys):
    for x, y in zip(xs, ys):
        if x.sub(y).abs().max() > 1e-6:
            raise RuntimeError("JIT and real computation mismatch")


def indent(s):
    return "\n".join(["\t" + line for line in s.splitlines()])


class TracingCheckError(Exception):
    def __init__(self, graph_diff_error, tensor_compare_error, extra_msg=None):
        self.message = "Tracing failed sanity checks!\n"
        if extra_msg is not None:
            self.message += extra_msg + "\n"
        if graph_diff_error is not None:
            self.message += "ERROR: Graphs differed across invocations!\n"
            self.message += indent(graph_diff_error) + "\n"
        if tensor_compare_error is not None:
            self.message += (
                "ERROR: Tensor-valued Constant nodes differed in value "
                "across invocations. This often indicates that the tracer has "
                "encountered untraceable code.\n"
            )
            self.message += indent(tensor_compare_error) + "\n"
        super().__init__(self.message)


# Check the traced module against a set of user-provided validation inputs
@torch.no_grad()
def _check_trace(
    check_inputs,
    func,
    traced_func,
    check_tolerance,
    strict,
    force_outplace,
    is_trace_module,
    _module_class,
    example_inputs_is_kwarg=False,
):
    # Note: tracing is independent of optimizations, which consume the trace
    for inputs in check_inputs:
        if isinstance(inputs, torch.Tensor):
            inputs = (inputs,)

        if is_trace_module:
            copied_dict = {}
            for name, data in inputs.items():
                copied_dict[name] = _clone_inputs(data)
            check_mod = torch.jit.trace_module(
                getattr(func, "__self__", func),
                copied_dict,
                check_trace=False,
                strict=strict,
                _force_outplace=force_outplace,
                _module_class=_module_class,
                _compilation_unit=torch._C.CompilationUnit(),
                example_inputs_is_kwarg=example_inputs_is_kwarg,
                _store_inputs=False,
            )
            check_mod_func = check_mod._c._get_method(traced_func.name)
            inputs = inputs[traced_func.name]
            if isinstance(inputs, torch.Tensor) or (
                isinstance(inputs, dict) and not example_inputs_is_kwarg
            ):
                inputs = (inputs,)
        else:
            if example_inputs_is_kwarg:
                check_mod = torch.jit.trace(
                    func,
                    check_trace=False,
                    strict=strict,
                    _force_outplace=force_outplace,
                    _module_class=_module_class,
                    example_kwarg_inputs=_clone_inputs(inputs),
                    _store_inputs=False,
                )
            else:
                check_mod = torch.jit.trace(
                    func,
                    _clone_inputs(inputs),
                    check_trace=False,
                    strict=strict,
                    _force_outplace=force_outplace,
                    _module_class=_module_class,
                    _store_inputs=False,
                )
            check_mod_func = check_mod

        def graph_diagnostic_info():
            mod_canonicalized = torch._C._jit_pass_canonicalize(traced_func.graph)
            torch._C._jit_pass_inline(mod_canonicalized)
            torch._C._jit_pass_erase_shape_information(mod_canonicalized)
            mod_str = str(mod_canonicalized)
            mod_str = re.sub(r"___torch_mangle_[0-9]+\.", "", mod_str)
            check_canonicalized = torch._C._jit_pass_canonicalize(check_mod_func.graph)
            torch._C._jit_pass_inline(check_canonicalized)
            torch._C._jit_pass_erase_shape_information(check_canonicalized)
            check_str = str(check_canonicalized)
            check_str = re.sub(r"___torch_mangle_[0-9]+\.", "", check_str)

            graph_diff_errors = None
            if mod_str != check_str:
                import difflib

                graph_diff = difflib.ndiff(
                    mod_str.splitlines(True), check_str.splitlines(True)
                )
                graph_diff_errors = "Graph diff:\n" + indent("".join(graph_diff)) + "\n"

                for n_mod, n_check in zip(
                    mod_canonicalized.nodes(), check_canonicalized.nodes()
                ):
                    if str(n_mod) != str(n_check):
                        graph_diff_errors += "First diverging operator:\n"
                        node_diff = difflib.ndiff(
                            str(n_mod).splitlines(True), str(n_check).splitlines(True)
                        )
                        source_printout = (
                            "Node diff:\n" + indent("".join(node_diff)) + "\n"
                        )
                        mod_stack = n_mod.sourceRange()
                        if mod_stack:
                            source_printout += (
                                "Trace source location:\n" + indent(mod_stack) + "\n"
                            )
                        check_stack = n_check.sourceRange()
                        if check_stack:
                            source_printout += (
                                "Check source location:\n" + indent(check_stack) + "\n"
                            )
                        graph_diff_errors += source_printout

                        break  # For now, only print the first pair of nodes that diverges

            tensor_compare_errors = None
            # Check Tensor-valued constant nodes
            for n_mod, n_check in zip(
                mod_canonicalized.nodes(), check_canonicalized.nodes()
            ):
                if n_mod.kind() != n_check.kind():
                    break  # Graphs have already diverged

                if n_mod.kind() == "prim::Constant" and not (
                    n_mod.mustBeNone() or n_check.mustBeNone()
                ):
                    if not n_mod.hasAttribute("value"):
                        continue
                    if n_mod.kindOf("value") != "t" or n_check.kindOf("value") != "t":
                        continue

                    mod_tensor_val = n_mod.t("value")
                    check_tensor_val = n_check.t("value")

                    try:
                        torch.testing.assert_close(
                            mod_tensor_val, check_tensor_val, equal_nan=True
                        )
                    except (RuntimeError, AssertionError) as e:
                        if tensor_compare_errors is None:
                            tensor_compare_errors = ""
                        tensor_compare_errors += "Node:\n" + indent(str(n_mod)) + "\n"
                        compare_stack = n_mod.sourceRange()
                        if compare_stack:
                            tensor_compare_errors += (
                                "Source Location:\n" + indent(compare_stack) + "\n"
                            )
                        tensor_compare_errors += "Comparison exception: " + indent(
                            str(e)
                        )

                        break  # For now, only print the first diverging pair

            return graph_diff_errors, tensor_compare_errors

        def wrap_retval(x):
            return x if isinstance(x, tuple) else (x,)

        def run_mod_and_filter_tensor_outputs(mod, inputs, running_what):
            try:
                if isinstance(inputs, dict) and example_inputs_is_kwarg:
                    outs = wrap_retval(mod(**inputs))
                else:
                    outs = wrap_retval(mod(*_clone_inputs(inputs)))
                outs = [out for out in outs if isinstance(out, torch.Tensor)]
                return outs
            except Exception as e:
                graph_diff_errors, tensor_compare_errors = graph_diagnostic_info()
                msg = (
                    f"encountered an exception while running the {running_what} "
                    f"with test inputs.\nException:\n{indent(str(e))}"
                )
                raise TracingCheckError(
                    graph_diff_errors,
                    tensor_compare_errors,
                    extra_msg=msg,
                ) from e

        has_warned = [False]

        def maybe_warn_nondeterministic():
            if has_warned[0]:
                return
            has_warned[0] = True
            nondeterm_ops = [
                op for op in traced_func.graph.nodes() if op.isNondeterministic()
            ]
            if len(nondeterm_ops) > 0:
                nondeterministic_ops_warning = "Trace had nondeterministic nodes. "
                nondeterministic_ops_warning += (
                    "Did you forget call .eval() on your model? Nodes:\n"
                )
                nondeterministic_ops_warning += "\n".join(
                    [indent(str(op)) for op in nondeterm_ops][:20]
                )
                nondeterministic_ops_warning += (
                    "\nThis may cause errors in trace checking. To disable trace "
                    "checking, pass check_trace=False to torch.jit.trace()"
                )
                warnings.warn(
                    nondeterministic_ops_warning, category=TracerWarning, stacklevel=5
                )

        def compare_outputs(original, reference, match_what):
            all_ok = True
            for i, (orig, ref) in enumerate(zip(original, reference)):
                try:
                    if orig.is_quantized:
                        orig = orig.dequantize()
                    if ref.is_quantized:
                        ref = ref.dequantize()
                    if orig.is_mkldnn:
                        orig = orig.to_dense()
                    if ref.is_mkldnn:
                        ref = ref.to_dense()

                    if orig.is_complex() or ref.is_complex():
                        torch.testing.assert_close(
                            orig.to(torch.cdouble),
                            ref.to(torch.cdouble),
                            rtol=check_tolerance,
                            atol=default_tolerances(orig, ref)[1],
                            equal_nan=True,
                        )
                    elif orig.is_mps or ref.is_mps:
                        torch.testing.assert_close(
                            orig.float(),
                            ref.float(),
                            rtol=check_tolerance,
                            atol=default_tolerances(orig, ref)[1],
                            equal_nan=True,
                        )
                    elif getattr(orig, "is_nested", None) or getattr(
                        ref, "is_nested", None
                    ):
                        assert len(orig.unbind()) == len(ref.unbind())
                        for t_orig, t_ref in zip(orig.unbind(), ref.unbind()):
                            torch.testing.assert_close(
                                t_orig.double(),
                                t_ref.double(),
                                rtol=check_tolerance,
                                atol=default_tolerances(t_orig, t_ref)[1],
                                equal_nan=True,
                            )
                    else:
                        torch.testing.assert_close(
                            orig.double(),
                            ref.double(),
                            rtol=check_tolerance,
                            atol=default_tolerances(orig, ref)[1],
                            equal_nan=True,
                        )
                except AssertionError as e:
                    maybe_warn_nondeterministic()
                    warnings.warn(
                        "Output nr " + str(i + 1) + ". of the traced function does "
                        "not match the corresponding output of the " + match_what
                        + ". Detailed error:\n" + str(e),
                        category=TracerWarning,
                        stacklevel=4,
                    )
                    all_ok = False

            return all_ok

        traced_outs = run_mod_and_filter_tensor_outputs(traced_func, inputs, "trace")
        fn_outs = run_mod_and_filter_tensor_outputs(func, inputs, "Python function")
        if compare_outputs(traced_outs, fn_outs, "Python function"):
            check_outs = run_mod_and_filter_tensor_outputs(
                check_mod_func, inputs, "repeated trace"
            )
            compare_outputs(traced_outs, check_outs, "repeated trace")

        diag_info = graph_diagnostic_info()
        if any(info is not None for info in diag_info):
            raise TracingCheckError(*diag_info)


class TracerWarning(Warning):
    @staticmethod
    def ignore_lib_warnings():
        warnings.filterwarnings(
            "ignore", category=TracerWarning, module="torch.(?!jit)"
        )
        warnings.filterwarnings("ignore", "torch::jit::fuser::cuda")


# We ignore the tracer warnings coming from inside the library, because all our
# shape checks in __init__.py are only meant to be debugging aids, and don't
# matter if the tracer is not guaranteed to be run.
TracerWarning.ignore_lib_warnings()
torch._C._tracer_warn_use_python()


def make_tuple(example_inputs):
    if isinstance(example_inputs, (torch.Tensor, dict)):
        return (example_inputs,)
    # done primarily so that weird iterables fail here and not pybind11 code
    if not isinstance(example_inputs, tuple):
        return tuple(example_inputs)
    return example_inputs


def make_module(mod, _module_class, _compilation_unit):
    if isinstance(mod, ScriptModule):
        return mod
    elif torch._jit_internal.module_has_exports(mod):
        infer_methods_stubs_fn = torch.jit._recursive.make_stubs_from_exported_methods
        return torch.jit._recursive.create_script_module(
            mod,
            infer_methods_stubs_fn,
            share_types=False,
            is_tracing=True,
        )
    else:
        if _module_class is None:
            _module_class = TopLevelTracedModule
        return _module_class(mod, _compilation_unit=_compilation_unit)


def wrap_check_inputs(check_inputs):
    if check_inputs is None:
        return None

    return [{"forward": c} for c in check_inputs]


def analyze_ts_result_with_export_result(export, trace):
    import torch.utils._pytree as pytree

    flat_export = pytree.tree_leaves(export)
    flat_trace = pytree.tree_leaves(trace)

    for orig, loaded in zip(flat_export, flat_trace):
        if orig.layout != loaded.layout:
            return False
        # mkldnn tensors don't work with torch.allclose
        if orig.layout == torch._mkldnn:  # type: ignore[attr-defined]
            return True
        if type(orig) != type(loaded):
            return False

        if isinstance(orig, torch._subclasses.FakeTensor):
            if orig.size() != loaded.size():
                return False
        elif isinstance(orig, torch.Tensor):
            if orig.dtype != loaded.dtype:
                return False
            if not torch.allclose(orig, loaded):
                return False
        else:
            if orig != loaded:
                return False
    return True


def _trace_impl(
    func,
    example_inputs=None,
    optimize=None,
    check_trace=True,
    check_inputs=None,
    check_tolerance=1e-5,
    strict=True,
    _force_outplace=False,
    _module_class=None,
    _compilation_unit=_python_cu,
    example_kwarg_inputs=None,
    _store_inputs=True,
):
    if isinstance(func, torch.jit.ScriptModule):
        # it is hard to trace it because the forward method on ScriptModule is
        # already defined, so it would result in an error.
        warnings.warn(
            "The input to trace is already a ScriptModule, tracing it is a no-op. "
            "Returning the object as is."
        )
        return func

    if isinstance(func, torch.nn.Module):
        if example_inputs is None:
            if isinstance(example_kwarg_inputs, dict):
                example_inputs = example_kwarg_inputs
            else:
                raise RuntimeError("example_kwarg_inputs should be a dict")
        return trace_module(
            func,
            {"forward": example_inputs},
            None,
            check_trace,
            wrap_check_inputs(check_inputs),
            check_tolerance,
            strict,
            _force_outplace,
            _module_class,
            example_inputs_is_kwarg=isinstance(example_kwarg_inputs, dict),
            _store_inputs=_store_inputs,
        )
    if (
        hasattr(func, "__self__")
        and isinstance(func.__self__, torch.nn.Module)
        and func.__name__ == "forward"
    ):
        if example_inputs is None:
            if isinstance(example_kwarg_inputs, dict):
                example_inputs = example_kwarg_inputs
            else:
                raise RuntimeError("example_kwarg_inputs should be a dict")
        return trace_module(
            func.__self__,
            {"forward": example_inputs},
            None,
            check_trace,
            wrap_check_inputs(check_inputs),
            check_tolerance,
            strict,
            _force_outplace,
            _module_class,
            example_inputs_is_kwarg=isinstance(example_kwarg_inputs, dict),
            _store_inputs=_store_inputs,
        )

    # Special case for common case of passing a single Tensor
    if (
        isinstance(example_inputs, (torch.Tensor, dict))
        and example_kwarg_inputs is None
    ):
        example_inputs = (example_inputs,)
    # done primarily so that weird iterables fail here and not pybind11 code
    elif example_kwarg_inputs is None and not isinstance(example_inputs, tuple):
        example_inputs = tuple(example_inputs)

    var_lookup_fn = _create_interpreter_name_lookup_fn(0)

    if hasattr(func, "__self__") and isinstance(func.__self__, torch.nn.Module):
        raise AttributeError(
            "trace doesn't support compiling individual module's functions.\n"
            "Please use trace_module"
        )

    name = _qualified_name(func)
    if isinstance(example_kwarg_inputs, dict):
        example_inputs = example_kwarg_inputs
        traced = torch._C._create_function_from_trace_with_dict(
            name,
            func,
            example_kwarg_inputs,
            var_lookup_fn,
            strict,
            _force_outplace,
            get_callable_argument_names(func),
        )
    else:
        traced = torch._C._create_function_from_trace(
            name,
            func,
            example_inputs,
            var_lookup_fn,
            strict,
            _force_outplace,
            get_callable_argument_names(func),
        )

    # Check the trace against new traces created from user-specified inputs
    if check_trace:
        if check_inputs is not None:
            _check_trace(
                check_inputs,
                func,
                traced,
                check_tolerance,
                strict,
                _force_outplace,
                False,
                _module_class,
                example_inputs_is_kwarg=isinstance(example_kwarg_inputs, dict),
            )
        else:
            _check_trace(
                [example_inputs],
                func,
                traced,
                check_tolerance,
                strict,
                _force_outplace,
                False,
                _module_class,
                example_inputs_is_kwarg=isinstance(example_kwarg_inputs, dict),
            )

    # Allow torch.compile() to inline
    traced._torchdynamo_inline = func  # type: ignore[attr-defined]
    return traced


class _ExportType(str, Enum):
    DIRECT_EXPORT = "DIRECT_EXPORT"
    TRACE_AND_EXPORT = "TRACE_AND_EXPORT"
    SOURCE_TO_SOURCE = "SOURCE_TO_SOURCE"

    def __str__(self) -> str:
        return self.value


class _ExportOutcome(str, Enum):
    SUCCESS = "SUCCESS"
    FAILED_TO_EXPORT = "FAILED_TO_EXPORT"
    FAILED_TO_RUN = "FAILED_TO_RUN"
    ACCURACY_ERROR = "ACCURACY_ERROR"

    def __str__(self) -> str:
        return self.value


def trace(
    func,
    example_inputs=None,
    optimize=None,
    check_trace=True,
    check_inputs=None,
    check_tolerance=1e-5,
    strict=True,
    _force_outplace=False,
    _module_class=None,
    _compilation_unit=_python_cu,
    example_kwarg_inputs=None,
    _store_inputs=True,
):
    r"""
    Trace a function and return an executable or :class:`ScriptFunction` that will be optimized using just-in-time compilation.

    Tracing is ideal for code that operates only on
    ``Tensor``\\s and lists, dictionaries, and
    tuples of ``Tensor``\\s.

    Using `torch.jit.trace` and `torch.jit.trace_module`, you can turn an
    existing module or Python function into a TorchScript
    :class:`ScriptFunction` or :class:`ScriptModule`. You must provide example
    inputs, and we run the function, recording the operations performed on all
    the tensors.

    * The resulting recording of a standalone function produces `ScriptFunction`.
    * The resulting recording of `nn.Module.forward` or `nn.Module` produces
      `ScriptModule`.

    The resulting module also contains any parameters that the original
    module had.

    Warning:
        Tracing only correctly records functions and modules which are not data
        dependent (e.g., do not have conditionals on data in tensors) and do not have
        any untracked external dependencies (e.g., perform input/output or
        access global variables). Tracing only records operations done when the given
        function is run on the given tensors. Therefore, the returned
        `ScriptModule` will always run the same traced graph on any input. This
        has some important implications when your module is expected to run
        different sets of operations, depending on the input and/or the module
        state. For example,

        * Tracing will not record any control-flow like if-statements or loops.
          When this control-flow is constant across your module, this is fine
          and it often inlines the control-flow decisions. But sometimes the
          control-flow is actually part of the model itself. For instance, a
          recurrent network is a loop over the (possibly dynamic) length of an
          input sequence.
        * In the returned :class:`ScriptModule`, operations that have different
          behaviors in ``training`` and ``eval`` modes will always behave as if
          it is in the mode it was in during tracing, no matter which mode the
          `ScriptModule` is in.

        In cases like these, tracing would not be appropriate and
        :func:`scripting <torch.jit.script>` is a better choice. If you trace
        such models, you may silently get incorrect results on subsequent
        invocations of the model. The tracer will try to emit warnings when
        doing something that may cause an incorrect trace to be produced.

    Args:
        func (callable or torch.nn.Module):  A Python function or `torch.nn.Module`
            that will be run with `example_inputs`. `func` arguments and return
            values  must be tensors or (possibly nested) tuples that contain
            tensors. When a module is passed `torch.jit.trace`, only the
            ``forward`` method is run and traced (see :func:`torch.jit.trace
            <torch.jit.trace_module>` for details).

    Keyword arguments:
        example_inputs (tuple or torch.Tensor or None, optional): A tuple of example
            inputs that will be passed to the function while tracing.
            Default: ``None``. Either this argument or ``example_kwarg_inputs``
            should be specified. The resulting trace can be run with inputs of
            different types and shapes assuming the traced operations support those
            types and shapes. `example_inputs` may also be a single Tensor in which
            case it is automatically wrapped in a tuple. When the value is None,
            ``example_kwarg_inputs`` should be specified.

        check_trace (``bool``, optional): Check if the same inputs run through
            traced code produce the same outputs. Default: ``True``. You might want
            to disable this if, for example, your network contains non-
            deterministic ops or if you are sure that the network is correct despite
            a checker failure.

        check_inputs (list of tuples, optional): A list of tuples of input
            arguments that should be used to check the trace against what is
            expected. Each tuple is equivalent to a set of input arguments that
            would be specified in ``example_inputs``. For best results, pass in
            a set of checking inputs representative of the space of shapes and
            types of inputs you expect the network to see.  If not specified,
            the original ``example_inputs`` are used for checking
        check_tolerance (float, optional): Floating-point comparison tolerance
            to use in the checker procedure.  This can be used to relax the
            checker strictness in the event that results diverge numerically
            for a known reason, such as operator fusion.
        strict (``bool``, optional): run the tracer in a strict mode or not
            (default: ``True``). Only turn this off when you want the tracer to
            record your mutable container types (currently ``list``/``dict``)
            and you are sure that the container you are using in your
            problem is a ``constant`` structure and does not get used as
            control flow (if, for) conditions.
        example_kwarg_inputs (dict, optional): This parameter is a pack of keyword
            arguments of example inputs that will be passed to the function while
            tracing. Default: ``None``. Either this argument or ``example_inputs``
            should be specified. The dict will be unpacked by the argument names
            of the traced function. If the keys of the dict don't match the
            traced function's argument names, a runtime exception will be raised.

    Returns:
        If `func` is `nn.Module` or ``forward`` of `nn.Module`, `trace` returns
        a :class:`ScriptModule` object with a single ``forward`` method
        containing the traced code.  The returned `ScriptModule` will
        have the same set of sub-modules and parameters as the original
        ``nn.Module``.  If ``func`` is a standalone function, ``trace``
        returns `ScriptFunction`.

    Example (tracing a function):

    .. testcode::

        import torch

        def foo(x, y):
            return 2 * x + y

        # Run `foo` with the provided inputs and record the tensor operations
        traced_foo = torch.jit.trace(foo, (torch.rand(3), torch.rand(3)))

        # `traced_foo` can now be run with the TorchScript interpreter or saved
        # and loaded in a Python-free environment

    Example (tracing an existing module)::

        import torch
        import torch.nn as nn


        class Net(nn.Module):
            def __init__(self) -> None:
                super().__init__()
                self.conv = nn.Conv2d(1, 1, 3)

            def forward(self, x):
                return self.conv(x)


        n = Net()
        example_weight = torch.rand(1, 1, 3, 3)
        example_forward_input = torch.rand(1, 1, 3, 3)

        # Trace a specific method and construct `ScriptModule` with
        # a single `forward` method
        module = torch.jit.trace(n.forward, example_forward_input)

        # Trace a module (implicitly traces `forward`) and construct a
        # `ScriptModule` with a single `forward` method
        module = torch.jit.trace(n, example_forward_input)

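    Example (tracing with keyword-argument inputs): a minimal sketch of the
    documented ``example_kwarg_inputs`` path; the function and shapes are
    illustrative only::

        import torch

        def add(x, y):
            return x + y

        # The dict is unpacked by argument name while tracing.
        traced_add = torch.jit.trace(
            add, example_kwarg_inputs={"x": torch.rand(3), "y": torch.rand(3)}
        )
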
    """
    if not _enabled:
        return func
    if optimize is not None:
        warnings.warn(
            "`optimize` is deprecated and has no effect. "
            "Use `with torch.jit.optimized_execution()` instead",
            FutureWarning,
            stacklevel=2,
        )

    from torch._utils_internal import (
        check_if_torch_exportable,
        log_torch_jit_trace_exportability,
        log_torchscript_usage,
    )

    log_torchscript_usage("trace", model_id=_get_model_id(func))
    traced_func = _trace_impl(
        func,
        example_inputs,
        optimize,
        check_trace,
        check_inputs,
        check_tolerance,
        strict,
        _force_outplace,
        _module_class,
        _compilation_unit,
        example_kwarg_inputs,
        _store_inputs,
    )

    if check_if_torch_exportable():
        from torch._export.converter import TS2EPConverter
        from torch.export._trace import (
            _convert_ts_to_export_experimental,
            _process_jit_trace_inputs_for_export,
        )

        traced_func_for_export = _trace_impl(
            func,
            example_inputs=example_inputs,
            optimize=optimize,
            check_trace=False,
            check_inputs=check_inputs,
            check_tolerance=check_tolerance,
            strict=strict,
            _force_outplace=_force_outplace,
            _module_class=_module_class,
            _compilation_unit=_compilation_unit,
            example_kwarg_inputs=example_kwarg_inputs,
            _store_inputs=_store_inputs,
        )
        export_args, _ = _process_jit_trace_inputs_for_export(
            example_inputs, example_kwarg_inputs
        )

        def _log_exportability(func_to_export, export_func, export_args, export_type):
            try:
                traced_result = func_to_export(*export_args)
            except Exception:
                log_torch_jit_trace_exportability(
                    "trace", str(export_type), str(_ExportOutcome.FAILED_TO_RUN), ""
                )
                return

            try:
                ep_module = export_func(func_to_export, export_args)
            except Exception as e:
                log_torch_jit_trace_exportability(
                    "trace",
                    str(export_type),
                    str(_ExportOutcome.FAILED_TO_EXPORT),
                    str(e),
                )
                return

            try:
                export = ep_module(*export_args)
            except Exception as e:
                log_torch_jit_trace_exportability(
                    "trace", str(export_type), str(_ExportOutcome.FAILED_TO_RUN), str(e)
                )
                return

            if not analyze_ts_result_with_export_result(export, traced_result):
                log_torch_jit_trace_exportability(
                    "trace",
                    str(export_type),
                    str(_ExportOutcome.ACCURACY_ERROR),
                    "accuracy error",
                )
                return

            log_torch_jit_trace_exportability(
                "trace", str(export_type), str(_ExportOutcome.SUCCESS), "succeeded"
            )

        def _direct_export_and_lower(func, export_args):
            return torch.export.export(func, export_args, strict=False).module()

        def _convert_ts_to_export_source_to_source(func, export_args):
            return TS2EPConverter(func, export_args).convert().module()

        # Only probe the export paths for traces that are not ScriptModules.
        if not isinstance(traced_func_for_export, torch.jit.ScriptModule):
            _log_exportability(
                traced_func_for_export,
                _direct_export_and_lower,
                export_args,
                _ExportType.DIRECT_EXPORT,
            )
            _log_exportability(
                traced_func_for_export,
                _convert_ts_to_export_experimental,
                export_args,
                _ExportType.TRACE_AND_EXPORT,
            )
            _log_exportability(
                traced_func_for_export,
                _convert_ts_to_export_source_to_source,
                export_args,
                _ExportType.SOURCE_TO_SOURCE,
            )

    return traced_func


_trace_module_map: Optional[dict[Any, str]] = None


def trace_module(
    mod,
    inputs,
    optimize=None,
    check_trace=True,
    check_inputs=None,
    check_tolerance=1e-5,
    strict=True,
    _force_outplace=False,
    _module_class=None,
    _compilation_unit=_python_cu,
    example_inputs_is_kwarg=False,
    _store_inputs=True,
):
    r"""
    Trace a module and return an executable :class:`ScriptModule` that will be optimized using just-in-time compilation.

    When a module is passed to :func:`torch.jit.trace <torch.jit.trace>`, only
    the ``forward`` method is run and traced. With ``trace_module``, you can specify a dictionary of
    method names to example inputs to trace (see the ``inputs`` argument below).

    See :func:`torch.jit.trace <torch.jit.trace>` for more information on tracing.

    Args:
        mod (torch.nn.Module):  A ``torch.nn.Module`` containing methods whose names are
                                specified in ``inputs``. The given methods will be compiled
                                as a part of a single `ScriptModule`.
        inputs (dict):  A dict containing sample inputs indexed by method names in ``mod``.
                                The inputs will be passed to methods whose names correspond to inputs'
                                keys while tracing.
                                ``{ 'forward' : example_forward_input, 'method2': example_method2_input}``
    Keyword arguments:
        check_trace (``bool``, optional): Check if the same inputs run through
                                      traced code produce the same outputs. Default: ``True``. You might want
                                      to disable this if, for example, your network contains non-
                                      deterministic ops or if you are sure that the network is correct despite
                                      a checker failure.

        check_inputs (list of dicts, optional): A list of dicts of input arguments that should be used
                                                 to check the trace against what is expected. Each tuple
                                                 is equivalent to a set of input arguments that would
                                                 be specified in ``inputs``. For best results, pass in a
                                                 set of checking inputs representative of the space of
                                                 shapes and types of inputs you expect the network to see.
                                                 If not specified, the original ``inputs`` are used for checking
        check_tolerance (float, optional): Floating-point comparison tolerance to use in the checker procedure.
                                           This can be used to relax the checker strictness in the event that
                                           results diverge numerically for a known reason, such as operator fusion.
        example_inputs_is_kwarg (``bool``, optional): This parameter indicates whether the example inputs
                                           is a pack of keyword arguments. Default: ``False``.

    Returns:
        A :class:`ScriptModule` object with a single ``forward`` method containing the traced code.
        When ``func`` is a ``torch.nn.Module``, the returned :class:`ScriptModule` will have the same set of
        sub-modules and parameters as ``func``.

    Example (tracing a module with multiple methods)::

        import torch
        import torch.nn as nn


        class Net(nn.Module):
            def __init__(self) -> None:
                super().__init__()
                self.conv = nn.Conv2d(1, 1, 3)

            def forward(self, x):
                return self.conv(x)

            def weighted_kernel_sum(self, weight):
                return weight * self.conv.weight


        n = Net()
        example_weight = torch.rand(1, 1, 3, 3)
        example_forward_input = torch.rand(1, 1, 3, 3)

        # Trace a specific method and construct `ScriptModule` with
        # a single `forward` method
        module = torch.jit.trace(n.forward, example_forward_input)

        # Trace a module (implicitly traces `forward`) and construct a
        # `ScriptModule` with a single `forward` method
        module = torch.jit.trace(n, example_forward_input)

        # Trace specific methods on a module (specified in `inputs`), constructs
        # a `ScriptModule` with `forward` and `weighted_kernel_sum` methods
        inputs = {"forward": example_forward_input, "weighted_kernel_sum": example_weight}
        module = torch.jit.trace_module(n, inputs)

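    Example (checking a trace against additional shapes): a minimal sketch of
    the documented ``check_inputs`` argument, reusing the ``Net`` module above;
    the shapes are illustrative only::

        n = Net()
        # Each dict maps method names to one set of checking inputs.
        check_inputs = [
            {"forward": torch.rand(2, 1, 4, 4)},
            {"forward": torch.rand(5, 1, 8, 8)},
        ]
        module = torch.jit.trace_module(
            n, {"forward": torch.rand(1, 1, 3, 3)}, check_inputs=check_inputs
        )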
    """
    if not _enabled:
        return mod
    if optimize is not None:
        warnings.warn(
            "`optimize` is deprecated and has no effect. "
            "Use `with torch.jit.optimized_execution()` instead",
            FutureWarning,
            stacklevel=2,
        )

    var_lookup_fn = _create_interpreter_name_lookup_fn(0)

    if not isinstance(mod, torch.nn.Module):
        raise AttributeError("expected torch.nn.Module as the first argument")

    if not isinstance(inputs, dict):
        raise AttributeError("expected a dictionary of (method_name, input) pairs")

    old_module_map = torch.jit._trace._trace_module_map
    try:
        trace_module_map: dict = {}

        def register_submods(mod, prefix):
            for name, child in mod.named_children():
                submod_qualname = prefix + "." + name
                trace_module_map[child] = submod_qualname
                register_submods(child, submod_qualname)

        trace_module_map["__module"] = mod
        torch.jit._trace._trace_module_map = trace_module_map
        register_submods(mod, "__module")

        module = make_module(mod, _module_class, _compilation_unit)

        for method_name, example_inputs in inputs.items():
            if method_name == "forward":
                # "forward" is a special case because we need to trace
                # `Module.__call__`, which sets up some extra tracing, but uses
                # the argument names of the real `Module.forward` method.
                func = mod
                forward_method = getattr(mod, method_name)
                argument_names = get_callable_argument_names(forward_method)
            else:
                func = getattr(mod, method_name)
                argument_names = get_callable_argument_names(func)

            if isinstance(example_inputs, dict) and example_inputs_is_kwarg:
                # Raise an exception when user-provided key names don't align
                # with the forward() method's argument names.
                for key in example_inputs:
                    if key not in argument_names:
                        valid_arguments = "[" + ",".join(argument_names) + "]"
                        raise NameError(
                            f"""'{key}' is not in forward() method's arguments,
                         valid arguments name are {valid_arguments}"""
                        )
                module._c._create_method_from_trace_with_dict(
                    method_name,
                    func,
                    example_inputs,
                    var_lookup_fn,
                    strict,
                    _force_outplace,
                    argument_names,
                    _store_inputs,
                )
            else:
                example_inputs = make_tuple(example_inputs)
                module._c._create_method_from_trace(
                    method_name,
                    func,
                    example_inputs,
                    var_lookup_fn,
                    strict,
                    _force_outplace,
                    argument_names,
                    _store_inputs,
                )

            check_trace_method = module._c._get_method(method_name)

            # Check the trace against new traces created from user-specified inputs
            if check_trace:
                if check_inputs is not None:
                    _check_trace(
                        check_inputs,
                        func,
                        check_trace_method,
                        check_tolerance,
                        strict,
                        _force_outplace,
                        True,
                        _module_class,
                        example_inputs_is_kwarg=example_inputs_is_kwarg,
                    )
                else:
                    _check_trace(
                        [inputs],
                        func,
                        check_trace_method,
                        check_tolerance,
                        strict,
                        _force_outplace,
                        True,
                        _module_class,
                        example_inputs_is_kwarg=example_inputs_is_kwarg,
                    )
    finally:
        torch.jit._trace._trace_module_map = old_module_map

    return module


def is_tracing():
    """Return a boolean value.

    Returns ``True`` in tracing (if a function is called during the
    tracing of code with ``torch.jit.trace``) and ``False`` otherwise.
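
    Example (a minimal sketch of guarding trace-unfriendly code)::

        import torch

        def f(x):
            if torch.jit.is_tracing():
                # Skip data-dependent assertions that cannot be traced.
                return x * 2
            assert x.sum().item() > 0, "expected positive input"
            return x * 2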
    F)r   r!   rc   _is_tracingr*   r*   r*   r+   rI  ,  s   
rI  c                       sR   e Zd ZdZd fdd	Zdd Z fddZ fd	d
Zdd Zdd Z	  Z
S )TracedModuleTNc                    s  t    t|tjjsJ t  G dd dtjj}tjt	||_
| } fdd}|j|_|j D ]\}}|d urG||j|< || q6|j D ]\}}	|	d ur^|	|j|< ||	 qM|j D ]\}}
tj|
r~||jvr~||jvr~t|||
 qd|jrtdt| |j D ]\}}|d u rqt|td d|j|< qtjjj|dd d	d
d}t	|j| jd< || jd< dD ]}t| | qd S )Nc                   @   s   e Zd ZdS )z.TracedModule.__init__.<locals>.QualnameWrapperN)ri   rj   rk   r*   r*   r*   r+   QualnameWrapperG  s    r  c                    s   |  v rt d |  d S )Nz=TracedModules don't support parameter sharing between modules)r   r4   )paramid_setr*   r+   check_uniqueP  s
   z+TracedModule.__init__.<locals>.check_uniquez=Modules that have backward hooks assigned can't be compiled: rJ  c                 S   s   dS )Nr*   r*   )r6   r*   r*   r+   rv   w  s    z'TracedModule.__init__.<locals>.<lambda>FTrG  _name_actual_script_module)_parameters_buffers_modulestraining)r<   r=   r    r!   rf  r   r2   rK  r	   r1   _jit_override_qualnamer  r  r   r  __dict__rc   _jit_is_script_objectsetattr_backward_hooksr   r   r  rR  r  r0  rM  rO  ri   delattr)r   r(  r  r   r  
tmp_moduler  r   r  bufval	submodulescript_modulerF   r  r+   r=   :  s^   






zTracedModule.__init__c                 O   s   t d)Nz"Trace submodules cannot be called.)r   )r   rH   kwargsr*   r*   r+   rh     s   zTracedModule.forwardc                    s"   d| j vrt |S t| j|S Nr  )r  r<   __getattr__r!  r  )r   attrrF   r*   r+   r    s   
zTracedModule.__getattr__c                    s*   d| j vrt ||S t| j|| d S r  )r  r<   __setattr__r  r  )r   r  r   rF   r*   r+   r    s   
zTracedModule.__setattr__c                 C   ru  r;   r  rw  r*   r*   r+   	_get_name  ry  zTracedModule._get_namec                 C   s   d| j  S )Nzoriginal_name=r  rw  r*   r*   r+   
extra_repr  s   zTracedModule.extra_repr)NN)ri   rj   rk   _disable_script_metar=   rh   r  r  r  r  rl   r*   r*   rF   r+   r  7  s    Er  c                   @   s,   e Zd ZU e Zedef ed< dd ZdS )rP  .rh   c                 C   s   | j d | dS )z
        Re-construct an instance of TopLevelTracedModule using an instance of a C++ module.

        Args:
            cpp_module: The C++ module that this TopLevelTracedModule will be rebuilt around.
        """
        self.__dict__["_actual_script_module"]._reconstruct(cpp_module)


def _script_if_tracing(fn: Callable[P, R]) -> Callable[P, R]:
    @functools.wraps(fn)
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        if not is_tracing():
            # Not tracing, don't do anything
            return fn(*args, **kwargs)

        compiled_fn: Callable[P, R] = script(wrapper.__original_fn)  # type: ignore[attr-defined]
        return compiled_fn(*args, **kwargs)

    wrapper.__original_fn = fn  # type: ignore[attr-defined]
    wrapper.__script_if_tracing_wrapper = True  # type: ignore[attr-defined]

    return wrapper


def _get_trace_graph(
    f,
    args=(),
    kwargs=None,
    strict=True,
    _force_outplace=False,
    return_inputs=False,
    _return_inputs_states=False,
):
    """Return a tuple on tracing a function or model.

    .. warning::
        This function is internal-only and should only be used by the ONNX
        exporter. If you are trying to get a graph through tracing, please go
        through the public API instead::

            trace = torch.jit.trace(nn.LSTMCell(), (input, hidden))
            trace_graph = trace.graph

    Trace a function or model, returning a tuple consisting of both the
    *trace* of an execution, as well as the original return value. If
    return_inputs is set, the trace inputs are also returned as part of the
    tuple.

    Tracing is guaranteed not to change the semantics of the function/module
    that is traced.

    Args:
        f (torch.nn.Module or function): the function or module
            to be traced.
        args (tuple or Tensor): the positional arguments to pass to the
            function/module to be traced.  A non-tuple is assumed to
            be a single positional argument to be passed to the model.
        kwargs (dict): the keyword arguments to pass to the function/module
            to be traced.

    Example (trace a cell):

    .. testcode::

        trace = torch.jit.trace(nn.LSTMCell(), (input, hidden))
    """
    if kwargs is None:
        kwargs = {}
    if not isinstance(args, tuple):
        args = (args,)
    outs = ONNXTracedModule(
        f, strict, _force_outplace, return_inputs, _return_inputs_states
    )(*args, **kwargs)
    return outs