from typing import Optional, Union

import torch
from torch import Tensor

from .optimizer import _to_scalar, Optimizer, ParamsT

__all__ = ["LBFGS"]


def _cubic_interpolate(x1, f1, g1, x2, f2, g2, bounds=None):
    # ported from https://github.com/torch/optim/blob/master/polyinterp.lua
    # Compute bounds of interpolation area
    if bounds is not None:
        xmin_bound, xmax_bound = bounds
    else:
        xmin_bound, xmax_bound = (x1, x2) if x1 <= x2 else (x2, x1)

    # Most common case: cubic interpolation of 2 points
    # with function and derivative values for both.
    # Solution in this case (where x2 is the farthest point):
    #   d1 = g1 + g2 - 3*(f1-f2)/(x1-x2)
    #   d2 = sqrt(d1^2 - g1*g2)
    #   min_pos = x2 - (x2-x1)*((g2 + d2 - d1)/(g2 - g1 + 2*d2))
    #   t_new = min(max(min_pos, xmin_bound), xmax_bound)
    d1 = g1 + g2 - 3 * (f1 - f2) / (x1 - x2)
    d2_square = d1**2 - g1 * g2
    if d2_square >= 0:
        d2 = d2_square.sqrt()
        if x1 <= x2:
            min_pos = x2 - (x2 - x1) * ((g2 + d2 - d1) / (g2 - g1 + 2 * d2))
        else:
            min_pos = x1 - (x1 - x2) * ((g1 + d2 - d1) / (g1 - g2 + 2 * d2))
        return min(max(min_pos, xmin_bound), xmax_bound)
    else:
        return (xmin_bound + xmax_bound) / 2.0

def _strong_wolfe(
    obj_func, x, t, d, f, g, gtd, c1=1e-4, c2=0.9, tolerance_change=1e-9, max_ls=25
):
    # ported from https://github.com/torch/optim/blob/master/lswolfe.lua
    d_norm = d.abs().max()
    g = g.clone(memory_format=torch.contiguous_format)
    # evaluate objective and gradient using initial step
    f_new, g_new = obj_func(x, t, d)
    ls_func_evals = 1
    gtd_new = g_new.dot(d)

    # bracket an interval containing a point satisfying the Wolfe criteria
    t_prev, f_prev, g_prev, gtd_prev = 0, f, g, gtd
    done = False
    ls_iter = 0
    while ls_iter < max_ls:
        # check conditions
        if f_new > (f + c1 * t * gtd) or (ls_iter > 1 and f_new >= f_prev):
            bracket = [t_prev, t]
            bracket_f = [f_prev, f_new]
            bracket_g = [g_prev, g_new.clone(memory_format=torch.contiguous_format)]
            bracket_gtd = [gtd_prev, gtd_new]
            break

        if abs(gtd_new) <= -c2 * gtd:
            bracket = [t]
            bracket_f = [f_new]
            bracket_g = [g_new]
            done = True
            break

        if gtd_new >= 0:
            bracket = [t_prev, t]
            bracket_f = [f_prev, f_new]
            bracket_g = [g_prev, g_new.clone(memory_format=torch.contiguous_format)]
            bracket_gtd = [gtd_prev, gtd_new]
            break

        # interpolate
        min_step = t + 0.01 * (t - t_prev)
        max_step = t * 10
        tmp = t
        t = _cubic_interpolate(
            t_prev, f_prev, gtd_prev, t, f_new, gtd_new, bounds=(min_step, max_step)
        )

        # next step
        t_prev = tmp
        f_prev = f_new
        g_prev = g_new.clone(memory_format=torch.contiguous_format)
        gtd_prev = gtd_new
        f_new, g_new = obj_func(x, t, d)
        ls_func_evals += 1
        gtd_new = g_new.dot(d)
        ls_iter += 1

    # reached max number of iterations?
    if ls_iter == max_ls:
        bracket = [0, t]
        bracket_f = [f, f_new]
        bracket_g = [g, g_new]

    # zoom phase: we now have a point satisfying the criteria, or
    # a bracket around it. We refine the bracket until we find the
    # exact point satisfying the criteria.
    insuf_progress = False
    # find high and low points in bracket
    low_pos, high_pos = (0, 1) if bracket_f[0] <= bracket_f[-1] else (1, 0)
    while not done and ls_iter < max_ls:
        # line-search bracket is so small
        if abs(bracket[1] - bracket[0]) * d_norm < tolerance_change:
            break

        # compute new trial value
        t = _cubic_interpolate(
            bracket[0],
            bracket_f[0],
            bracket_gtd[0],
            bracket[1],
            bracket_f[1],
            bracket_gtd[1],
        )

        # test that we are making sufficient progress:
        # in case `t` is so close to a boundary, we mark that we are making
        # insufficient progress, and if
        #   + we have made insufficient progress in the last step, or
        #   + `t` is at one of the boundaries,
        # we will move `t` to a position which is `0.1 * len(bracket)`
        # away from the nearest boundary point.
        eps = 0.1 * (max(bracket) - min(bracket))
        if min(max(bracket) - t, t - min(bracket)) < eps:
            # interpolation close to boundary
            if insuf_progress or t >= max(bracket) or t <= min(bracket):
                # evaluate at 0.1 away from boundary
                if abs(t - max(bracket)) < abs(t - min(bracket)):
                    t = max(bracket) - eps
                else:
                    t = min(bracket) + eps
                insuf_progress = False
            else:
                insuf_progress = True
        else:
            insuf_progress = False

        # Evaluate new point
        f_new, g_new = obj_func(x, t, d)
        ls_func_evals += 1
        gtd_new = g_new.dot(d)
        ls_iter += 1

        if f_new > (f + c1 * t * gtd) or f_new >= bracket_f[low_pos]:
            # Armijo condition not satisfied or not lower than lowest point
            bracket[high_pos] = t
            bracket_f[high_pos] = f_new
            bracket_g[high_pos] = g_new.clone(memory_format=torch.contiguous_format)
            bracket_gtd[high_pos] = gtd_new
            low_pos, high_pos = (0, 1) if bracket_f[0] <= bracket_f[1] else (1, 0)
        else:
            if abs(gtd_new) <= -c2 * gtd:
                # Wolfe conditions satisfied
                done = True
            elif gtd_new * (bracket[high_pos] - bracket[low_pos]) >= 0:
                # old low becomes new high
                bracket[high_pos] = bracket[low_pos]
                bracket_f[high_pos] = bracket_f[low_pos]
                bracket_g[high_pos] = bracket_g[low_pos]
                bracket_gtd[high_pos] = bracket_gtd[low_pos]

            # new point becomes new low
            bracket[low_pos] = t
            bracket_f[low_pos] = f_new
            bracket_g[low_pos] = g_new.clone(memory_format=torch.contiguous_format)
            bracket_gtd[low_pos] = gtd_new

    # return stuff
    t = bracket[low_pos]
    f_new = bracket_f[low_pos]
    g_new = bracket_g[low_pos]
    return f_new, g_new, t, ls_func_evals
Zdd Zdd Zdd Zdd Zdd Zdd Ze dd Z  ZS )r	   a  Implements L-BFGS algorithm.

    Heavily inspired by `minFunc
    <https://www.cs.ubc.ca/~schmidtm/Software/minFunc.html>`_.

    .. warning::
        This optimizer doesn't support per-parameter options and parameter
        groups (there can be only one).

    .. warning::
        Right now all parameters have to be on a single device. This will be
        improved in the future.

    .. note::
        This is a very memory intensive optimizer (it requires additional
        ``param_bytes * (history_size + 1)`` bytes). If it doesn't fit in memory
        try reducing the history size, or use a different algorithm.
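        For example, 10 million float32 parameters (about 4e7 bytes) with the
        default ``history_size=100`` need roughly ``4e7 * 101`` bytes, i.e.
        about 4 GB of additional memory.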

    Args:
        params (iterable): iterable of parameters to optimize. Parameters must be real.
        lr (float, optional): learning rate (default: 1)
        max_iter (int, optional): maximal number of iterations per optimization step
            (default: 20)
        max_eval (int, optional): maximal number of function evaluations per optimization
            step (default: max_iter * 1.25).
        tolerance_grad (float, optional): termination tolerance on first order optimality
            (default: 1e-7).
        tolerance_change (float, optional): termination tolerance on function
            value/parameter changes (default: 1e-9).
        history_size (int, optional): update history size (default: 100).
        line_search_fn (str, optional): either 'strong_wolfe' or None (default: None).
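
    Example (illustrative sketch; assumes ``model``, ``loss_fn``, ``input``
    and ``target`` are already defined):
        >>> optimizer = torch.optim.LBFGS(
        ...     model.parameters(), lr=1, line_search_fn="strong_wolfe"
        ... )
        >>> def closure():
        ...     # L-BFGS re-evaluates the objective several times per step,
        ...     # so the forward/backward pass lives inside a closure
        ...     optimizer.zero_grad()
        ...     loss = loss_fn(model(input), target)
        ...     loss.backward()
        ...     return loss
        >>> optimizer.step(closure)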
    """

    def __init__(
        self,
        params: ParamsT,
        lr: Union[float, Tensor] = 1,
        max_iter: int = 20,
        max_eval: Optional[int] = None,
        tolerance_grad: float = 1e-7,
        tolerance_change: float = 1e-9,
        history_size: int = 100,
        line_search_fn: Optional[str] = None,
    ):
        if isinstance(lr, Tensor) and lr.numel() != 1:
            raise ValueError("Tensor lr must be 1-element")
        if not 0.0 <= lr:
            raise ValueError(f"Invalid learning rate: {lr}")
        if max_eval is None:
            max_eval = max_iter * 5 // 4
        defaults = dict(
            lr=lr,
            max_iter=max_iter,
            max_eval=max_eval,
            tolerance_grad=tolerance_grad,
            tolerance_change=tolerance_change,
            history_size=history_size,
            line_search_fn=line_search_fn,
        )
        super().__init__(params, defaults)

        if len(self.param_groups) != 1:
            raise ValueError(
                "LBFGS doesn't support per-parameter options (parameter groups)"
            )

        self._params = self.param_groups[0]["params"]
        self._numel_cache = None

    def _numel(self):
        # total number of scalar elements across all parameters
        # (complex parameters count twice: real and imaginary parts)
        if self._numel_cache is None:
            self._numel_cache = sum(
                2 * p.numel() if torch.is_complex(p) else p.numel()
                for p in self._params
            )
        return self._numel_cache

    def _gather_flat_grad(self):
        # flatten and concatenate all gradients into a single 1-D tensor
        views = []
        for p in self._params:
            if p.grad is None:
                view = p.new(p.numel()).zero_()
            elif p.grad.is_sparse:
                view = p.grad.to_dense().view(-1)
            else:
                view = p.grad.view(-1)
            if torch.is_complex(view):
                view = torch.view_as_real(view).view(-1)
            views.append(view)
        return torch.cat(views, 0)

    def _add_grad(self, step_size, update):
        # apply x <- x + step_size * update across the flattened parameters
        offset = 0
        for p in self._params:
            if torch.is_complex(p):
                p = torch.view_as_real(p)
            numel = p.numel()
            # view as to avoid deprecated pointwise semantics
            p.add_(update[offset : offset + numel].view_as(p), alpha=step_size)
            offset += numel
        assert offset == self._numel()

    def _clone_param(self):
        return [p.clone(memory_format=torch.contiguous_format) for p in self._params]

    def _set_param(self, params_data):
        for p, pdata in zip(self._params, params_data):
            p.copy_(pdata)

    def _directional_evaluate(self, closure, x, t, d):
        # evaluate the objective and flat gradient at x + t * d,
        # then restore the original parameters
        self._add_grad(t, d)
        loss = float(closure())
        flat_grad = self._gather_flat_grad()
        self._set_param(x)
        return loss, flat_grad

    @torch.no_grad()
    def step(self, closure):
        """Perform a single optimization step.

        Args:
            closure (Callable): A closure that reevaluates the model
                and returns the loss.
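
        Example (illustrative; assumes an ``LBFGS`` instance ``optimizer``
        plus ``model``, ``loss_fn``, ``input`` and ``target``):
            >>> def closure():
            ...     optimizer.zero_grad()
            ...     loss = loss_fn(model(input), target)
            ...     loss.backward()
            ...     return loss
            >>> loss = optimizer.step(closure)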
        """
        assert len(self.param_groups) == 1

        # Make sure the closure is always called with grad enabled
        closure = torch.enable_grad()(closure)

        group = self.param_groups[0]
        lr = _to_scalar(group["lr"])
        max_iter = group["max_iter"]
        max_eval = group["max_eval"]
        tolerance_grad = group["tolerance_grad"]
        tolerance_change = group["tolerance_change"]
        line_search_fn = group["line_search_fn"]
        history_size = group["history_size"]

        # NOTE: LBFGS has only global state, but we register it as state for
        # the first param, because this helps with casting in load_state_dict
        state = self.state[self._params[0]]
        state.setdefault("func_evals", 0)
        state.setdefault("n_iter", 0)

        # evaluate initial f(x) and df/dx
        orig_loss = closure()
        loss = float(orig_loss)
        current_evals = 1
        state["func_evals"] += 1

        flat_grad = self._gather_flat_grad()
        opt_cond = flat_grad.abs().max() <= tolerance_grad

        # optimal condition
        if opt_cond:
            return orig_loss

        # tensors cached in state (for tracing)
        d = state.get("d")
        t = state.get("t")
        old_dirs = state.get("old_dirs")
        old_stps = state.get("old_stps")
        ro = state.get("ro")
        H_diag = state.get("H_diag")
        prev_flat_grad = state.get("prev_flat_grad")
        prev_loss = state.get("prev_loss")

        n_iter = 0
        # optimize for a max of max_iter iterations
        while n_iter < max_iter:
            # keep track of nb of iterations
            n_iter += 1
            state["n_iter"] += 1

            ############################################################
            # compute gradient descent direction
            ############################################################
            if state["n_iter"] == 1:
                d = flat_grad.neg()
                old_dirs = []
                old_stps = []
                ro = []
                H_diag = 1
            else:
                # do lbfgs update (update memory)
                y = flat_grad.sub(prev_flat_grad)
                s = d.mul(t)
                ys = y.dot(s)  # y*s
                if ys > 1e-10:
                    # updating memory
                    if len(old_dirs) == history_size:
                        # shift history by one (limited-memory)
                        old_dirs.pop(0)
                        old_stps.pop(0)
                        ro.pop(0)

                    # store new direction/step
                    old_dirs.append(y)
                    old_stps.append(s)
                    ro.append(1.0 / ys)

                    # update scale of initial Hessian approximation
                    H_diag = ys / y.dot(y)  # (y*y)

                # compute the approximate (L-BFGS) inverse Hessian
                # multiplied by the gradient
                num_old = len(old_dirs)

                if "al" not in state:
                    state["al"] = [None] * history_size
                al = state["al"]

                # iteration in L-BFGS loop collapsed to use just one buffer
                q = flat_grad.neg()
                for i in range(num_old - 1, -1, -1):
                    al[i] = old_stps[i].dot(q) * ro[i]
                    q.add_(old_dirs[i], alpha=-al[i])

                # multiply by initial Hessian
                # r/d is the final direction
                d = r = torch.mul(q, H_diag)
                for i in range(num_old):
                    be_i = old_dirs[i].dot(r) * ro[i]
                    r.add_(old_stps[i], alpha=al[i] - be_i)

            if prev_flat_grad is None:
                prev_flat_grad = flat_grad.clone(memory_format=torch.contiguous_format)
            else:
                prev_flat_grad.copy_(flat_grad)
            prev_loss = loss

            ############################################################
            # compute step length
            ############################################################
            # reset initial guess for step size
            if state["n_iter"] == 1:
                t = min(1.0, 1.0 / flat_grad.abs().sum()) * lr
            else:
                t = lr

            # directional derivative
            gtd = flat_grad.dot(d)  # g * d

            # directional derivative is below tolerance
            if gtd > -tolerance_change:
                break

            # optional line search: user function
            ls_func_evals = 0
            if line_search_fn is not None:
                # perform line search, using user function
                if line_search_fn != "strong_wolfe":
                    raise RuntimeError("only 'strong_wolfe' is supported")
                else:
                    x_init = self._clone_param()

                    def obj_func(x, t, d):
                        return self._directional_evaluate(closure, x, t, d)

                    loss, flat_grad, t, ls_func_evals = _strong_wolfe(
                        obj_func, x_init, t, d, loss, flat_grad, gtd
                    )
                self._add_grad(t, d)
                opt_cond = flat_grad.abs().max() <= tolerance_grad
            else:
                # no line search, simply move with fixed-step
                self._add_grad(t, d)
                if n_iter != max_iter:
                    # re-evaluate function only if not in last iteration
                    # the reason we do this: in a stochastic setting,
                    # no use to re-evaluate that function here
                    with torch.enable_grad():
                        loss = float(closure())
                    flat_grad = self._gather_flat_grad()
                    opt_cond = flat_grad.abs().max() <= tolerance_grad
                    ls_func_evals = 1

            # update func eval
            current_evals += ls_func_evals
            state["func_evals"] += ls_func_evals

            ############################################################
            # check conditions
            ############################################################
            if n_iter == max_iter:
                break

            if current_evals >= max_eval:
                break

            # optimal condition
            if opt_cond:
                break

            # lack of progress
            if d.mul(t).abs().max() <= tolerance_change:
                break

            if abs(loss - prev_loss) < tolerance_change:
                break

        state["d"] = d
        state["t"] = t
        state["old_dirs"] = old_dirs
        state["old_stps"] = old_stps
        state["ro"] = ro
        state["H_diag"] = H_diag
        state["prev_flat_grad"] = prev_flat_grad
        state["prev_loss"] = prev_loss

        return orig_loss