
from __future__ import annotations

import numpy as np

from optuna.study._multi_objective import _is_pareto_front


def _compute_2d(sorted_pareto_sols: np.ndarray, reference_point: np.ndarray) -> float:
    # The solutions, sorted by the first objective, form a staircase: the hypervolume is the sum
    # of the rectangles spanned by each solution and the second objective of the previous
    # solution (the reference point for the first solution).
    assert sorted_pareto_sols.shape[1] == reference_point.shape[0] == 2
    rect_diag_y = np.concatenate([reference_point[1:], sorted_pareto_sols[:-1, 1]])
    edge_length_x = reference_point[0] - sorted_pareto_sols[:, 0]
    edge_length_y = rect_diag_y - sorted_pareto_sols[:, 1]
    return edge_length_x @ edge_length_y


def _compute_3d(sorted_pareto_sols: np.ndarray, reference_point: np.ndarray) -> float:
    """
    Compute hypervolume in 3D. Time complexity is O(N^2) where N is sorted_pareto_sols.shape[0].
    If X, Y, Z coordinates are permutations of 0, 1, ..., N-1 and reference_point is (N, N, N), the
    hypervolume is calculated as the number of voxels (x, y, z) dominated by at least one point.
    If we fix x and y, this number is equal to the minimum of z' over all points (x', y', z')
    satisfying x' <= x and y' <= y. This can be efficiently computed using cumulative minimum
    (`np.minimum.accumulate`). Non-permutation coordinates can be transformed into permutation
    coordinates by using coordinate compression.
    """
    assert sorted_pareto_sols.shape[1] == reference_point.shape[0] == 3
    n = sorted_pareto_sols.shape[0]
    # Coordinate compression of the second objective: y_order[j] is the index of the solution
    # whose Y value has rank j. The X rank of a solution is simply its row index.
    y_order = np.argsort(sorted_pareto_sols[:, 1])
    # z_delta[i, j] is the depth below the reference point of the solution with X rank i and
    # Y rank j, or zero if there is no such solution.
    z_delta = np.zeros((n, n), dtype=float)
    z_delta[y_order, np.arange(n)] = reference_point[2] - sorted_pareto_sols[y_order, 2]
    # After the two cumulative maxima, z_delta[i, j] is the largest depth among solutions whose
    # X rank is at most i and Y rank is at most j, i.e. the depth of grid cell (i, j). Taking
    # the maximum depth is equivalent to the cumulative minimum of z mentioned above.
    z_delta = np.maximum.accumulate(np.maximum.accumulate(z_delta, axis=0), axis=1)
    x_vals = sorted_pareto_sols[:, 0]
    y_vals = sorted_pareto_sols[y_order, 1]
    # Widths and heights of the grid cells; the last cell on each axis ends at the reference point.
    x_delta = np.concatenate([x_vals[1:], reference_point[:1]]) - x_vals
    y_delta = np.concatenate([y_vals[1:], reference_point[1:2]]) - y_vals
    # Sum of width x height x depth over all grid cells.
    return np.dot(np.dot(x_delta, z_delta), y_delta)


def _compute_hv(sorted_loss_vals: np.ndarray, reference_point: np.ndarray) -> float:
    if sorted_loss_vals.shape[0] == 1:
        inclusive_hv = 1.0
        for r, v in zip(reference_point, sorted_loss_vals[0]):
            inclusive_hv *= r - v
        return float(inclusive_hv)
    elif sorted_loss_vals.shape[0] == 2:
        # Inclusion-exclusion for two boxes: S(A or B) = S(A) + S(B) - S(A and B).
        hv1, hv2, intersec = 1.0, 1.0, 1.0
        for r, v1, v2 in zip(reference_point, sorted_loss_vals[0], sorted_loss_vals[1]):
            hv1 *= r - v1
            hv2 *= r - v2
            intersec *= r - max(v1, v2)
        return hv1 + hv2 - intersec

    # WFG recursion. limited_sols_array[i, j] is solution j "limited" by solution i, i.e. their
    # element-wise maximum, whose box is the intersection of the boxes of solutions i and j.
    inclusive_hvs = (reference_point - sorted_loss_vals).prod(axis=-1)
    limited_sols_array = np.maximum(sorted_loss_vals[:, np.newaxis], sorted_loss_vals)
    # The hypervolume is the sum over i of the exclusive contribution of solution i with respect
    # to the solutions after it; the last solution has no successors, so its contribution is its
    # inclusive hypervolume.
    return inclusive_hvs[-1] + sum(
        _compute_exclusive_hv(limited_sols_array[i, i + 1 :], inclusive_hvs[i], reference_point)
        for i in range(inclusive_hvs.size - 1)
    )


def _compute_exclusive_hv(
    limited_sols: np.ndarray, inclusive_hv: float, reference_point: np.ndarray
) -> float:
    assert limited_sols.shape[0] >= 1
    if limited_sols.shape[0] <= 2:
        return inclusive_hv - _compute_hv(limited_sols, reference_point)
    # Dominated or duplicated limited solutions do not change the hypervolume, so drop them to
    # keep the recursion shallow. Limited solutions are not guaranteed to be unique or lexsorted.
    on_front = _is_pareto_front(limited_sols, assume_unique_lexsorted=False)
    return inclusive_hv - _compute_hv(limited_sols[on_front], reference_point)


def compute_hypervolume(
    loss_vals: np.ndarray, reference_point: np.ndarray, assume_pareto: bool = False
) -> float:
    """Hypervolume calculator for any dimension.

    This function exactly calculates the hypervolume for any dimension.
    For 3 dimensions or higher, the WFG algorithm will be used.
    Please refer to ``A Fast Way of Calculating Exact Hypervolumes`` for the WFG algorithm.

    .. note::
        This function is used for computing the hypervolumes of points in multi-objective space.
        Each coordinate of each point represents one of the ``values`` of the multi-objective
        function.

    .. note::
        We check that each objective is to be minimized. Transform objective values that are
        to be maximized before calling this function, for example:
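
        .. code-block:: python

            # Hypothetical two-objective case where the second objective is maximized: negate
            # that coordinate in both the solutions and the reference point so that it becomes
            # a minimization objective. raw_loss_vals and raw_reference_point are placeholder
            # names, not part of this module.
            signs = np.array([1.0, -1.0])
            hv = compute_hypervolume(signs * raw_loss_vals, signs * raw_reference_point)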

    Args:
        loss_vals:
            An array of loss value vectors to calculate the hypervolume.
        reference_point:
            The reference point used to calculate the hypervolume.
        assume_pareto:
            Whether to assume Pareto optimality of ``loss_vals``.
            In other words, if ``True``, none of the loss vectors is dominated by another.
            ``assume_pareto`` is used only for speedup, and the result does not change even if
            this argument is given incorrectly. If there are many non-Pareto solutions in
            ``loss_vals``, ``assume_pareto=True`` will speed up the calculation.

    Returns:
        The hypervolume of the given arguments.
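
    Example:
        A minimal illustration with two points and two objectives; the area jointly dominated
        with respect to the reference point ``(2, 2)`` is ``3.0``:

        .. code-block:: python

            import numpy as np

            loss_vals = np.array([[1.0, 0.0], [0.0, 1.0]])
            reference_point = np.array([2.0, 2.0])
            hv = compute_hypervolume(loss_vals, reference_point)  # 3.0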

    """
    if not np.all(loss_vals <= reference_point):
        raise ValueError(
            "All points must dominate or equal the reference point. That is, for all points in "
            "the loss_vals and the coordinate `i`, `loss_vals[i] <= reference_point[i]`."
        )
    if not np.all(np.isfinite(reference_point)):
        # The reference point has no nan (checked above), so an infinite coordinate means the
        # dominated region, and hence the hypervolume, is infinite.
        return float("inf")
    if loss_vals.size == 0:
        return 0.0

    if not assume_pareto:
        unique_lexsorted_loss_vals = np.unique(loss_vals, axis=0)
        on_front = _is_pareto_front(unique_lexsorted_loss_vals, assume_unique_lexsorted=True)
        sorted_pareto_sols = unique_lexsorted_loss_vals[on_front]
    else:
        # The 2-D and 3-D routines rely on the solutions being sorted by the first objective.
        sorted_pareto_sols = loss_vals[loss_vals[:, 0].argsort()]

    if reference_point.shape[0] == 2:
        hv = _compute_2d(sorted_pareto_sols, reference_point)
    elif reference_point.shape[0] == 3:
        hv = _compute_3d(sorted_pareto_sols, reference_point)
    else:
        hv = _compute_hv(sorted_pareto_sols, reference_point)

    # Guard against overflow in the intermediate products.
    return float(hv) if np.isfinite(hv) else float("inf")