
from __future__ import annotations

import hashlib
import heapq
import importlib
import json
import logging
import os
import queue
import random
import sys
from contextlib import contextmanager
from importlib.metadata import PackageNotFoundError, metadata
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Literal, overload

import numpy as np
import requests
import torch
from huggingface_hub import hf_hub_download, snapshot_download
from scipy.sparse import coo_matrix
from sklearn.metrics import pairwise_distances
from torch import Tensor, device
from tqdm import trange
from tqdm.autonotebook import tqdm
from transformers import is_torch_npu_available

logger = logging.getLogger(__name__)

if TYPE_CHECKING:
    from datasets import Dataset

    from sentence_transformers.cross_encoder.CrossEncoder import CrossEncoder
    from sentence_transformers.SentenceTransformer import SentenceTransformer


def _convert_to_tensor(a: list | np.ndarray | Tensor) -> Tensor:
    """
    Converts the input `a` to a PyTorch tensor if it is not already a tensor.
    Handles lists of sparse tensors by stacking them.

    Args:
        a (Union[list, np.ndarray, Tensor]): The input array or tensor.

    Returns:
        Tensor: The converted tensor.
    """
    if isinstance(a, list):
        if all(isinstance(x, Tensor) and x.is_sparse for x in a):
            return torch.stack([x.coalesce().to(torch.float32) for x in a])
        a = torch.tensor(a)
    elif not isinstance(a, Tensor):
        a = torch.tensor(a)
    if a.is_sparse:
        return a.to(torch.float32)
    return a


def _convert_to_batch(a: Tensor) -> Tensor:
    """
    If the tensor `a` is 1-dimensional, it is unsqueezed to add a batch dimension.

    Args:
        a (Tensor): The input tensor.

    Returns:
        Tensor: The tensor with a batch dimension.
    """
    if a.dim() == 1:
        a = a.unsqueeze(0)
    return a


def _convert_to_batch_tensor(a: list | np.ndarray | Tensor) -> Tensor:
    """
    Converts the input data to a tensor with a batch dimension.
    Handles lists of sparse tensors by stacking them.

    Args:
        a (Union[list, np.ndarray, Tensor]): The input data to be converted.

    Returns:
        Tensor: The converted tensor with a batch dimension.
    """
    a = _convert_to_tensor(a)
    if a.dim() == 1:
        a = a.unsqueeze(0)
    return a


def pytorch_cos_sim(a: Tensor, b: Tensor) -> Tensor:
    """
    Computes the cosine similarity between two tensors.

    Args:
        a (Union[list, np.ndarray, Tensor]): The first tensor.
        b (Union[list, np.ndarray, Tensor]): The second tensor.
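
    Example:
        A small sanity check with hand-built unit vectors (``pytorch_cos_sim``
        simply forwards to :func:`cos_sim`):

        >>> import torch
        >>> from sentence_transformers.util import cos_sim
        >>> cos_sim(torch.tensor([[1.0, 0.0], [0.0, 1.0]]), torch.tensor([[1.0, 0.0]]))
        tensor([[1.],
                [0.]])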

    Returns:
        Tensor: Matrix with res[i][j] = cos_sim(a[i], b[j])
    """
    return cos_sim(a, b)


def cos_sim(a: list | np.ndarray | Tensor, b: list | np.ndarray | Tensor) -> Tensor:
    """
    Computes the cosine similarity between two tensors.

    Args:
        a (Union[list, np.ndarray, Tensor]): The first tensor.
        b (Union[list, np.ndarray, Tensor]): The second tensor.

    Returns:
        Tensor: Matrix with res[i][j] = cos_sim(a[i], b[j])
    """
    a = _convert_to_batch_tensor(a)
    b = _convert_to_batch_tensor(b)

    a_norm = normalize_embeddings(a)
    b_norm = normalize_embeddings(b)
    return torch.mm(a_norm, b_norm.transpose(0, 1)).to_dense()


def pairwise_cos_sim(a: list | np.ndarray | Tensor, b: list | np.ndarray | Tensor) -> Tensor:
    """
    Computes the pairwise cosine similarity cos_sim(a[i], b[i]).

    Args:
        a (Union[list, np.ndarray, Tensor]): The first tensor.
        b (Union[list, np.ndarray, Tensor]): The second tensor.

    Returns:
        Tensor: Vector with res[i] = cos_sim(a[i], b[i])
    """
    a = _convert_to_tensor(a)
    b = _convert_to_tensor(b)

    if a.is_sparse or b.is_sparse:
        a_norm = normalize_embeddings(a)
        b_norm = normalize_embeddings(b)
        return (a_norm * b_norm).sum(dim=-1).to_dense()

    return pairwise_dot_score(normalize_embeddings(a), normalize_embeddings(b))


def dot_score(a: list | np.ndarray | Tensor, b: list | np.ndarray | Tensor) -> Tensor:
    """
    Computes the dot-product dot_prod(a[i], b[j]) for all i and j.

    Args:
        a (Union[list, np.ndarray, Tensor]): The first tensor.
        b (Union[list, np.ndarray, Tensor]): The second tensor.
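
    Example:
        A small sanity check with hand-built vectors:

        >>> import torch
        >>> from sentence_transformers.util import dot_score
        >>> dot_score(torch.tensor([[1.0, 2.0]]), torch.tensor([[3.0, 4.0]]))
        tensor([[11.]])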

    Returns:
        Tensor: Matrix with res[i][j] = dot_prod(a[i], b[j])
    """
    a = _convert_to_batch_tensor(a)
    b = _convert_to_batch_tensor(b)

    return torch.mm(a, b.transpose(0, 1)).to_dense()


def pairwise_dot_score(a: list | np.ndarray | Tensor, b: list | np.ndarray | Tensor) -> Tensor:
    """
    Computes the pairwise dot-product dot_prod(a[i], b[i]).

    Args:
        a (Union[list, np.ndarray, Tensor]): The first tensor.
        b (Union[list, np.ndarray, Tensor]): The second tensor.

    Returns:
        Tensor: Vector with res[i] = dot_prod(a[i], b[i])
    """
    a = _convert_to_tensor(a)
    b = _convert_to_tensor(b)

    return (a * b).sum(dim=-1).to_dense()


def to_scipy_coo(x: Tensor) -> coo_matrix:
    x = x.coalesce()
    indices = x.indices().cpu().numpy()
    values = x.values().cpu().numpy()
    return coo_matrix((values, (indices[0], indices[1])), shape=x.shape)


def manhattan_sim(a: list | np.ndarray | Tensor, b: list | np.ndarray | Tensor) -> Tensor:
    """
    Computes the manhattan similarity (i.e., negative distance) between two tensors.
    Handles sparse tensors without converting to dense when possible.

    Args:
        a (Union[list, np.ndarray, Tensor]): The first tensor.
        b (Union[list, np.ndarray, Tensor]): The second tensor.
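
    Example:
        A small sanity check with hand-built vectors (the score is the negated L1 distance):

        >>> import torch
        >>> from sentence_transformers.util import manhattan_sim
        >>> manhattan_sim(torch.tensor([[1.0, 0.0]]), torch.tensor([[0.0, 1.0]]))
        tensor([[-2.]])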

    Returns:
        Tensor: Matrix with res[i][j] = -manhattan_distance(a[i], b[j])
    """
    a = _convert_to_batch_tensor(a)
    b = _convert_to_batch_tensor(b)

    if a.is_sparse or b.is_sparse:
        logger.warning("Using scipy for sparse Manhattan similarity computation.")
        a_coo = to_scipy_coo(a)
        b_coo = to_scipy_coo(b)
        dist = pairwise_distances(a_coo, b_coo, metric="manhattan")
        return -torch.from_numpy(dist).float().to(a.device)

    return -torch.cdist(a, b, p=1.0)


def pairwise_manhattan_sim(a: list | np.ndarray | Tensor, b: list | np.ndarray | Tensor) -> Tensor:
    """
    Computes the manhattan similarity (i.e., negative distance) between pairs of tensors.

    Args:
        a (Union[list, np.ndarray, Tensor]): The first tensor.
        b (Union[list, np.ndarray, Tensor]): The second tensor.

    Returns:
        Tensor: Vector with res[i] = -manhattan_distance(a[i], b[i])
    """
    a = _convert_to_tensor(a)
    b = _convert_to_tensor(b)

    return -torch.sum(torch.abs(a - b), dim=-1).to_dense()


def euclidean_sim(a: list | np.ndarray | Tensor, b: list | np.ndarray | Tensor) -> Tensor:
    """
    Computes the euclidean similarity (i.e., negative distance) between two tensors.
    Handles sparse tensors without converting to dense when possible.

    Args:
        a (Union[list, np.ndarray, Tensor]): The first tensor.
        b (Union[list, np.ndarray, Tensor]): The second tensor.
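
    Example:
        A small sanity check with a 3-4-5 triangle (the score is the negated L2 distance):

        >>> import torch
        >>> from sentence_transformers.util import euclidean_sim
        >>> euclidean_sim(torch.tensor([[0.0, 0.0]]), torch.tensor([[3.0, 4.0]]))
        tensor([[-5.]])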

    Returns:
        Tensor: Matrix with res[i][j] = -euclidean_distance(a[i], b[j])
    """
    a = _convert_to_batch_tensor(a)
    b = _convert_to_batch_tensor(b)

    if a.is_sparse or b.is_sparse:
        # Compute the squared distances via ||a||^2 - 2*a.b + ||b||^2 to stay sparse-friendly
        a_norm_sq = torch.sparse.sum(a * a, dim=1).to_dense().unsqueeze(1)
        b_norm_sq = torch.sparse.sum(b * b, dim=1).to_dense().unsqueeze(0)
        dot_product = torch.matmul(a, b.t()).to_dense()

        squared_dist = a_norm_sq - 2 * dot_product + b_norm_sq
        squared_dist = torch.clamp(squared_dist, min=0.0)
        return -torch.sqrt(squared_dist)

    return -torch.cdist(a, b, p=2.0)


def pairwise_euclidean_sim(a: list | np.ndarray | Tensor, b: list | np.ndarray | Tensor) -> Tensor:
    """
    Computes the euclidean distance (i.e., negative distance) between pairs of tensors.

    Args:
        a (Union[list, np.ndarray, Tensor]): The first tensor.
        b (Union[list, np.ndarray, Tensor]): The second tensor.

    Returns:
        Tensor: Vector with res[i] = -euclidean_distance(a[i], b[i])
    """
    a = _convert_to_tensor(a)
    b = _convert_to_tensor(b)

    return -torch.sqrt(torch.sum((a - b) ** 2, dim=-1).to_dense())


def pairwise_angle_sim(x: Tensor, y: Tensor) -> Tensor:
    """
    Computes the absolute normalized angle distance. See :class:`~sentence_transformers.losses.AnglELoss`
    or https://arxiv.org/abs/2309.12871v1 for more information.
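
    The two inputs are treated as complex vectors (the first half of each embedding is
    the real part, the second half the imaginary part), and the score is derived from
    the normalized complex division ``x / y``, following the AnglE formulation.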

    Args:
        x (Tensor): The first tensor.
        y (Tensor): The second tensor.

    Returns:
        Tensor: Vector with res[i] = angle_sim(a[i], b[i])
    """
    if x.is_sparse or y.is_sparse:
        logger.warning("Pairwise angle similarity does not support sparse tensors. Converting to dense.")
        if x.is_sparse:
            x = x.coalesce().to_dense()
        if y.is_sparse:
            y = y.coalesce().to_dense()

    x = _convert_to_tensor(x)
    y = _convert_to_tensor(y)

    # modified from https://github.com/SeanLee97/AnglE/blob/main/angle_emb/angle.py
    # chunk both tensors to obtain the real and imaginary parts
    a, b = torch.chunk(x, 2, dim=1)
    c, d = torch.chunk(y, 2, dim=1)

    z = torch.sum(c**2 + d**2, dim=1, keepdim=True)
    re = (a * c + b * d) / z
    im = (b * c - a * d) / z

    dz = torch.sum(a**2 + b**2, dim=1, keepdim=True) ** 0.5
    dw = torch.sum(c**2 + d**2, dim=1, keepdim=True) ** 0.5
    re /= dz / dw
    im /= dz / dw

    norm_angle = torch.sum(torch.concat((re, im), dim=1), dim=1)
    return torch.abs(norm_angle)


def normalize_embeddings(embeddings: Tensor) -> Tensor:
    """
    Normalizes the embeddings matrix, so that each sentence embedding has unit length.

    Args:
        embeddings (Tensor): The input embeddings matrix.
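
    Example:
        A small sanity check with a 3-4-5 vector:

        >>> import torch
        >>> from sentence_transformers.util import normalize_embeddings
        >>> normalize_embeddings(torch.tensor([[3.0, 4.0]]))
        tensor([[0.6000, 0.8000]])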

    Returns:
        Tensor: The normalized embeddings matrix.
    """
    if not embeddings.is_sparse:
        return torch.nn.functional.normalize(embeddings, p=2, dim=1)

    embeddings = embeddings.coalesce()
    indices, values = embeddings.indices(), embeddings.values()

    # Accumulate the squared values per row, then take the square root per entry
    row_norms = torch.zeros(embeddings.size(0), device=embeddings.device)
    row_norms.index_add_(0, indices[0], values**2)
    row_norms = torch.sqrt(row_norms).index_select(0, indices[0])

    # Avoid division by zero for empty rows
    mask = row_norms > 0
    normalized_values = values.clone()
    normalized_values[mask] = values[mask] / row_norms[mask]

    return torch.sparse_coo_tensor(indices, normalized_values, embeddings.size())


@overload
def truncate_embeddings(embeddings: np.ndarray, truncate_dim: int | None) -> np.ndarray: ...


@overload
def truncate_embeddings(embeddings: torch.Tensor, truncate_dim: int | None) -> torch.Tensor: ...


def truncate_embeddings(embeddings: np.ndarray | torch.Tensor, truncate_dim: int | None) -> np.ndarray | torch.Tensor:
    """
    Truncates the embeddings matrix.

    Args:
        embeddings (Union[np.ndarray, torch.Tensor]): Embeddings to truncate.
        truncate_dim (Optional[int]): The dimension to truncate sentence embeddings to. `None` does no truncation.

    Example:
        >>> from sentence_transformers import SentenceTransformer
        >>> from sentence_transformers.util import truncate_embeddings
        >>> model = SentenceTransformer("tomaarsen/mpnet-base-nli-matryoshka")
        >>> embeddings = model.encode(["It's so nice outside!", "Today is a beautiful day.", "He drove to work earlier"])
        >>> embeddings.shape
        (3, 768)
        >>> model.similarity(embeddings, embeddings)
        tensor([[1.0000, 0.8100, 0.1426],
                [0.8100, 1.0000, 0.2121],
                [0.1426, 0.2121, 1.0000]])
        >>> truncated_embeddings = truncate_embeddings(embeddings, 128)
        >>> truncated_embeddings.shape
        (3, 128)
        >>> model.similarity(truncated_embeddings, truncated_embeddings)
        tensor([[1.0000, 0.8092, 0.1987],
                [0.8092, 1.0000, 0.2716],
                [0.1987, 0.2716, 1.0000]])

    Returns:
        Union[np.ndarray, torch.Tensor]: Truncated embeddings.
    """
    return embeddings[..., :truncate_dim]


def select_max_active_dims(embeddings: np.ndarray | torch.Tensor, max_active_dims: int) -> torch.Tensor:
    """
                  \  }}| j                  }t        j                  t        j                  |       t        ||      d      \  }}t        j                  | t        j                        }t        j                  ||      j                  d      j                  dt        ||            }d||j                         |j                         f<   d| | <   | S )a  
    Keeps only the top-k values (in absolute terms) for each embedding and creates a sparse tensor.

    Args:
        embeddings (Union[np.ndarray, torch.Tensor]): Embeddings to sparsify by keeping only top_k values.
        max_active_dims (int): Number of values to keep as non-zeros per embedding.

    Returns:
        torch.Tensor: A sparse tensor containing only the top-k values per embedding.
    """
    if max_active_dims is None:
        return embeddings

    if isinstance(embeddings, np.ndarray):
        embeddings = torch.tensor(embeddings)

    batch_size, dim = embeddings.shape
    device = embeddings.device

    # Keep only the top-k values (by magnitude) per row and zero out everything else
    _, top_indices = torch.topk(torch.abs(embeddings), k=min(max_active_dims, dim), dim=1)
    mask = torch.zeros_like(embeddings, dtype=torch.bool)
    batch_indices = torch.arange(batch_size, device=device).unsqueeze(1).expand(-1, min(max_active_dims, dim))
    mask[batch_indices.flatten(), top_indices.flatten()] = True
    embeddings[~mask] = 0
    return embeddings


def paraphrase_mining(
    model: SentenceTransformer,
    sentences: list[str],
    show_progress_bar: bool = False,
    batch_size: int = 32,
    query_chunk_size: int = 5000,
    corpus_chunk_size: int = 100000,
    max_pairs: int = 500000,
    top_k: int = 100,
    score_function: Callable[[Tensor, Tensor], Tensor] = cos_sim,
    truncate_dim: int | None = None,
    prompt_name: str | None = None,
    prompt: str | None = None,
) -> list[list[float | int]]:
    """
    Given a list of sentences / texts, this function performs paraphrase mining. It compares all sentences against all
    other sentences and returns a list with the pairs that have the highest cosine similarity score.

    Args:
        model (SentenceTransformer): SentenceTransformer model for embedding computation
        sentences (List[str]): A list of strings (texts or sentences)
        show_progress_bar (bool, optional): Plotting of a progress bar. Defaults to False.
        batch_size (int, optional): Number of texts that are encoded simultaneously by the model. Defaults to 32.
        query_chunk_size (int, optional): Search for most similar pairs for #query_chunk_size at the same time. Decrease, to lower memory footprint (increases run-time). Defaults to 5000.
        corpus_chunk_size (int, optional): Compare a sentence simultaneously against #corpus_chunk_size other sentences. Decrease, to lower memory footprint (increases run-time). Defaults to 100000.
        max_pairs (int, optional): Maximal number of text pairs returned. Defaults to 500000.
        top_k (int, optional): For each sentence, we retrieve up to top_k other sentences. Defaults to 100.
        score_function (Callable[[Tensor, Tensor], Tensor], optional): Function for computing scores. By default, cosine similarity. Defaults to cos_sim.
        truncate_dim (int, optional): The dimension to truncate sentence embeddings to. If None, uses the model's ones. Defaults to None.
        prompt_name (Optional[str], optional): The name of a predefined prompt to use when encoding the sentence.
            It must match a key in the model `prompts` dictionary, which can be set during model initialization
            or loaded from the model configuration.

            Ignored if `prompt` is provided. Defaults to None.

        prompt (Optional[str], optional): A raw prompt string to prepend directly to the input sentence during encoding.

            For instance, `prompt="query: "` transforms the sentence "What is the capital of France?" into:
            "query: What is the capital of France?". Use this to override the prompt logic entirely and supply your own prefix.
            This takes precedence over `prompt_name`. Defaults to None.
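
    Example:
        A minimal sketch; any SentenceTransformer checkpoint and sentence list work here::

            from sentence_transformers import SentenceTransformer
            from sentence_transformers.util import paraphrase_mining

            model = SentenceTransformer("all-MiniLM-L6-v2")
            sentences = [
                "The cat sits outside",
                "A man is playing guitar",
                "The cat plays in the garden",
            ]
            pairs = paraphrase_mining(model, sentences)
            score, i, j = pairs[0]  # best-scoring pair of sentences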

    Returns:
        List[List[Union[float, int]]]: Returns a list of triplets with the format [score, id1, id2]
    """
    # Compute embeddings for the sentences
    embeddings = model.encode(
        sentences,
        show_progress_bar=show_progress_bar,
        batch_size=batch_size,
        convert_to_tensor=True,
        truncate_dim=truncate_dim,
        prompt_name=prompt_name,
        prompt=prompt,
    )

    return paraphrase_mining_embeddings(
        embeddings,
        query_chunk_size=query_chunk_size,
        corpus_chunk_size=corpus_chunk_size,
        max_pairs=max_pairs,
        top_k=top_k,
        score_function=score_function,
    )


def paraphrase_mining_embeddings(
    embeddings: Tensor,
    query_chunk_size: int = 5000,
    corpus_chunk_size: int = 100000,
    max_pairs: int = 500000,
    top_k: int = 100,
    score_function: Callable[[Tensor, Tensor], Tensor] = cos_sim,
) -> list[list[float | int]]:
    """
    Given a list of sentences / texts, this function performs paraphrase mining. It compares all sentences against all
    other sentences and returns a list with the pairs that have the highest cosine similarity score.

    Args:
        embeddings (Tensor): A tensor with the embeddings
        query_chunk_size (int): Search for most similar pairs for #query_chunk_size at the same time. Decrease, to lower memory footprint (increases run-time).
        corpus_chunk_size (int): Compare a sentence simultaneously against #corpus_chunk_size other sentences. Decrease, to lower memory footprint (increases run-time).
        max_pairs (int): Maximal number of text pairs returned.
        top_k (int): For each sentence, we retrieve up to top_k other sentences
        score_function (Callable[[Tensor, Tensor], Tensor]): Function for computing scores. By default, cosine similarity.

    Returns:
        List[List[Union[float, int]]]: Returns a list of triplets with the format [score, id1, id2]
    """
    top_k += 1  # A sentence has the highest similarity to itself. Increase +1 as we are interested in distinct pairs

    # Mine for duplicates
    pairs = queue.PriorityQueue()
    min_score = -1
    num_added = 0

    for corpus_start_idx in range(0, len(embeddings), corpus_chunk_size):
        for query_start_idx in range(0, len(embeddings), query_chunk_size):
            scores = score_function(
                embeddings[query_start_idx : query_start_idx + query_chunk_size],
                embeddings[corpus_start_idx : corpus_start_idx + corpus_chunk_size],
            )

            scores_top_k_values, scores_top_k_idx = torch.topk(
                scores, min(top_k, len(scores[0])), dim=1, largest=True, sorted=False
            )
            scores_top_k_values = scores_top_k_values.cpu().tolist()
            scores_top_k_idx = scores_top_k_idx.cpu().tolist()

            for query_itr in range(len(scores)):
                for top_k_idx, corpus_itr in enumerate(scores_top_k_idx[query_itr]):
                    i = query_start_idx + query_itr
                    j = corpus_start_idx + corpus_itr

                    if i != j and scores_top_k_values[query_itr][top_k_idx] > min_score:
                        pairs.put((scores_top_k_values[query_itr][top_k_idx], i, j))
                        num_added += 1

                        if num_added >= max_pairs:
                            entry = pairs.get()
                            min_score = entry[0]

    # Get the pairs
    added_pairs = set()  # Used for duplicate detection
    pairs_list = []
    while not pairs.empty():
        score, i, j = pairs.get()
        sorted_i, sorted_j = sorted([i, j])

        if sorted_i != sorted_j and (sorted_i, sorted_j) not in added_pairs:
            added_pairs.add((sorted_i, sorted_j))
            pairs_list.append([score, sorted_i, sorted_j])

    # Highest scores first
    pairs_list = sorted(pairs_list, key=lambda x: x[0], reverse=True)
    return pairs_list


def information_retrieval(*args, **kwargs) -> list[list[dict[str, int | float]]]:
    """This function is deprecated. Use semantic_search instead"""
    return semantic_search(*args, **kwargs)


def semantic_search(
    query_embeddings: Tensor,
    corpus_embeddings: Tensor,
    query_chunk_size: int = 100,
    corpus_chunk_size: int = 500000,
    top_k: int = 10,
    score_function: Callable[[Tensor, Tensor], Tensor] = cos_sim,
) -> list[list[dict[str, int | float]]]:
    """
    This function performs by default a cosine similarity search between a list of query embeddings  and a list of corpus embeddings.
    It can be used for Information Retrieval / Semantic Search for corpora up to about 1 Million entries.

    Args:
        query_embeddings (:class:`~torch.Tensor`): A 2 dimensional tensor with the query embeddings. Can be a sparse tensor.
        corpus_embeddings (:class:`~torch.Tensor`): A 2 dimensional tensor with the corpus embeddings. Can be a sparse tensor.
        query_chunk_size (int, optional): Process 100 queries simultaneously. Increasing that value increases the speed, but requires more memory. Defaults to 100.
        corpus_chunk_size (int, optional): Scans the corpus 100k entries at a time. Increasing that value increases the speed, but requires more memory. Defaults to 500000.
        top_k (int, optional): Retrieve top k matching entries. Defaults to 10.
        score_function (Callable[[:class:`~torch.Tensor`, :class:`~torch.Tensor`], :class:`~torch.Tensor`], optional): Function for computing scores. By default, cosine similarity.

    Returns:
        List[List[Dict[str, Union[int, float]]]]: A list with one entry for each query. Each entry is a list of dictionaries with the keys 'corpus_id' and 'score', sorted by decreasing cosine similarity scores.
    """
    if isinstance(query_embeddings, (np.ndarray, np.generic)):
        query_embeddings = torch.from_numpy(query_embeddings)
    elif isinstance(query_embeddings, list):
        query_embeddings = torch.stack(query_embeddings)

    if len(query_embeddings.shape) == 1:
        query_embeddings = query_embeddings.unsqueeze(0)

    if isinstance(corpus_embeddings, (np.ndarray, np.generic)):
        corpus_embeddings = torch.from_numpy(corpus_embeddings)
    elif isinstance(corpus_embeddings, list):
        corpus_embeddings = torch.stack(corpus_embeddings)

    # Check that corpus and queries are on the same device
    if corpus_embeddings.device != query_embeddings.device:
        query_embeddings = query_embeddings.to(corpus_embeddings.device)

    queries_result_list = [[] for _ in range(len(query_embeddings))]

    for query_start_idx in range(0, len(query_embeddings), query_chunk_size):
        query_end_idx = min(query_start_idx + query_chunk_size, len(query_embeddings))
        if query_embeddings.is_sparse:
            indices = torch.arange(query_start_idx, query_end_idx, device=query_embeddings.device)
            query_chunk = query_embeddings.index_select(0, indices)
        else:
            query_chunk = query_embeddings[query_start_idx:query_end_idx]

        for corpus_start_idx in range(0, len(corpus_embeddings), corpus_chunk_size):
            corpus_end_idx = min(corpus_start_idx + corpus_chunk_size, len(corpus_embeddings))
            if corpus_embeddings.is_sparse:
                indices = torch.arange(corpus_start_idx, corpus_end_idx, device=corpus_embeddings.device)
                corpus_chunk = corpus_embeddings.index_select(0, indices)
            else:
                corpus_chunk = corpus_embeddings[corpus_start_idx:corpus_end_idx]

            # Compute the similarities
            cos_scores = score_function(query_chunk, corpus_chunk)

            # Get the top-k scores
            cos_scores_top_k_values, cos_scores_top_k_idx = torch.topk(
                cos_scores, min(top_k, len(cos_scores[0])), dim=1, largest=True, sorted=False
            )
            cos_scores_top_k_values = cos_scores_top_k_values.cpu().tolist()
            cos_scores_top_k_idx = cos_scores_top_k_idx.cpu().tolist()

            for query_itr in range(len(cos_scores)):
                for sub_corpus_id, score in zip(cos_scores_top_k_idx[query_itr], cos_scores_top_k_values[query_itr]):
                    corpus_id = corpus_start_idx + sub_corpus_id
                    query_id = query_start_idx + query_itr
                    if len(queries_result_list[query_id]) < top_k:
                        # heapq keeps the entry with the lowest score at the front
                        heapq.heappush(queries_result_list[query_id], (score, corpus_id))
                    else:
                        heapq.heappushpop(queries_result_list[query_id], (score, corpus_id))

    # Change the data format and sort the results
    for query_id in range(len(queries_result_list)):
        for doc_itr in range(len(queries_result_list[query_id])):
            score, corpus_id = queries_result_list[query_id][doc_itr]
            queries_result_list[query_id][doc_itr] = {"corpus_id": corpus_id, "score": score}
        queries_result_list[query_id] = sorted(queries_result_list[query_id], key=lambda x: x["score"], reverse=True)

    return queries_result_list


def mine_hard_negatives(
    dataset: Dataset,
    model: SentenceTransformer,
    anchor_column_name: str | None = None,
    positive_column_name: str | None = None,
    corpus: list[str] | None = None,
    cross_encoder: CrossEncoder | None = None,
    range_min: int = 0,
    range_max: int | None = None,
    max_score: float | None = None,
    min_score: float | None = None,
    absolute_margin: float | None = None,
    relative_margin: float | None = None,
    num_negatives: int = 3,
    sampling_strategy: Literal["random", "top"] = "top",
    query_prompt_name: str | None = None,
    query_prompt: str | None = None,
    corpus_prompt_name: str | None = None,
    corpus_prompt: str | None = None,
    include_positives: bool = False,
    output_format: Literal["triplet", "n-tuple", "labeled-pair", "labeled-list"] = "triplet",
    batch_size: int = 32,
    faiss_batch_size: int = 16384,
    use_faiss: bool = False,
    use_multi_process: bool | list[str] = False,
    verbose: bool = True,
    cache_folder: str | None = None,
    as_triplets: bool | None = None,
    margin: float | None = None,
) -> Dataset:
    """
| d       |rA|dk7  s|	|||dk7  rt        j                  d       |d	k7  rt        j                  d       d	}||}
t        j                  d|
 d       t        | j                         j                  |      j                         j                         |   j                               }t        |      }|A|
||||dz  z   |z   }n||z   |z   }|dkD  r|rd}|rt!        d       |rt!        d| d       i } | |   }!| |   }"|du}#|#s|"}t        t"        j%                  ||"z               }t'        |      D $%ci c]  \  }$}%|%|$
 }&}$}%|!j)                         }'t        t"        j%                  |!            }!t'        |!      D $(ci c]  \  }$}(|(|$
 })}$}(t        |!      }*t+        j,                  |*      j/                  d      }+|j0                  },|*t        |'      k7  r|rt!        d|* dt        |'       d       |dkD  r%t3        j4                  |      }-t!        d|-dd       d}.d}/|rt7        j8                  |d       |j:                  j<                  xs d}0t?        j@                  |0djC                  |!      z   jE                               jG                         }1t?        j@                  |0djC                  |      z   jE                               jG                         }2t6        jH                  jC                  |d|1 d       }3t6        jH                  jC                  |d!|2 d       }4t6        jH                  jK                  |3      r3t3        jL                  |3      }/|rt!        d"|3 d#|/jN                   d$       t6        jH                  jK                  |4      r3t3        jL                  |4      }.|rt!        d%|4 d#|.jN                   d$       |.|/|rl|jQ                  tS        |tT              rdn|&      }5|.|jE                  ||5|ddd||'      }.|/|jE                  |!|5|ddd||'      }/|jW                  |5       n4|.|jE                  ||ddd||(      }.|/|jE                  |!|ddd||(      }/|rt6        jH                  jK                  3      s&t3        jX                  |3|/       |rt!        d)|3        t6        jH                  jK                  4      s&t3        jX                  |4|.       |rt!        d*|4        |r=ddl-}6|6j]                  |j_                               }7	 |6ja                         }8d|8_1        d|8_2        |6jg                  |7|8+      }7|7jk                  |.       g }9g }:tm        dt        |/      |d,-      D ]E  };|/|;|;|z    }<|7jo                  |<|dz   .      \  }=}>|9jq                  |=       |:jq                  |>       G t+        jr                  t3        jt                  |9d/            jw                  |,      }=t+        jr                  t3        jt                  |:d/            jw                  |,      }>n?|jy                  |/|.      jw                  |,      }=t+        jz                  |=||z   d0      \  }=}>t}        |*      D ?cg c]  }?g  }@}?t        |'|"      D ]!  \  }(}A|)|(   }B@|B   jq                  |&|A          # @D Ccg c]  }Ct        |C       }D}Cg }"g }'t}        |*      D ]B  }$|"j                  @|$   D Ecg c]  }E||E   	 c}E       |'j                  |!|$   gD|$   z         D @D Ccg c]  }Ct+        j                  |C|,1       }@}C|/t}        |*      D $?cg c]  }$t}        D|$         D ]  }?|$  c}?}$   }/|.t+        j                  @      j                            }F|j                  |/|F      jw                  |,      }G~/~F~.||
||t        t'        |>      d2t        |>      3      D ]O  \  }$}H|!|$   }(|HD Icg c]  }I||I   	 }J}I|j                  t        t        |(g|dz   z  |J            |d4      }K|K|=|$<   Q |j                  t        t        |'|"            |d4      }G|sUt+        j                  t}        |*      D Lcg c]  }Lt+        j                  |>|L   @|L           c}L      }Mt        d5       |=|M<   |=j                         }N|
|Qt+        j                  |*Gj0                  |Gj                  6      }Od}Pt}        |*      D ].  }Lt+        j                  GP|PD|L   z          O|L<   |P|D|Ldz
     z  }P0 |
p|=|
z   Oj                  |=j                  d      d      j                  kD  }Qt        d5       |=|Q<   |Qj                         j                         }R|RrR|RNz  d7| d8<   |N|Rz  }N|s|=Oj                  |=j                  d      d      j                  d|z
  z  kD  }Qt        d5       |=|Q<   |Qj                         j                         }R|RrR|RNz  d7| d9<   |N|Rz  }N|?|=|kD  }Qt        d5       |=|Q<   |Qj                         j                         }R|RrR|RNz  d7| d:<   |	?|=|	k  }Qt        d5       |=|Q<   |Qj                         j                         }R|RrR|RNz  d7| d;<   t+        jz                  |=|d0      \  }S}T|>|+|Tf   }>|r|>dd|df   }>Sdd|df   }S|dk(  r|>ddd|f   }>Sddd|f   }Sn|d<k(  r|>j                  d      Sj                         j                  d      z
  }U|Uj                  |=      }U|UD Vcg c]"  }Vt        j                  t}        |V      |.      $ }W}V|>|+|Wf   }>S|+|Wf   }S|Sj                  dd>      \  }S}T|>|+|Tf   }>t+        j                  t}        |*      D $cg c]  }$|>|$   j                  D|$   d       c}$      }>t+        j                  t}        |*      D $cg c]  }$S|$   j                  D|$   d       c}$      }S|rt!        d?       |dk(  rGSt        d5       k7  }Xt+        j                  |>      }Yt+        j                  |>      }Z|>|X   }>|S|X   }Sd}Pt}        |*      D ]c  }Lt+        j                  |L      j                  D|L   |      YP|P|D|L   z    @|L   j                  |d      j                  Z|P|P|D|L   z    |P|D|L   z  }Pe YX   }YZ|X   }@|g |g d@g i}[t        |Y|@|>      D ]K  \  }\}]}^[|   jq                  |!|\          |[|   jq                  ||]          |[d@   jq                  ||^          M Gj                  |d      j                  X   Sz
  }_n|dAk(  rSt        d5       k7  }X|g |g dBg i}[t}        |*      D ]  }B@|B   D ]D  }][|   jq                  |!B          |[|   jq                  ||]          |[dB   jq                  d       F t        |>B   S|B         D ]W  \  }^}`|`t        d5       k(  r[|   jq                  |!B          |[|   jq                  |^          |[dB   jq                  d       Y  SX   }SGj                  |d      j                  |X   |Sz
  }_n|d	k(  rSt        d5       k7  j                  dC      }X|S|X   }S|>|X   }>|t'        |X      D $acg c]  \  }$}a|as	|'|$    c}a}$|t'        X      D $acg c]  \  }$}a|as	|"|$    c}a}$it'        |>j                  dD      D ;bcci c]  \  };}bdE|; |bD ccg c]  }c||c   	 c}c c}c}b};}[Sj                         }SGj                  |d      j                  X   j                         |Sz
  }_n|dFk(  rSt        d5       k7  }X|t'        |X      D $dcg c]  \  }$}d|dj                         s|'|$    c}d}$|t'        t        X|>            D $dea7cg c]D  \  }$\  }d}e|dj                         r,|"|$   gt        de      D a7cg c]  \  }a}7|as	||7    c}7}az   F c}7}a}e}d}$dGXD dcg c]&  }d|dj                         sdgdgt        d      z  z   ( c}di}[SX   }SGj                  |d      j                  |X   |Sz
  }_t        [      dk(  rt        dH       |j                  [      }f|rdI}gdJ }ht!        |gj                  dKdLdMdN             t!        |gj                  dO |ht        G             |ht        S            d             dPt*        j4                  fdQt*        j                  fdRt*        j                  fdSdT fdUdV fdWdX fdYdZ fd[d\ ffD ]T  \  }i}jt!        gj                  |ij                          h |jG             |h |jS             |h |j_                         V d8|
fd9|fd:|fd;|	ffD ]4  \  }k}l|k| v s| k   d]   }m| |k   d^   }nt!        d_|md`da|ndbdc|k ddl de	       6 |t        |       z  t        S      z
  }o|odkD  rdfg}p|dkD  rpjq                  dg       |
pjq                  d8       |pjq                  d9       |pjq                  d:       dhjC                  pdd       }qt        |p      dkD  rqdipd   z   z  }qo|t        |       z  z  }rt!        dj|o dk|rdbdlq dmt        p      dkD  rdnnd do	       fS c c}%}$w c c}(}$w # th        $ r Y w xY wc c}?w c c}Cw c c}Ew c c}Cw c c}?}$w c c}Iw c c}Lw c c}Vw c c}$w c c}$w c c}a}$w c c}a}$w c c}cw c c}c}b};w c c}d}$w c c}7}aw c c}7}a}e}d}$w c c}dw )pu'=  
    Add hard negatives to a dataset of (anchor, positive) pairs to create (anchor, positive, negative) triplets or
    (anchor, positive, negative_1, ..., negative_n) tuples.

    Hard negative mining is a technique to improve the quality of a dataset by adding hard negatives, which are
    texts that may appear similar to the anchor, but are not. Using hard negatives can improve the performance of
    models trained on the dataset.

    This function uses a SentenceTransformer model to embed the sentences in the dataset, and then finds the closest
    matches to each anchor sentence in the dataset. It then samples negatives from the closest matches, optionally
    using a CrossEncoder model to rescore the candidates.

    Supports prompt formatting for models that expect specific instruction-style input.

    You can influence the candidate negative selection in various ways:

    - **range_min**: Minimum rank of the closest matches to consider as negatives: useful to skip the most similar texts to
      avoid marking texts as negative that are actually positives.
    - **range_max**: Maximum rank of the closest matches to consider as negatives: useful to limit the number of candidates
      to sample negatives from. A lower value makes processing faster, but may result in less candidate negatives that
      satisfy the margin or max_score conditions.
    - **max_score**: Maximum score to consider as a negative: useful to skip candidates that are too similar to the anchor.
    - **min_score**: Minimum score to consider as a negative: useful to skip candidates that are too dissimilar to the anchor.
    - **absolute_margin**: Absolute margin for hard negative mining: useful to skip candidate negatives whose similarity
      to the anchor is within a certain margin of the positive pair. A value of 0 can be used to enforce that the negative
      is always further away from the anchor than the positive.
    - **relative_margin**: Relative margin for hard negative mining: useful to skip candidate negatives whose similarity
      to the anchor is within a certain margin of the positive pair. A value of 0.05 means that the negative is at most 95%
      as similar to the anchor as the positive.
    - **sampling_strategy**: Sampling strategy for negatives: "top" or "random". "top" will always sample the top n
      candidates as negatives, while "random" will sample n negatives randomly from the candidates that satisfy the
      margin or max_score conditions.

    .. tip::

        The excellent `NV-Retriever paper <https://arxiv.org/abs/2407.15831>`_ is a great resource for understanding the
        details of hard negative mining and how to use it effectively. Notably, it reaches the strongest performance using
        these settings::

            dataset = mine_hard_negatives(
                dataset=dataset,
                model=model,
                relative_margin=0.05,         # 0.05 means that the negative is at most 95% as similar to the anchor as the positive
                num_negatives=num_negatives,  # 10 or less is recommended
                sampling_strategy="top",      # "top" means that we sample the top candidates as negatives
                batch_size=batch_size,        # Adjust as needed
                use_faiss=True,               # Optional: Use faiss/faiss-gpu for faster similarity search
            )

        This corresponds with the `TopK-PercPos (95%)` mining method.

    Example:

        >>> from sentence_transformers.util import mine_hard_negatives
        >>> from sentence_transformers import SentenceTransformer
        >>> from datasets import load_dataset
        >>> # Load a Sentence Transformer model
        >>> model = SentenceTransformer("all-MiniLM-L6-v2")
        >>>
        >>> # Load a dataset to mine hard negatives from
        >>> dataset = load_dataset("sentence-transformers/natural-questions", split="train")
        >>> dataset
        Dataset({
            features: ['query', 'answer'],
            num_rows: 100231
        })
        >>> dataset = mine_hard_negatives(
        ...     dataset=dataset,
        ...     model=model,
        ...     range_min=10,
        ...     range_max=50,
        ...     max_score=0.8,
        ...     relative_margin=0.05,
        ...     num_negatives=5,
        ...     sampling_strategy="random",
        ...     batch_size=128,
        ...     use_faiss=True,
        ... )
        Batches: 100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████| 588/588 [00:32<00:00, 18.07it/s]
        Batches: 100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████| 784/784 [00:08<00:00, 96.41it/s]
        Querying FAISS index: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████| 7/7 [00:06<00:00,  1.06it/s]
        Metric       Positive       Negative     Difference
        Count         100,231        487,865
        Mean           0.6866         0.4194         0.2752
        Median         0.7010         0.4102         0.2760
        Std            0.1125         0.0719         0.1136
        Min            0.0303         0.1702         0.0209
        25%            0.6221         0.3672         0.1899
        50%            0.7010         0.4102         0.2760
        75%            0.7667         0.4647         0.3590
        Max            0.9584         0.7621         0.7073
        Skipped 427,503 potential negatives (8.36%) due to the relative_margin of 0.05.
        Skipped 978 potential negatives (0.02%) due to the max_score of 0.8.
        Could not find enough negatives for 13290 samples (2.65%). Consider adjusting the range_max, range_min, relative_margin and max_score parameters if you'd like to find more valid negatives.
        >>> dataset
        Dataset({
            features: ['query', 'answer', 'negative'],
            num_rows: 487865
        })
        >>> dataset[0]
        {
            'query': 'when did richmond last play in a preliminary final',
            'answer': "Richmond Football Club Richmond began 2017 with 5 straight wins, a feat it had not achieved since 1995. A series of close losses hampered the Tigers throughout the middle of the season, including a 5-point loss to the Western Bulldogs, 2-point loss to Fremantle, and a 3-point loss to the Giants. Richmond ended the season strongly with convincing victories over Fremantle and St Kilda in the final two rounds, elevating the club to 3rd on the ladder. Richmond's first final of the season against the Cats at the MCG attracted a record qualifying final crowd of 95,028; the Tigers won by 51 points. Having advanced to the first preliminary finals for the first time since 2001, Richmond defeated Greater Western Sydney by 36 points in front of a crowd of 94,258 to progress to the Grand Final against Adelaide, their first Grand Final appearance since 1982. The attendance was 100,021, the largest crowd to a grand final since 1986. The Crows led at quarter time and led by as many as 13, but the Tigers took over the game as it progressed and scored seven straight goals at one point. They eventually would win by 48 points – 16.12 (108) to Adelaide's 8.12 (60) – to end their 37-year flag drought.[22] Dustin Martin also became the first player to win a Premiership medal, the Brownlow Medal and the Norm Smith Medal in the same season, while Damien Hardwick was named AFL Coaches Association Coach of the Year. Richmond's jump from 13th to premiers also marked the biggest jump from one AFL season to the next.",
            'negative': "2018 NRL Grand Final The 2018 NRL Grand Final was the conclusive and premiership-deciding game of the 2018 National Rugby League season and was played on Sunday September 30 at Sydney's ANZ Stadium.[1] The match was contested between minor premiers the Sydney Roosters and defending premiers the Melbourne Storm. In front of a crowd of 82,688, Sydney won the match 21â€“6 to claim their 14th premiership title and their first since 2013. Roosters five-eighth Luke Keary was awarded the Clive Churchill Medal as the game's official man of the match."
        }
        >>> dataset.push_to_hub("natural-questions-hard-negatives", "triplet-all")

    Args:
        dataset (Dataset): A dataset containing (anchor, positive) pairs.
        model (SentenceTransformer): A SentenceTransformer model to use for embedding the sentences.
        anchor_column_name (str, optional): The column name in `dataset` that contains the anchor/query. Defaults to None, in which case the first column in `dataset` will be used.
        positive_column_name (str, optional): The column name in `dataset` that contains the positive candidates. Defaults to None, in which case the second column in `dataset` will be used.
        corpus (List[str], optional): A list containing documents as strings that will be used as candidate negatives
            in addition to the second column in `dataset`. Defaults to None, in which case the second column in
            `dataset` will exclusively be used as the negative candidate corpus.
        cross_encoder (CrossEncoder, optional): A CrossEncoder model to use for rescoring the candidates. Defaults to None.
        range_min (int): Minimum rank of the closest matches to consider as negatives. Defaults to 0.
        range_max (int, optional): Maximum rank of the closest matches to consider as negatives. Defaults to None.
        max_score (float, optional): Maximum score to consider as a negative. Defaults to None.
        min_score (float, optional): Minimum score to consider as a negative. Defaults to None.
        absolute_margin (float, optional): Absolute margin for hard negative mining, i.e. the minimum distance between
            the positive similarity and the negative similarity. Defaults to None.
        relative_margin (float, optional): Relative margin for hard negative mining, i.e. the maximum ratio between
            the positive similarity and the negative similarity. A value of 0.05 means that the negative is at most
            95% as similar to the anchor as the positive. Defaults to None.
        num_negatives (int): Number of negatives to sample. Defaults to 3.
        sampling_strategy (Literal["random", "top"]): Sampling strategy for negatives: "top" or "random". Defaults to "top".
        query_prompt_name (Optional[str], optional): The name of a predefined prompt to use when encoding the first/anchor dataset column.
            It must match a key in the ``model.prompts`` dictionary, which can be set during model initialization
            or loaded from the model configuration.

            For example, if ``query_prompt_name="query"`` and the model prompts dictionary includes {"query": "query: "},
            then the sentence "What is the capital of France?" is transformed into: "query: What is the capital of France?"
            before encoding. This is useful for models that were trained or fine-tuned with specific prompt formats.

            Ignored if ``query_prompt`` is provided. Defaults to None.

        query_prompt (Optional[str], optional): A raw prompt string to prepend directly to the first/anchor dataset column during encoding.

            For instance, `query_prompt="query: "` transforms the sentence "What is the capital of France?" into:
            "query: What is the capital of France?". Use this to override the prompt logic entirely and supply your own prefix.
            This takes precedence over ``query_prompt_name``. Defaults to None.
        corpus_prompt_name (Optional[str], optional): The name of a predefined prompt to use when encoding the corpus. See
            ``query_prompt_name`` for more information. Defaults to None.
        corpus_prompt (Optional[str], optional): A raw prompt string to prepend directly to the corpus during encoding.
            See ``query_prompt`` for more information. Defaults to None.
        include_positives (bool): Whether to include the positives in the negative candidates.
            Setting this to True is primarily useful for creating Reranking evaluation datasets for CrossEncoder models,
            where it can be useful to get a full ranking (including the positives) from a first-stage retrieval model.
            Defaults to False.
        output_format (Literal["triplet", "n-tuple", "labeled-pair", "labeled-list"]): Output format for the `datasets.Dataset`. Options are:

            - "triplet": (anchor, positive, negative) triplets, i.e. 3 columns. Useful for e.g. :class:`~sentence_transformers.cross_encoder.losses.CachedMultipleNegativesRankingLoss`.
            - "n-tuple": (anchor, positive, negative_1, ..., negative_n) tuples, i.e. 2 + num_negatives columns. Useful for e.g. :class:`~sentence_transformers.cross_encoder.losses.CachedMultipleNegativesRankingLoss`.
            - "labeled-pair": (anchor, passage, label) text tuples with a label of 0 for negative and 1 for positive, i.e. 3 columns. Useful for e.g. :class:`~sentence_transformers.cross_encoder.losses.BinaryCrossEntropyLoss`.
            - "labeled-list": (anchor, [doc1, doc2, ..., docN], [label1, label2, ..., labelN]) triplets with labels of 0 for negative and 1 for positive, i.e. 3 columns. Useful for e.g. :class:`~sentence_transformers.cross_encoder.losses.LambdaLoss`.

            Defaults to "triplet".
        batch_size (int): Batch size for encoding the dataset. Defaults to 32.
        faiss_batch_size (int): Batch size for FAISS top-k search. Defaults to 16384.
        use_faiss (bool): Whether to use FAISS for similarity search. May be recommended for large datasets. Defaults to False.
        use_multi_process (bool | List[str], optional): Whether to use multi-GPU/CPU processing. If True, uses all GPUs if CUDA
            is available, and 4 CPU processes if it's not available. You can also pass a list of PyTorch devices like
            ["cuda:0", "cuda:1", ...] or ["cpu", "cpu", "cpu", "cpu"].
        verbose (bool): Whether to print statistics and logging. Defaults to True.
        cache_folder (str, optional): Directory path for caching embeddings. If provided, the function will save
            ``query_embeddings_{hash}.npy`` and ``corpus_embeddings_{hash}.npy`` under this folder after the first run,
            and on subsequent calls will load from these files if they exist to avoid recomputation. The hashes are
            computed based on the model name and the queries/corpus. Defaults to None.
        as_triplets (bool, optional): Deprecated. Use `output_format` instead. Defaults to None.
        margin (float, optional): Deprecated. Use `absolute_margin` or `relative_margin` instead. Defaults to None.


    Returns:
        Dataset: A dataset containing (anchor, positive, negative) triplets, (anchor, passage, label) text tuples with
        a label, or (anchor, positive, negative_1, ..., negative_n) tuples.
    """
    if not is_datasets_available():
        raise ImportError("Please install `datasets` to use this function: `pip install datasets`.")

    from datasets import Dataset

    # If the column names are not provided, use the first two columns of the dataset
    columns = dataset.column_names
    if not anchor_column_name or anchor_column_name not in columns:
        anchor_column_name = columns[0]
    if not positive_column_name or positive_column_name not in columns:
        positive_column_name = columns[1]
    if not anchor_column_name and not positive_column_name and len(columns) != 2:
        raise ValueError("Dataset must contain exactly two columns.")

    if as_triplets is not None:
        output_format = "triplet" if as_triplets else "n-tuple"
        logger.warning(
            "The `as_triplets` parameter is deprecated. Use the `output_format` parameter instead. "
            f"Setting `output_format` to `{output_format}`."
        )

    if include_positives:
        if (
            range_min != 0
            or range_max is not None
            or max_score is not None
            or absolute_margin is not None
            or relative_margin is not None
            or sampling_strategy != "top"
        ):
            logger.warning(
                "When using `include_positives=True`, updating `range_min`, `range_max`, `max_score`, `margin`, "
                "or `sampling_strategy` from the default values may still discard the positive values."
            )
        if output_format != "n-tuple":
            logger.warning(
                'When using `include_positives=True`, `output_format` will be set to `"n-tuple"` '
                "to ensure that the ranking order is preserved."
            )
            output_format = "n-tuple"

    if margin is not None:
        absolute_margin = margin
        logger.warning(
            "The `margin` parameter is deprecated. Use the `absolute_margin` and/or `relative_margin` parameter instead. "
            f"Setting `absolute_margin` to `{absolute_margin}`."
        )

    # Count the number of positives per query to determine how many candidates we need
    positives_per_query = list(
        dataset.to_pandas().groupby(anchor_column_name).count().to_dict()[positive_column_name].values()
    )
    max_positives = max(positives_per_query)

    if range_max is None:
        if absolute_margin is not None or relative_margin is not None or max_score is not None:
            # A filter is applied, so we need a wide range of candidates to sample from
            range_max = num_negatives * 10 + 10 + max_positives
        else:
            range_max = num_negatives + max_positives

        if range_max > 2048 and use_faiss:
            range_max = 2048
            if verbose:
                print("Using FAISS, we can only retrieve up to 2048 documents per query. Setting range_max to 2048.")
        if verbose:
            print(f"Setting range_max to {range_max} based on the provided parameters.")

    log_counters = {}
    queries = dataset[anchor_column_name]
    positives = dataset[positive_column_name]
    separate_corpus = corpus is not None
    if not separate_corpus:
        corpus = positives

    # Deduplicate the corpus and make sure all the positives are also in the corpus
    corpus = list(dict.fromkeys(corpus + positives))
    corpus_idx = {text: idx for idx, text in enumerate(corpus)}

    # Deduplicate the queries, but keep the originals for later reference
    all_queries = queries.copy()
    queries = list(dict.fromkeys(queries))
    queries_idx = {query: idx for idx, query in enumerate(queries)}
    n_queries = len(queries)
    batch_idx = torch.arange(n_queries).unsqueeze(-1)
    device = model.device

    if n_queries != len(all_queries) and verbose:
        print(f"Found {n_queries} unique queries out of {len(all_queries)} total queries.")

    if max_positives > 1:
        avg_positives_per_query = np.mean(positives_per_query)
        print(f"Found an average of {avg_positives_per_query:.3f} positives per query.")

    corpus_embeddings = None
    query_embeddings = None

    if cache_folder:
        os.makedirs(cache_folder, exist_ok=True)
        model_name = model.model_card_data.base_model or ""
        query_hash = hashlib.md5((model_name + "".join(queries)).encode()).hexdigest()
        corpus_hash = hashlib.md5((model_name + "".join(corpus)).encode()).hexdigest()
        query_cache_file = os.path.join(cache_folder, f"query_embeddings_{query_hash}.npy")
        corpus_cache_file = os.path.join(cache_folder, f"corpus_embeddings_{corpus_hash}.npy")
        if os.path.exists(query_cache_file):
            query_embeddings = np.load(query_cache_file)
            if verbose:
                print(f"[Cache] Loaded query embeddings from {query_cache_file} (shape={query_embeddings.shape})")
        if os.path.exists(corpus_cache_file):
            corpus_embeddings = np.load(corpus_cache_file)
            if verbose:
                print(f"[Cache] Loaded corpus embeddings from {corpus_cache_file} (shape={corpus_embeddings.shape})")

    if corpus_embeddings is None or query_embeddings is None:
        if use_multi_process:
            pool = model.start_multi_process_pool(
                target_devices=None if isinstance(use_multi_process, bool) else use_multi_process
            )
            if corpus_embeddings is None:
                corpus_embeddings = model.encode(
                    corpus,
                    pool=pool,
                    batch_size=batch_size,
                    normalize_embeddings=True,
                    convert_to_numpy=True,
                    show_progress_bar=True,
                    prompt_name=corpus_prompt_name,
                    prompt=corpus_prompt,
                )
            if query_embeddings is None:
                query_embeddings = model.encode(
                    queries,
                    pool=pool,
                    batch_size=batch_size,
                    normalize_embeddings=True,
                    convert_to_numpy=True,
                    show_progress_bar=True,
                    prompt_name=query_prompt_name,
                    prompt=query_prompt,
                )
            model.stop_multi_process_pool(pool)
        else:
            if corpus_embeddings is None:
                corpus_embeddings = model.encode(
                    corpus,
                    batch_size=batch_size,
                    normalize_embeddings=True,
                    convert_to_numpy=True,
                    show_progress_bar=True,
                    prompt_name=corpus_prompt_name,
                    prompt=corpus_prompt,
                )
            if query_embeddings is None:
                query_embeddings = model.encode(
                    queries,
                    batch_size=batch_size,
                    normalize_embeddings=True,
                    convert_to_numpy=True,
                    show_progress_bar=True,
                    prompt_name=query_prompt_name,
                    prompt=query_prompt,
                )

    if cache_folder:
        if not os.path.exists(query_cache_file):
            np.save(query_cache_file, query_embeddings)
            if verbose:
                print(f"[Cache] Saved query embeddings to {query_cache_file}")
        if not os.path.exists(corpus_cache_file):
            np.save(corpus_cache_file, corpus_embeddings)
            if verbose:
                print(f"[Cache] Saved corpus embeddings to {corpus_cache_file}")

    if use_faiss:
        import faiss

        index = faiss.IndexFlatIP(model.get_sentence_embedding_dimension())
        # Move the index to the GPU(s) if possible
        try:
            co = faiss.GpuMultipleClonerOptions()
            co.shard = True
            co.useFloat16 = True
            index = faiss.index_cpu_to_all_gpus(index, co=co)
        except Exception:
            pass

        index.add(corpus_embeddings)

        scores_list = []
        indices_list = []
        # Iterate over query embeddings in batches so we can track the progress
        for i in trange(0, len(query_embeddings), faiss_batch_size, desc="Querying FAISS index"):
            query_chunk = query_embeddings[i : i + faiss_batch_size]
            scores, indices = index.search(query_chunk, k=range_max + 1)
            scores_list.append(scores)
            indices_list.append(indices)
        scores = torch.from_numpy(np.concatenate(scores_list, axis=0)).to(device)
        indices = torch.from_numpy(np.concatenate(indices_list, axis=0)).to(device)
    else:
        # Compute the similarity scores between the queries and the corpus
        scores = model.similarity(query_embeddings, corpus_embeddings).to(device)

        # Keep only the range_max + max_positives highest scores; the positives may be among them
        scores, indices = torch.topk(scores, k=range_max + max_positives, dim=1)

    # For each query, collect the corpus indices of its positives
    positive_indices = [[] for _ in range(n_queries)]
    for query, positive in zip(all_queries, positives):
        query_idx = queries_idx[query]
        positive_indices[query_idx].append(corpus_idx[positive])

    n_positives = [len(p) for p in positive_indices]

    # Re-order the positives and all_queries to follow the order of the deduplicated queries
    positives = []
    all_queries = []
    for idx in range(n_queries):
        positives.extend([corpus[doc_idx] for doc_idx in positive_indices[idx]])
        all_queries.extend([queries[idx]] * n_positives[idx])
    positive_indices = [torch.tensor(p, device=device) for p in positive_indices]

    # Compute one positive score per (query, positive) pair
    query_embeddings = query_embeddings[[idx for idx in range(n_queries) for _ in range(n_positives[idx])]]
    positive_embeddings = corpus_embeddings[torch.cat(positive_indices).tolist()]
    positive_scores = model.similarity_pairwise(query_embeddings, positive_embeddings).to(device)

    del query_embeddings
    del positive_embeddings
    del corpus_embeddings

    if cross_encoder is not None and (
        absolute_margin is not None or relative_margin is not None or max_score is not None or min_score is not None
    ):
        # Rescore the candidates and the positives with the cross-encoder
        for idx, candidate_idx in tqdm(enumerate(indices), desc="Rescoring with CrossEncoder", total=len(indices)):
            query = queries[idx]
            candidate_passages = [corpus[_idx] for _idx in candidate_idx]
            pred_scores = cross_encoder.predict(
                list(zip([query] * len(candidate_passages), candidate_passages)),
                batch_size=batch_size,
                convert_to_tensor=True,
            )
            scores[idx] = pred_scores
        positive_scores = cross_encoder.predict(
            list(zip(all_queries, positives)),
            batch_size=batch_size,
            convert_to_tensor=True,
        )

    if not include_positives:
        # Remove the positives from the candidate negatives
        positive_mask = torch.stack(
            [torch.isin(indices[q_idx], positive_indices[q_idx]) for q_idx in range(n_queries)]
        )
        scores[positive_mask] = -float("inf")

    num_candidates = scores.numel()

    # Remove candidates that are too close to the known positives
    if absolute_margin is not None or relative_margin is not None:
        # With multiple positives per query, compare against the maximum positive score per query
        max_positive_scores = torch.empty(n_queries, device=positive_scores.device, dtype=positive_scores.dtype)
        start_idx = 0
        for q_idx in range(n_queries):
            max_positive_scores[q_idx] = torch.max(positive_scores[start_idx : start_idx + n_positives[q_idx]])
            start_idx += n_positives[q_idx]

        if absolute_margin is not None:
            removed_indices = scores + absolute_margin > max_positive_scores.repeat(scores.size(1), 1).T
            scores[removed_indices] = -float("inf")

            num_skipped = removed_indices.sum().item()
            if num_skipped:
                log_counters["absolute_margin"] = {"skipped": num_skipped, "ratio": num_skipped / num_candidates}
                num_candidates -= num_skipped

        if relative_margin is not None:
            removed_indices = scores > max_positive_scores.repeat(scores.size(1), 1).T * (1 - relative_margin)
            scores[removed_indices] = -float("inf")

            num_skipped = removed_indices.sum().item()
            if num_skipped:
                log_counters["relative_margin"] = {"skipped": num_skipped, "ratio": num_skipped / num_candidates}
                num_candidates -= num_skipped

    # Remove candidates that are too similar to the anchor
    if max_score is not None:
        removed_indices = scores > max_score
        scores[removed_indices] = -float("inf")

        num_skipped = removed_indices.sum().item()
        if num_skipped:
            log_counters["max_score"] = {"skipped": num_skipped, "ratio": num_skipped / num_candidates}
            num_candidates -= num_skipped

    # Remove candidates that are too dissimilar to the anchor
    if min_score is not None:
        removed_indices = scores < min_score
        scores[removed_indices] = -float("inf")

        num_skipped = removed_indices.sum().item()
        if num_skipped:
            log_counters["min_score"] = {"skipped": num_skipped, "ratio": num_skipped / num_candidates}
            num_candidates -= num_skipped

    # Grab the best candidate negatives per query
    negative_scores, local_indices = torch.topk(scores, k=range_max, dim=1)
    indices = indices[batch_idx, local_indices]

    if range_min:
        # Skip the first range_min candidates; they tend to be (near-)positives
        indices = indices[:, range_min:]
        negative_scores = negative_scores[:, range_min:]

    if sampling_strategy == "top":
        indices = indices[:, :num_negatives]
        negative_scores = negative_scores[:, :num_negatives]
    elif sampling_strategy == "random":
        # Prevent sampling -inf values if possible
        num_options = indices.size(1) - negative_scores.isinf().sum(1)
        num_options = num_options.clamp(min=num_negatives)
        # Randomly sample negatives from the remaining candidates of each query
        sampled_idx = [random.sample(range(options), k=num_negatives) for options in num_options]
        indices = indices[batch_idx, sampled_idx]
        negative_scores = negative_scores[batch_idx, sampled_idx]
        # Resort the negatives and scores
        negative_scores, local_indices = negative_scores.sort(dim=1, descending=True)
        indices = indices[batch_idx, local_indices]

    # Repeat the negatives once per (query, positive) pair, so rows align with all_queries/positives
    indices = torch.cat([indices[idx].repeat(n_positives[idx], 1) for idx in range(n_queries)])
    negative_scores = torch.cat([negative_scores[idx].repeat(n_positives[idx], 1) for idx in range(n_queries)])

    if verbose:
        print("Negative candidates mined, preparing dataset...")

    if output_format == "triplet":
        indices_to_keep = negative_scores != -float("inf")
        dataset_data = {
            anchor_column_name: [],
            positive_column_name: [],
            "negative": [],
        }
        for pair_idx, (query, positive) in enumerate(zip(all_queries, positives)):
            for negative_idx, keep in zip(indices[pair_idx], indices_to_keep[pair_idx]):
                if not keep:
                    continue
                dataset_data[anchor_column_name].append(query)
                dataset_data[positive_column_name].append(positive)
                dataset_data["negative"].append(corpus[negative_idx])

        positive_scores_matrix = positive_scores.repeat(negative_scores.size(1), 1).T
        negative_scores = negative_scores[indices_to_keep]
        difference_scores = positive_scores_matrix[indices_to_keep] - negative_scores

    elif output_format == "labeled-pair":
        indices_to_keep = negative_scores != -float("inf")
        dataset_data = {
            anchor_column_name: [],
            positive_column_name: [],
            "label": [],
        }
        for pair_idx, (query, positive) in enumerate(zip(all_queries, positives)):
            dataset_data[anchor_column_name].append(query)
            dataset_data[positive_column_name].append(positive)
            dataset_data["label"].append(1)
            for negative_idx, keep in zip(indices[pair_idx], indices_to_keep[pair_idx]):
                if not keep:
                    continue
                dataset_data[anchor_column_name].append(query)
                dataset_data[positive_column_name].append(corpus[negative_idx])
                dataset_data["label"].append(0)

        positive_scores_matrix = positive_scores.repeat(negative_scores.size(1), 1).T
        negative_scores = negative_scores[indices_to_keep]
        difference_scores = positive_scores_matrix[indices_to_keep] - negative_scores

    elif output_format == "n-tuple":
        # Keep only the (query, positive) pairs for which all negatives survived the filters
        indices_to_keep = (negative_scores != -float("inf")).all(dim=1)
        negative_scores = negative_scores[indices_to_keep]
        indices = indices[indices_to_keep]

        dataset_data = {
            anchor_column_name: [query for keep, query in zip(indices_to_keep, all_queries) if keep],
            positive_column_name: [positive for keep, positive in zip(indices_to_keep, positives) if keep],
            **{
                f"negative_{i}": [corpus[neg_idx] for neg_idx in neg_indices]
                for i, neg_indices in enumerate(indices.T, start=1)
            },
        }
        positive_scores_matrix = positive_scores[indices_to_keep].repeat(negative_scores.size(1), 1).T
        difference_scores = (positive_scores_matrix - negative_scores).flatten()
        negative_scores = negative_scores.flatten()

    elif output_format == "labeled-list":
        indices_to_keep = negative_scores != -float("inf")
        dataset_data = {
            anchor_column_name: [],
            positive_column_name: [],
            "labels": [],
        }
        for pair_idx, (query, positive) in enumerate(zip(all_queries, positives)):
            docs = [positive] + [
                corpus[negative_idx]
                for negative_idx, keep in zip(indices[pair_idx], indices_to_keep[pair_idx])
                if keep
            ]
            dataset_data[anchor_column_name].append(query)
            dataset_data[positive_column_name].append(docs)
            dataset_data["labels"].append([1] + [0] * (len(docs) - 1))

        positive_scores_matrix = positive_scores.repeat(negative_scores.size(1), 1).T
        negative_scores = negative_scores[indices_to_keep]
        difference_scores = positive_scores_matrix[indices_to_keep] - negative_scores

    if len(dataset_data) == 0:
        raise ValueError("No triplets could be generated. Please check the parameters and dataset.")
    output_dataset = Dataset.from_dict(dataset_data)

    if verbose:
        row_format = "{:<6} {:>14} {:>14} {:>14}"
        formatter = lambda value: f"{value.item():.4f}" if isinstance(value, torch.Tensor) else f"{value:,}"
        print(row_format.format("Metric", "Positive", "Negative", "Difference"))
        print(row_format.format("Count", formatter(len(positive_scores)), formatter(len(negative_scores)), ""))
        for metric, function in [
            ("mean", torch.mean),
            ("median", torch.median),
            ("std", torch.std),
            ("min", lambda value: torch.min(value) if value.numel() > 0 else float("inf")),
            ("25%", lambda value: torch.quantile(value.float(), q=0.25) if value.numel() > 0 else float("inf")),
            ("50%", lambda value: torch.quantile(value.float(), q=0.5) if value.numel() > 0 else float("inf")),
            ("75%", lambda value: torch.quantile(value.float(), q=0.75) if value.numel() > 0 else float("inf")),
            ("max", lambda value: torch.max(value) if value.numel() > 0 else float("-inf")),
        ]:
            print(
                row_format.format(
                    metric.capitalize(),
                    formatter(function(positive_scores)),
                    formatter(function(negative_scores)),
                    formatter(function(difference_scores)),
                )
            )

        for param_name, param_value in [
            ("absolute_margin", absolute_margin),
            ("relative_margin", relative_margin),
            ("max_score", max_score),
            ("min_score", min_score),
        ]:
            if param_name in log_counters:
                skipped = log_counters[param_name]["skipped"]
                ratio = log_counters[param_name]["ratio"]
                print(
                    f"Skipped {skipped:,} potential negatives ({ratio:.2%}) due to the {param_name} of {param_value}."
                )

        missing_negatives = (num_negatives * len(dataset)) - len(negative_scores)
        if missing_negatives > 0:
            solutions = ["range_max"]
            if range_min > 0:
                solutions.append("range_min")
            if absolute_margin is not None:
                solutions.append("absolute_margin")
            if relative_margin is not None:
                solutions.append("relative_margin")
            if max_score is not None:
                solutions.append("max_score")
            considerations = ", ".join(solutions[:-1])
            if len(solutions) > 1:
                considerations += " and " + solutions[-1]
            missing_negatives_ratio = missing_negatives / (num_negatives * len(dataset))
            print(
                f"Could not find enough negatives for {missing_negatives} samples ({missing_negatives_ratio:.2%})."
                f" Consider adjusting the {considerations} parameter{'s' if len(solutions) > 1 else ''}"
                " if you'd like to find more valid negatives."
            )
    return output_dataset
 UZZ u||$EII\]qrpqqr]^	!
 	FH !!%%'h78h78h'89:		( 00)$)$	(
 	#J \)&z29=$Z09wqk)?c{-XbWccghsgttuv	 +S\9S=QQq $I1}  -*  !23*  !23$  -!YYy"~6N9~!'IbM"99&7=3w<;W&X#67H6ITkloSp q++9*:*CPYN]^L^SdfDg  hTU
 s @
 DX  		4 6 5 P R )k "J  `` b _ nN !g"f!M !u#h#
 ds   AM0AM611AM< 	ANANAN
AN0AN AN&#AN+'AN0AN5AN:
AN?&AN??
AO
AO0AO
 AOAO
@=AOAAOA</AO#B+
AOB6AOB=AO#CAO,C&AO,M<	AN	NAN	OAO
OAO#c                   t         j                  j                  |      dk7  r4t        j                  t         j                  j                  |      d       t	        j
                  | d      }|j                  dk7  r<t        d|  d|j                   t        j                         |j                          y	|d
z   }t        |d      5 }|j                  j                  d      }|t        |      nd	}t        d|d      }|j                  d      D ]0  }|s|j!                  t#        |             |j%                  |       2 	 d	d	d	       t        j&                  ||       j)                          y	# 1 sw Y   0xY w)a  
    Downloads a URL to a given path on disk.

    Args:
        url (str): The URL to download.
        path (str): The path to save the downloaded file.

    Raises:
        requests.HTTPError: If the HTTP request returns a non-200 status code.

    Returns:
        None
    r  Tr  )stream   z"Exception when trying to download z. Response )fileN_partwbzContent-LengthB)unitr  
unit_scale   )
chunk_size)rJ  rR  dirnamerK  requestsr   status_coderF  sysstderrraise_for_statusopenheadersintr   iter_contentupdater   writerenameclose)	urlrR  reqdownload_filepathfile_binarycontent_lengthr  progressrz   s	            r!   http_getr  r  s:    
wwt"
BGGOOD)D9
,,s4
(C
#23%{3??BST[^[e[efw		& )+)9:'5'AN#tS$?%%%6 	)EE
+!!%(	)	) II&NN) )s   
AE8-E88Fc                j    | D ]-  }t        | |   t              s| |   j                  |      | |<   / | S )au  
def batch_to_device(batch: dict[str, Any], target_device: device) -> dict[str, Any]:
    """
    Send a PyTorch batch (i.e., a dictionary of string keys to Tensors) to a device (e.g. "cpu", "cuda", "mps").

    Args:
        batch (Dict[str, Tensor]): The batch to send to the device.
        target_device (torch.device): The target device (e.g. "cpu", "cuda", "mps").

    Returns:
        Dict[str, Tensor]: The batch with tensors sent to the target device.
    """
    for key in batch:
        if isinstance(batch[key], Tensor):
            batch[key] = batch[key].to(target_device)
    return batch
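
# Usage sketch for `batch_to_device` (illustrative; assumes a Hugging Face
# tokenizer named `tokenizer` is in scope):
#
#     features = dict(tokenizer(["An example sentence"], return_tensors="pt"))
#     features = batch_to_device(features, torch.device("cuda:0"))
#
# Only Tensor values are moved; non-tensor entries are left untouched.
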
def fullname(o) -> str:
    """
    Gives a full name (package_name.class_name) for a class / object in Python. Will
    be used to load the correct classes from JSON files.

    Args:
        o: The object for which to get the full name.

    Returns:
        str: The full name of the object.

    Example:
        >>> from sentence_transformers.losses import MultipleNegativesRankingLoss
        >>> from sentence_transformers import SentenceTransformer
        >>> from sentence_transformers.util import fullname
        >>> model = SentenceTransformer('all-MiniLM-L6-v2')
        >>> loss = MultipleNegativesRankingLoss(model)
        >>> fullname(loss)
        'sentence_transformers.losses.MultipleNegativesRankingLoss.MultipleNegativesRankingLoss'
    """
    module = o.__class__.__module__
    if module is None or module == str.__class__.__module__:
        return o.__class__.__name__  # Avoid reporting __builtin__
    return module + "." + o.__class__.__name__

def import_from_string(dotted_path: str) -> Any:
    """
    Import a dotted module path and return the attribute/class designated by the
    last name in the path. Raise ImportError if the import failed.

    Args:
        dotted_path (str): The dotted module path.

    Returns:
        Any: The attribute/class designated by the last name in the path.

    Raises:
        ImportError: If the import failed.

    Example:
        >>> import_from_string('sentence_transformers.losses.MultipleNegativesRankingLoss')
        <class 'sentence_transformers.losses.MultipleNegativesRankingLoss.MultipleNegativesRankingLoss'>
    """
    try:
        module_path, class_name = dotted_path.rsplit(".", 1)
    except ValueError:
        msg = f"{dotted_path} doesn't look like a module path"
        raise ImportError(msg)

    try:
        module = importlib.import_module(dotted_path)
    except Exception:
        module = importlib.import_module(module_path)

    try:
        return getattr(module, class_name)
    except AttributeError:
        msg = f'Module "{module_path}" does not define a "{class_name}" attribute/class'
        raise ImportError(msg)

def community_detection(
    embeddings: torch.Tensor | np.ndarray,
    threshold: float = 0.75,
    min_community_size: int = 10,
    batch_size: int = 1024,
    show_progress_bar: bool = False,
) -> list[list[int]]:
    """
    Function for Fast Community Detection.

    Finds in the embeddings all communities, i.e. embeddings that are close (closer than threshold).
    Returns only communities that are larger than min_community_size. The communities are returned
    in decreasing order. The first element in each list is the central point in the community.

    Args:
        embeddings (torch.Tensor or numpy.ndarray): The input embeddings.
        threshold (float): The threshold for determining if two embeddings are close. Defaults to 0.75.
        min_community_size (int): The minimum size of a community to be considered. Defaults to 10.
        batch_size (int): The batch size for computing cosine similarity scores. Defaults to 1024.
        show_progress_bar (bool): Whether to show a progress bar during computation. Defaults to False.

    Returns:
        List[List[int]]: A list of communities, where each community is represented as a list of indices.
    """
    if not isinstance(embeddings, torch.Tensor):
        embeddings = torch.tensor(embeddings)
    threshold = torch.tensor(threshold, device=embeddings.device)
    embeddings = normalize_embeddings(embeddings)

    extracted_communities = []

    # Maximum size for a community
    min_community_size = min(min_community_size, len(embeddings))
    sort_max_size = min(max(2 * min_community_size, 50), len(embeddings))

    for start_idx in tqdm(
        range(0, len(embeddings), batch_size), desc="Finding clusters", disable=not show_progress_bar
    ):
        # Compute cosine similarity scores of this batch against all embeddings
        cos_scores = embeddings[start_idx : start_idx + batch_size] @ embeddings.T

        # On accelerators, threshold all scores at once; on CPU, use a topk-based path
        if embeddings.device.type in ("cuda", "npu"):
            # Threshold the cosine scores and count the close embeddings per row
            threshold_mask = cos_scores >= threshold
            row_wise_count = threshold_mask.sum(1)

            # Only consider rows with enough close embeddings
            large_enough_mask = row_wise_count >= min_community_size
            if not large_enough_mask.any():
                continue

            row_wise_count = row_wise_count[large_enough_mask]
            cos_scores = cos_scores[large_enough_mask]

            # The largest count bounds the community size, so one topk call suffices
            k = int(row_wise_count.max())
            _, top_k_indices = cos_scores.topk(k=k, largest=True)

            # Use the row-wise count to slice the indices per community
            for count, indices in zip(row_wise_count, top_k_indices):
                extracted_communities.append(indices[: int(count)].tolist())
        else:
            # Minimum size for a community
            top_k_values, _ = cos_scores.topk(k=min_community_size, largest=True)

            # Filter for rows whose min_community_size-th neighbor clears the threshold
            for i in range(len(top_k_values)):
                if top_k_values[i][-1] >= threshold:
                    # Only check the most similar entries, growing the window as needed
                    top_val_large, top_idx_large = cos_scores[i].topk(k=sort_max_size, largest=True)

                    while top_val_large[-1] > threshold and sort_max_size < len(embeddings):
                        sort_max_size = min(2 * sort_max_size, len(embeddings))
                        top_val_large, top_idx_large = cos_scores[i].topk(k=sort_max_size, largest=True)

                    extracted_communities.append(top_idx_large[top_val_large >= threshold].tolist())

    # Largest community first
    extracted_communities = sorted(extracted_communities, key=lambda x: len(x), reverse=True)

    # Step 2) Remove overlapping communities
    unique_communities = []
    extracted_ids = set()

    for community in extracted_communities:
        non_overlapped_community = []
        for idx in community:
            if idx not in extracted_ids:
                non_overlapped_community.append(idx)

        if len(non_overlapped_community) >= min_community_size:
            unique_communities.append(non_overlapped_community)
            extracted_ids.update(non_overlapped_community)

    unique_communities = sorted(unique_communities, key=lambda x: len(x), reverse=True)

    return unique_communities

class disabled_tqdm(tqdm):
    """
    Class to override `disable` argument in case progress bars are globally disabled.

    Taken from https://github.com/tqdm/tqdm/issues/619#issuecomment-619639324.
    """

    def __init__(self, *args, **kwargs):
        kwargs["disable"] = True
        super().__init__(*args, **kwargs)

    def __delattr__(self, attr: str) -> None:
        """Fix for https://github.com/huggingface/huggingface_hub/issues/1603"""
        try:
            super().__delattr__(attr)
        except AttributeError:
            if attr != "_lock":
                raise
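
# A clustering sketch for `community_detection` above (illustrative; assumes a
# SentenceTransformer `model` and a list of `sentences` are in scope):
#
#     embeddings = model.encode(sentences, convert_to_tensor=True)
#     clusters = community_detection(embeddings, threshold=0.75, min_community_size=10)
#     for i, cluster in enumerate(clusters):
#         print(f"Cluster {i + 1}: {len(cluster)} elements, centered on index {cluster[0]}")
#
# `disabled_tqdm`, in turn, is a drop-in `tqdm` replacement that is always silent;
# it is handed to `snapshot_download` as `tqdm_class` in `load_dir_path` below so
# that Hub downloads do not emit progress bars.
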
@contextmanager
def disable_logging(highest_level=logging.CRITICAL):
    """
    A context manager that will prevent any logging messages
    triggered during the body from being processed.

    Args:
        highest_level: the maximum logging level allowed.
    """
    previous_level = logging.root.manager.disable
    logging.disable(highest_level)
    try:
        yield
    finally:
        logging.disable(previous_level)
def is_sentence_transformer_model(
    model_name_or_path: str,
    token: bool | str | None = None,
    cache_folder: str | None = None,
    revision: str | None = None,
    local_files_only: bool = False,
) -> bool:
    """
    Checks if the given model name or path corresponds to a SentenceTransformer model.

    Args:
        model_name_or_path (str): The name or path of the model.
        token (Optional[Union[bool, str]]): The token to be used for authentication. Defaults to None.
        cache_folder (Optional[str]): The folder to cache the model files. Defaults to None.
        revision (Optional[str]): The revision of the model. Defaults to None.
        local_files_only (bool): Whether to only use local files for the model. Defaults to False.

    Returns:
        bool: True if the model is a SentenceTransformer model, False otherwise.
    """
    return bool(
        load_file_path(
            model_name_or_path,
            "modules.json",
            token=token,
            cache_folder=cache_folder,
            revision=revision,
            local_files_only=local_files_only,
        )
    )

def load_file_path(
    model_name_or_path: str,
    filename: str,
    subfolder: str = "",
    token: bool | str | None = None,
    cache_folder: str | None = None,
    revision: str | None = None,
    local_files_only: bool = False,
) -> str | None:
    """
    Loads a file from a local or remote location.

    Args:
        model_name_or_path (str): The model name or path.
        filename (str): The name of the file to load.
        subfolder (str): The subfolder within the model subfolder (if applicable).
        token (Optional[Union[bool, str]]): The token to access the remote file (if applicable).
        cache_folder (Optional[str]): The folder to cache the downloaded file (if applicable).
        revision (Optional[str], optional): The revision of the file (if applicable). Defaults to None.
        local_files_only (bool, optional): Whether to only consider local files. Defaults to False.

    Returns:
        Optional[str]: The path to the loaded file, or None if the file could not be found or loaded.
    """
    # If the file is local
    file_path = Path(model_name_or_path, subfolder, filename)
    if file_path.exists():
        return str(file_path)

    # Otherwise, look the file up in the remote repository
    path = Path(subfolder, filename)
    try:
        return hf_hub_download(
            model_name_or_path,
            filename=path.name,
            subfolder=path.parent.as_posix(),
            revision=revision,
            library_name="sentence-transformers",
            token=token,
            cache_dir=cache_folder,
            local_files_only=local_files_only,
        )
    except Exception:
        return None

def load_dir_path(
    model_name_or_path: str,
    subfolder: str | Path,
    token: bool | str | None = None,
    cache_folder: str | None = None,
    revision: str | None = None,
    local_files_only: bool = False,
) -> str | None:
    """
    Loads the subfolder path for a given model name or path.

    Args:
        model_name_or_path (str): The name or path of the model.
        subfolder (str): The subfolder to load.
        token (Optional[Union[bool, str]]): The token for authentication.
        cache_folder (Optional[str]): The folder to cache the downloaded files.
        revision (Optional[str], optional): The revision of the model. Defaults to None.
        local_files_only (bool, optional): Whether to only use local files. Defaults to False.

    Returns:
        Optional[str]: The subfolder path if it exists, otherwise None.
    """
    if isinstance(subfolder, Path):
        subfolder = subfolder.as_posix()

    # If the directory is local
    dir_path = Path(model_name_or_path, subfolder)
    if dir_path.exists():
        return str(dir_path)

    download_kwargs = {
        "repo_id": model_name_or_path,
        "revision": revision,
        "allow_patterns": f"{subfolder}/**" if subfolder not in ("", ".") else None,
        "library_name": "sentence-transformers",
        "token": token,
        "cache_dir": cache_folder,
        "local_files_only": local_files_only,
        "tqdm_class": disabled_tqdm,
    }
    try:
        repo_path = snapshot_download(**download_kwargs)
    except Exception:
        # Retry, but only with local files
        download_kwargs["local_files_only"] = True
        repo_path = snapshot_download(**download_kwargs)

    return os.path.join(repo_path, subfolder)


def save_to_hub_args_decorator(func):
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        # If `repo_name` is provided, treat it as `repo_id` and warn about the deprecation
        repo_name = kwargs.pop("repo_name", None)
        if repo_name and "repo_id" not in kwargs:
            logger.warning(
                "Providing a `repo_name` keyword argument to `save_to_hub` is deprecated, please use `repo_id` instead."
            )
            kwargs["repo_id"] = repo_name

        # If more than two positional arguments are used, pad the removed positional
        # parameter so the remaining arguments keep their old positions
        if len(args) >= 2:
            args = (*args[:2], None, *args[2:])
        return func(self, *args, **kwargs)

    return wrapper
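
# How the three Hub helpers above fit together (illustrative sketch; the repo id
# is a public checkpoint used only as an example):
#
#     repo = "sentence-transformers/all-MiniLM-L6-v2"
#     if is_sentence_transformer_model(repo):
#         config_path = load_file_path(repo, "modules.json")  # single cached file
#         pooling_dir = load_dir_path(repo, "1_Pooling")      # whole subfolder
#
# Local paths win over remote lookups: both helpers return the local file or
# directory when `model_name_or_path` points at a folder on disk.
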
def get_device_name() -> str:
    """
    Returns the name of the device where this module is running on.

    This function only supports single device or basic distributed training setups.
    In distributed mode for cuda device, it uses the rank to assign a specific CUDA device.

    Returns:
        str: Device name, like 'cuda:2', 'mps', 'npu', 'hpu', or 'cpu'
    """
    if torch.cuda.is_available():
        if "LOCAL_RANK" in os.environ:
            local_rank = int(os.environ["LOCAL_RANK"])
        elif torch.distributed.is_initialized() and torch.distributed.get_rank() < torch.cuda.device_count():
            local_rank = torch.distributed.get_rank()
        else:
            local_rank = 0
        return f"cuda:{local_rank}"
    elif torch.backends.mps.is_available():
        return "mps"
    elif is_torch_npu_available():
        return "npu"
    elif importlib.util.find_spec("habana_frameworks") is not None:
        import habana_frameworks.torch.hpu as hthpu

        if hthpu.is_available():
            return "hpu"
    return "cpu"

def check_package_availability(package_name: str, owner: str) -> bool:
    """
    Checks if a package is available from the correct owner.
    """
    try:
        meta = metadata(package_name)
        return meta["Name"] == package_name and owner in meta["Home-page"]
    except PackageNotFoundError:
        return False


def is_accelerate_available() -> bool:
    """
    Returns True if the Huggingface accelerate library is available.
    """
    return check_package_availability("accelerate", "huggingface")


def is_datasets_available() -> bool:
    """
    Returns True if the Huggingface datasets library is available.
    """
    return check_package_availability("datasets", "huggingface")


def is_training_available() -> bool:
    """
    Returns True if we have the required dependencies for training Sentence
    Transformers models, i.e. Huggingface datasets and Huggingface accelerate.
    """
    return is_accelerate_available() and is_datasets_available()


@contextmanager
def disable_datasets_caching():
    """
    A context manager that will disable caching in the datasets library.
    """
    from datasets import disable_caching, enable_caching, is_caching_enabled

    is_originally_enabled = is_caching_enabled()

    try:
        if is_originally_enabled:
            disable_caching()
        yield
    finally:
        if is_originally_enabled:
            enable_caching()


def append_to_last_row(csv_path: str, additional_data) -> bool:
    # Append `additional_data` to the last row of the CSV file at `csv_path`.
    with open(csv_path, newline="", encoding="utf-8") as f:
        reader = csv.reader(f)
        rows = list(reader)

    if len(rows) > 1:
        rows[-1].extend(additional_data)
        with open(csv_path, "w", newline="", encoding="utf-8") as f:
            writer = csv.writer(f)
            writer.writerows(rows)
        return True
    return False