
    D_i.                         S r SSKrSSKrSSKJr  SSKJr  SSKJr  \R                  " \
5         SSKrSSS5         " S S\\5      r " S S	\\5      rg! , (       d  f       N'= f)
zAModule contains a few fake embedding models for testing purposes.    N)	BaseModel)override)
Embeddingsc                       \ rS rSr% Sr\\S'    S\\   4S jr	\
S\\   S\\\      4S j5       r\
S\S\\   4S	 j5       rS
rg)FakeEmbeddings   a  Fake embedding model for unit testing purposes.

    This embedding model creates embeddings by sampling from a normal distribution.

    !!! danger "Toy model"
        Do not use this outside of testing, as it is not a real embedding model.

    Instantiate:
        ```python
        from langchain_core.embeddings import FakeEmbeddings

        embed = FakeEmbeddings(size=100)
        ```

    Embed single text:
        ```python
        input_text = "The meaning of life is 42"
        vector = embed.embed_query(input_text)
        print(vector[:3])
        ```
        ```python
        [-0.700234640213188, -0.581266257710429, -1.1328482266445354]
        ```

    Embed multiple texts:
        ```python
        input_texts = ["Document 1...", "Document 2..."]
        vectors = embed.embed_documents(input_texts)
        print(len(vectors))
        # The first 3 coordinates for the first vector
        print(vectors[0][:3])
        ```
        ```python
        2
        [-0.5670477847544458, -0.31403828652395727, -0.5840547508955257]
        ```
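
    Randomness:
        Each call draws fresh samples from an unseeded generator, so embedding
        the same text twice returns different vectors (illustrative check,
        using the `embed` instance created above):
        ```python
        embed.embed_query("x") == embed.embed_query("x")
        ```
        ```python
        False
        ```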
sizereturnc                 ~    [        [        R                  R                  5       R	                  U R
                  S95      $ N)r	   )listnprandomdefault_rngnormalr	   )selfs    X/home/james-whalen/.local/lib/python3.13/site-packages/langchain_core/embeddings/fake.py_get_embeddingFakeEmbeddings._get_embedding:   s,    BII))+22		2BCC    textsc                 L    U Vs/ s H  o R                  5       PM     sn$ s  snf Nr   r   r   _s      r   embed_documentsFakeEmbeddings.embed_documents=   s!    /45u!##%u555s   !textc                 "    U R                  5       $ r   r   r   r   s     r   embed_queryFakeEmbeddings.embed_queryA   s    ""$$r    N)__name__
__module____qualname____firstlineno____doc__int__annotations__r   floatr   r   strr   r"   __static_attributes__r$   r   r   r   r      sy    $L I+DU D 6T#Y 64U3D 6 6 % %U % %r   r   c                       \ rS rSr% Sr\\S'    S\S\\   4S jr	\
S\S\4S j5       r\S	\\   S\\\      4S
 j5       r\S\S\\   4S j5       rSrg)DeterministicFakeEmbeddingF   a  Deterministic fake embedding model for unit testing purposes.

    This embedding model creates embeddings by sampling from a normal distribution
    with a seed based on the hash of the text.

    !!! danger "Toy model"
        Do not use this outside of testing, as it is not a real embedding model.

    Instantiate:
        ```python
        from langchain_core.embeddings import DeterministicFakeEmbedding

        embed = DeterministicFakeEmbedding(size=100)
        ```

    Embed single text:
        ```python
        input_text = "The meaning of life is 42"
        vector = embed.embed_query(input_text)
        print(vector[:3])
        ```
        ```python
        [-0.700234640213188, -0.581266257710429, -1.1328482266445354]
        ```

    Embed multiple texts:
        ```python
        input_texts = ["Document 1...", "Document 2..."]
        vectors = embed.embed_documents(input_texts)
        print(len(vectors))
        # The first 3 coordinates for the first vector
        print(vectors[0][:3])
        ```
        ```python
        2
        [-0.5670477847544458, -0.31403828652395727, -0.5840547508955257]
        ```
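
    Determinism:
        The generator seed is derived from a hash of the input text, so the
        same text always yields the same vector (illustrative check, using the
        `embed` instance created above):
        ```python
        embed.embed_query("42") == embed.embed_query("42")
        ```
        ```python
        True
        ```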
r	   seedr
   c                     [         R                  R                  U5      n[        UR	                  U R
                  S95      $ r   )r   r   r   r   r   r	   )r   r2   rngs      r   r   )DeterministicFakeEmbedding._get_embeddingq   s0    ii##D)CJJDIIJ.//r   r   c                     [        [        R                  " U R                  S5      5      R	                  5       S5      S-  $ )z@Get a seed for the random generator, using the hash of the text.zutf-8r   i )r*   hashlibsha256encode	hexdigest)r   s    r   	_get_seed$DeterministicFakeEmbedding._get_seedv   s1     7>>$++g"67AACRH5PPr   r   c                 h    U Vs/ s H   o R                  U R                  U5      S9PM"     sn$ s  snf N)r2   r   r;   r   s      r   r   *DeterministicFakeEmbedding.embed_documents{   s/    EJKU##):#;UKKKs   '/c                 >    U R                  U R                  U5      S9$ r>   r?   r!   s     r   r"   &DeterministicFakeEmbedding.embed_query   s    ""t(<"==r   r$   N)r%   r&   r'   r(   r)   r*   r+   r   r,   r   staticmethodr-   r;   r   r   r"   r.   r$   r   r   r0   r0   F   s    %N I+03 04; 0
 Q Q Q Q LT#Y L4U3D L L > >U > >r   r0   )r)   
contextlibr7   pydanticr   typing_extensionsr   langchain_core.embeddingsr   suppressImportErrornumpyr   r   r0   r$   r   r   <module>rK      sV    G    & 0% &3%Z 3%l;>Y ;>u &%s   A
A#
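

# Illustrative usage sketch, not part of the original module: it assumes
# numpy is installed and simply contrasts the two fake models above.
if __name__ == "__main__":
    fake = FakeEmbeddings(size=4)
    det = DeterministicFakeEmbedding(size=4)

    # FakeEmbeddings re-samples on every call, so repeated queries differ.
    print(fake.embed_query("hello") == fake.embed_query("hello"))  # False

    # DeterministicFakeEmbedding seeds numpy's default_rng with
    # sha256("hello") % 10**8, so repeated queries match exactly.
    print(det.embed_query("hello") == det.embed_query("hello"))  # True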