@@ -1,14 +1,13 @@
import bz2
import csv
import os
-import time
+import asyncio
from io import TextIOWrapper

import numpy as np
import pytest
import pytest_asyncio
import redis.asyncio as redis
-import redis.commands.search
import redis.commands.search.aggregation as aggregations
import redis.commands.search.reducers as reducers
from redis.commands.search import AsyncSearch
@@ -49,8 +48,8 @@ async def decoded_r(create_redis, stack_url):
async def waitForIndex(env, idx, timeout=None):
    delay = 0.1
    while True:
-        res = await env.execute_command("FT.INFO", idx)
        try:
+            res = await env.execute_command("FT.INFO", idx)
            if int(res[res.index("indexing") + 1]) == 0:
                break
        except ValueError:
@@ -62,7 +61,7 @@ async def waitForIndex(env, idx, timeout=None):
            except ValueError:
                break

-        time.sleep(delay)
+        await asyncio.sleep(delay)
        if timeout is not None:
            timeout -= delay
            if timeout <= 0:
@@ -1765,7 +1764,7 @@ async def test_binary_and_text_fields(decoded_r: redis.Redis):
    mixed_data = {"first_name": "🐍python", "vector_emb": fake_vec.tobytes()}
    await decoded_r.hset(f"{index_name}:1", mapping=mixed_data)

-    schema = (
+    schema = [
        TagField("first_name"),
        VectorField(
            "embeddings_bio",
@@ -1776,14 +1775,15 @@ async def test_binary_and_text_fields(decoded_r: redis.Redis):
                "DISTANCE_METRIC": "COSINE",
            },
        ),
-    )
+    ]

    await decoded_r.ft(index_name).create_index(
        fields=schema,
        definition=IndexDefinition(
            prefix=[f"{index_name}:"], index_type=IndexType.HASH
        ),
    )
+    await waitForIndex(decoded_r, index_name)

    query = (
        Query("*")
@@ -1793,6 +1793,12 @@ async def test_binary_and_text_fields(decoded_r: redis.Redis):
    result = await decoded_r.ft(index_name).search(query=query, query_params={})
    docs = result.docs

+    if len(docs) == 0:
+        hash_content = await decoded_r.hget(f"{index_name}:1", "first_name")
+        assert len(docs) > 0, (
+            f"Returned search results are empty. Result: {result}; Hash: {hash_content}"
+        )
+
    decoded_vec_from_search_results = np.frombuffer(
        docs[0]["vector_emb"], dtype=np.float32
    )
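
For reference, a minimal standalone sketch of the async polling pattern that `waitForIndex` relies on: it repeatedly issues `FT.INFO` and yields control with `await asyncio.sleep()` between attempts rather than blocking the event loop. The connection details, the index name `"idx"`, and the use of `aclose()` (redis-py 5.x) are illustrative assumptions, not part of the change above.

```python
import asyncio

import redis.asyncio as redis


async def wait_for_index(client, idx, timeout=None):
    """Poll FT.INFO until the index reports no pending indexing work."""
    delay = 0.1
    while True:
        try:
            # FT.INFO returns a flat list of field/value pairs; the value after
            # "indexing" is non-zero while background indexing is in progress.
            res = await client.execute_command("FT.INFO", idx)
            if int(res[res.index("indexing") + 1]) == 0:
                break
        except ValueError:
            break
        # Yield to the event loop instead of blocking it with time.sleep().
        await asyncio.sleep(delay)
        if timeout is not None:
            timeout -= delay
            if timeout <= 0:
                break


async def main():
    # Hypothetical connection details and index name, for illustration only.
    client = redis.Redis(host="localhost", port=6379, decode_responses=True)
    await wait_for_index(client, "idx", timeout=5)
    await client.aclose()  # redis-py 5.x; older versions use close()


if __name__ == "__main__":
    asyncio.run(main())
```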