Skip to content

SentinelManagedConnection searches for new master upon connection failure (#3560) #1

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 10 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/integration.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ jobs:
max-parallel: 15
fail-fast: false
matrix:
redis-version: ['8.2-M01-pre', '${{ needs.redis_version.outputs.CURRENT }}', '7.4.4', '7.2.9']
redis-version: ['8.2-RC1-pre', '${{ needs.redis_version.outputs.CURRENT }}', '7.4.4', '7.2.9']
python-version: ['3.9', '3.13']
parser-backend: ['plain']
event-loop: ['asyncio']
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/spellcheck.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
- name: Check Spelling
uses: rojopolis/spellcheck-github-actions@0.49.0
uses: rojopolis/spellcheck-github-actions@0.51.0
with:
config_path: .github/spellcheck-settings.yml
task_name: Markdown
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ Start a redis via docker (for Redis versions < 8.0):

``` bash
docker run -p 6379:6379 -it redis/redis-stack:latest

```
To install redis-py, simply:

``` bash
Expand Down Expand Up @@ -209,4 +209,4 @@ Special thanks to:
system.
- Paul Hubbard for initial packaging support.

[![Redis](./docs/_static/logo-redis.svg)](https://redis.io)
[![Redis](./docs/_static/logo-redis.svg)](https://redis.io)
232 changes: 232 additions & 0 deletions doctests/home_prob_dts.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,232 @@
# EXAMPLE: home_prob_dts
"""
Probabilistic data type examples:
https://redis.io/docs/latest/develop/connect/clients/python/redis-py/prob

Runnable documentation script: each STEP_START/STEP_END region is extracted
into the published docs, HIDE_* regions are hidden from the rendered page,
and REMOVE_* regions (setup/teardown and assertions) are stripped entirely.
Requires a live local Redis server that supports the probabilistic data
structures (Bloom/Cuckoo filters, CMS, t-digest, Top-K), e.g. redis-stack
or Redis 8+. Every assertion pins the value shown in the adjacent
``print`` comment.
"""

# HIDE_START
import redis
r = redis.Redis(decode_responses=True)
# HIDE_END
# REMOVE_START
# Start from a clean slate so re-runs produce the documented outputs.
r.delete(
"recorded_users", "other_users",
"group:1", "group:2", "both_groups",
"items_sold",
"male_heights", "female_heights", "all_heights",
"top_3_songs"
)
# REMOVE_END

# Bloom filter: compact probabilistic set membership. `madd` returns 1 per
# item newly added; `exists` may give false positives but never false
# negatives, so the 0 for "kaitlyn" is definitive.
# STEP_START bloom
res1 = r.bf().madd("recorded_users", "andy", "cameron", "david", "michelle")
print(res1) # >>> [1, 1, 1, 1]

res2 = r.bf().exists("recorded_users", "cameron")
print(res2) # >>> 1

res3 = r.bf().exists("recorded_users", "kaitlyn")
print(res3) # >>> 0
# STEP_END
# REMOVE_START
assert res1 == [1, 1, 1, 1]
assert res2 == 1
assert res3 == 0
# REMOVE_END

# Cuckoo filter: like a Bloom filter, but additionally supports deleting
# items, demonstrated with `delete` below.
# STEP_START cuckoo
res4 = r.cf().add("other_users", "paolo")
print(res4) # >>> 1

res5 = r.cf().add("other_users", "kaitlyn")
print(res5) # >>> 1

res6 = r.cf().add("other_users", "rachel")
print(res6) # >>> 1

res7 = r.cf().mexists("other_users", "paolo", "rachel", "andy")
print(res7) # >>> [1, 1, 0]

res8 = r.cf().delete("other_users", "paolo")
print(res8) # >>> 1

res9 = r.cf().exists("other_users", "paolo")
print(res9) # >>> 0
# STEP_END
# REMOVE_START
assert res4 == 1
assert res5 == 1
assert res6 == 1
assert res7 == [1, 1, 0]
assert res8 == 1
assert res9 == 0
# REMOVE_END

# HyperLogLog: approximate count of distinct items in fixed memory.
# The merged count (7) equals the size of the union of the two groups.
# STEP_START hyperloglog
res10 = r.pfadd("group:1", "andy", "cameron", "david")
print(res10) # >>> 1

res11 = r.pfcount("group:1")
print(res11) # >>> 3

res12 = r.pfadd("group:2", "kaitlyn", "michelle", "paolo", "rachel")
print(res12) # >>> 1

res13 = r.pfcount("group:2")
print(res13) # >>> 4

res14 = r.pfmerge("both_groups", "group:1", "group:2")
print(res14) # >>> True

res15 = r.pfcount("both_groups")
print(res15) # >>> 7
# STEP_END
# REMOVE_START
assert res10 == 1
assert res11 == 3
assert res12 == 1
assert res13 == 4
assert res14
assert res15 == 7
# REMOVE_END

# Count-min sketch: approximate per-item frequency counters. With this
# small data set the queried counts match the increments exactly.
# STEP_START cms
# Specify that you want to keep the counts within 0.01
# (1%) of the true value with a 0.005 (0.5%) chance
# of going outside this limit.
res16 = r.cms().initbyprob("items_sold", 0.01, 0.005)
print(res16) # >>> True

# The parameters for `incrby()` are two lists. The count
# for each item in the first list is incremented by the
# value at the same index in the second list.
res17 = r.cms().incrby(
"items_sold",
["bread", "tea", "coffee", "beer"], # Items sold
[300, 200, 200, 100]
)
print(res17) # >>> [300, 200, 200, 100]

res18 = r.cms().incrby(
"items_sold",
["bread", "coffee"],
[100, 150]
)
print(res18) # >>> [400, 350]

res19 = r.cms().query("items_sold", "bread", "tea", "coffee", "beer")
print(res19) # >>> [400, 200, 350, 100]
# STEP_END
# REMOVE_START
assert res16
assert res17 == [300, 200, 200, 100]
assert res18 == [400, 350]
assert res19 == [400, 200, 350, 100]
# REMOVE_END

# t-digest: estimates quantiles and CDF values over numeric samples; two
# digests are merged into a combined one at the end.
# STEP_START tdigest
res20 = r.tdigest().create("male_heights")
print(res20) # >>> True

res21 = r.tdigest().add(
"male_heights",
[175.5, 181, 160.8, 152, 177, 196, 164]
)
print(res21) # >>> OK

res22 = r.tdigest().min("male_heights")
print(res22) # >>> 152.0

res23 = r.tdigest().max("male_heights")
print(res23) # >>> 196.0

res24 = r.tdigest().quantile("male_heights", 0.75)
print(res24) # >>> 181

# Note that the CDF value for 181 is not exactly
# 0.75. Both values are estimates.
res25 = r.tdigest().cdf("male_heights", 181)
print(res25) # >>> [0.7857142857142857]

res26 = r.tdigest().create("female_heights")
print(res26) # >>> True

res27 = r.tdigest().add(
"female_heights",
[155.5, 161, 168.5, 170, 157.5, 163, 171]
)
print(res27) # >>> OK

res28 = r.tdigest().quantile("female_heights", 0.75)
print(res28) # >>> [170]

res29 = r.tdigest().merge(
"all_heights", 2, "male_heights", "female_heights"
)
print(res29) # >>> OK

res30 = r.tdigest().quantile("all_heights", 0.75)
print(res30) # >>> [175.5]
# STEP_END
# REMOVE_START
assert res20
assert res21 == "OK"
assert res22 == 152.0
assert res23 == 196.0
assert res24 == [181]
assert res25 == [0.7857142857142857]
assert res26
assert res27 == "OK"
assert res28 == [170]
assert res29 == "OK"
assert res30 == [175.5]
# REMOVE_END

# Top-K: approximately tracks the k most frequent items (here k=3).
# `incrby` returns the item displaced from the top-k (or None) for each
# increment, `list` returns the current top-k, and `query` tests presence.
# STEP_START topk
# The `reserve()` method creates the Top-K object with
# the given key. The parameters are the number of items
# in the ranking and values for `width`, `depth`, and
# `decay`, described in the Top-K reference page.
res31 = r.topk().reserve("top_3_songs", 3, 7, 8, 0.9)
print(res31) # >>> True

# The parameters for `incrby()` are two lists. The count
# for each item in the first list is incremented by the
# value at the same index in the second list.
res32 = r.topk().incrby(
"top_3_songs",
[
"Starfish Trooper",
"Only one more time",
"Rock me, Handel",
"How will anyone know?",
"Average lover",
"Road to everywhere"
],
[
3000,
1850,
1325,
3890,
4098,
770
]
)
print(res32)
# >>> [None, None, None, 'Rock me, Handel', 'Only one more time', None]

res33 = r.topk().list("top_3_songs")
print(res33)
# >>> ['Average lover', 'How will anyone know?', 'Starfish Trooper']

res34 = r.topk().query(
"top_3_songs", "Starfish Trooper", "Road to everywhere"
)
print(res34) # >>> [1, 0]
# STEP_END
# REMOVE_START
assert res31
assert res32 == [None, None, None, 'Rock me, Handel', 'Only one more time', None]
assert res33 == ['Average lover', 'How will anyone know?', 'Starfish Trooper']
assert res34 == [1, 0]
# REMOVE_END
3 changes: 2 additions & 1 deletion redis/_parsers/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -676,7 +676,8 @@ def parse_client_info(value):
"omem",
"tot-mem",
}:
client_info[int_key] = int(client_info[int_key])
if int_key in client_info:
client_info[int_key] = int(client_info[int_key])
return client_info


Expand Down
13 changes: 9 additions & 4 deletions redis/asyncio/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -295,13 +295,18 @@ async def connect(self):
"""Connects to the Redis server if not already connected"""
await self.connect_check_health(check_health=True)

async def connect_check_health(self, check_health: bool = True):
async def connect_check_health(
self, check_health: bool = True, retry_socket_connect: bool = True
):
if self.is_connected:
return
try:
await self.retry.call_with_retry(
lambda: self._connect(), lambda error: self.disconnect()
)
if retry_socket_connect:
await self.retry.call_with_retry(
lambda: self._connect(), lambda error: self.disconnect()
)
else:
await self._connect()
except asyncio.CancelledError:
raise # in 3.7 and earlier, this is an Exception, not BaseException
except (socket.timeout, asyncio.TimeoutError):
Expand Down
17 changes: 10 additions & 7 deletions redis/asyncio/sentinel.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,12 @@
SSLConnection,
)
from redis.commands import AsyncSentinelCommands
from redis.exceptions import ConnectionError, ReadOnlyError, ResponseError, TimeoutError
from redis.utils import str_if_bytes
from redis.exceptions import (
ConnectionError,
ReadOnlyError,
ResponseError,
TimeoutError,
)


class MasterNotFoundError(ConnectionError):
Expand All @@ -37,11 +41,10 @@ def __repr__(self):

async def connect_to(self, address):
self.host, self.port = address
await super().connect()
if self.connection_pool.check_connection:
await self.send_command("PING")
if str_if_bytes(await self.read_response()) != "PONG":
raise ConnectionError("PING failed")
await self.connect_check_health(
check_health=self.connection_pool.check_connection,
retry_socket_connect=False,
)

async def _connect_retry(self):
if self._reader:
Expand Down
8 changes: 4 additions & 4 deletions redis/commands/search/field.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,7 +181,7 @@ def __init__(self, name: str, algorithm: str, attributes: dict, **kwargs):

``name`` is the name of the field.

``algorithm`` can be "FLAT" or "HNSW".
``algorithm`` can be "FLAT", "HNSW", or "SVS-VAMANA".

``attributes`` each algorithm can have specific attributes. Some of them
are mandatory and some of them are optional. See
Expand All @@ -194,10 +194,10 @@ def __init__(self, name: str, algorithm: str, attributes: dict, **kwargs):
if sort or noindex:
raise DataError("Cannot set 'sortable' or 'no_index' in Vector fields.")

if algorithm.upper() not in ["FLAT", "HNSW"]:
if algorithm.upper() not in ["FLAT", "HNSW", "SVS-VAMANA"]:
raise DataError(
"Realtime vector indexing supporting 2 Indexing Methods:"
"'FLAT' and 'HNSW'."
"Realtime vector indexing supporting 3 Indexing Methods:"
"'FLAT', 'HNSW', and 'SVS-VAMANA'."
)

attr_li = []
Expand Down
13 changes: 9 additions & 4 deletions redis/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -378,13 +378,18 @@ def connect(self):
"Connects to the Redis server if not already connected"
self.connect_check_health(check_health=True)

def connect_check_health(self, check_health: bool = True):
def connect_check_health(
self, check_health: bool = True, retry_socket_connect: bool = True
):
if self._sock:
return
try:
sock = self.retry.call_with_retry(
lambda: self._connect(), lambda error: self.disconnect(error)
)
if retry_socket_connect:
sock = self.retry.call_with_retry(
lambda: self._connect(), lambda error: self.disconnect(error)
)
else:
sock = self._connect()
except socket.timeout:
raise TimeoutError("Timeout connecting to server")
except OSError as e:
Expand Down
Loading
Loading