
Commit 37b4833

reverted client and test changes

1 parent d0ba7b0 commit 37b4833

20 files changed, +2445 -2813 lines changed

google/cloud/bigtable/data/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -50,10 +50,10 @@
 __all__ = (
     "BigtableDataClientAsync",
     "TableAsync",
-    "MutationsBatcherAsync",
     "RowKeySamples",
     "ReadRowsQuery",
     "RowRange",
+    "MutationsBatcherAsync",
     "Mutation",
     "RowMutationEntry",
     "SetCell",

google/cloud/bigtable/data/_async/_mutate_rows.py

Lines changed: 23 additions & 26 deletions
@@ -15,36 +15,38 @@
 from __future__ import annotations
 
 from typing import Sequence, TYPE_CHECKING
+from dataclasses import dataclass
 import functools
 
 from google.api_core import exceptions as core_exceptions
 from google.api_core import retry as retries
+import google.cloud.bigtable_v2.types.bigtable as types_pb
 import google.cloud.bigtable.data.exceptions as bt_exceptions
 from google.cloud.bigtable.data._helpers import _make_metadata
 from google.cloud.bigtable.data._helpers import _attempt_timeout_generator
 from google.cloud.bigtable.data._helpers import _retry_exception_factory
 
 # mutate_rows requests are limited to this number of mutations
 from google.cloud.bigtable.data.mutations import _MUTATE_ROWS_REQUEST_MUTATION_LIMIT
-from google.cloud.bigtable.data.mutations import _EntryWithProto
-
-from google.cloud.bigtable.data._sync.cross_sync import CrossSync
 
 if TYPE_CHECKING:
+    from google.cloud.bigtable_v2.services.bigtable.async_client import (
+        BigtableAsyncClient,
+    )
     from google.cloud.bigtable.data.mutations import RowMutationEntry
+    from google.cloud.bigtable.data._async.client import TableAsync
 
-if CrossSync.is_async:
-    from google.cloud.bigtable_v2.services.bigtable.async_client import (
-        BigtableAsyncClient,
-    )
 
-    CrossSync.add_mapping("GapicClient", BigtableAsyncClient)
+@dataclass
+class _EntryWithProto:
+    """
+    A dataclass to hold a RowMutationEntry and its corresponding proto representation.
+    """
+
+    entry: RowMutationEntry
+    proto: types_pb.MutateRowsRequest.Entry
 
 
-@CrossSync.export_sync(
-    path="google.cloud.bigtable.data._sync._mutate_rows._MutateRowsOperation",
-    add_mapping_for_name="_MutateRowsOperation",
-)
 class _MutateRowsOperationAsync:
     """
     MutateRowsOperation manages the logic of sending a set of row mutations,
@@ -64,11 +66,10 @@ class _MutateRowsOperationAsync:
         If not specified, the request will run until operation_timeout is reached.
     """
 
-    @CrossSync.convert
     def __init__(
         self,
-        gapic_client: "CrossSync.GapicClient",
-        table: "CrossSync.Table",
+        gapic_client: "BigtableAsyncClient",
+        table: "TableAsync",
         mutation_entries: list["RowMutationEntry"],
         operation_timeout: float,
         attempt_timeout: float | None,
@@ -99,7 +100,7 @@ def __init__(
             bt_exceptions._MutateRowsIncomplete,
         )
         sleep_generator = retries.exponential_sleep_generator(0.01, 2, 60)
-        self._operation = lambda: CrossSync.retry_target(
+        self._operation = retries.retry_target_async(
             self._run_attempt,
             self.is_retryable,
             sleep_generator,
@@ -114,7 +115,6 @@ def __init__(
         self.remaining_indices = list(range(len(self.mutations)))
         self.errors: dict[int, list[Exception]] = {}
 
-    @CrossSync.convert
     async def start(self):
         """
         Start the operation, and run until completion
@@ -124,7 +124,7 @@ async def start(self):
         """
         try:
             # trigger mutate_rows
-            CrossSync.rm_aio(await self._operation())
+            await self._operation
         except Exception as exc:
             # exceptions raised by retryable are added to the list of exceptions for all unfinalized mutations
             incomplete_indices = self.remaining_indices.copy()
@@ -151,7 +151,6 @@ async def start(self):
                 all_errors, len(self.mutations)
             )
 
-    @CrossSync.convert
     async def _run_attempt(self):
         """
         Run a single attempt of the mutate_rows rpc.
@@ -172,14 +171,12 @@ async def _run_attempt(self):
             return
         # make gapic request
         try:
-            result_generator = CrossSync.rm_aio(
-                await self._gapic_fn(
-                    timeout=next(self.timeout_generator),
-                    entries=request_entries,
-                    retry=None,
-                )
+            result_generator = await self._gapic_fn(
+                timeout=next(self.timeout_generator),
+                entries=request_entries,
+                retry=None,
             )
-            async for result_list in CrossSync.rm_aio(result_generator):
+            async for result_list in result_generator:
                 for result in result_list.entries:
                     # convert sub-request index to global index
                     orig_idx = active_request_indices[result.index]
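Note: the revert replaces the CrossSync retry shim with a coroutine built directly by retries.retry_target_async in __init__ and awaited once in start(). A minimal standalone sketch of that pattern (the flaky target, attempt counter, and 5-second timeout are invented for illustration; assumes a google-api-core recent enough to export retry_target_async):

import asyncio

from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries

attempts = 0

async def flaky_attempt():
    # Hypothetical stand-in for _run_attempt: fail once, then succeed.
    global attempts
    attempts += 1
    if attempts < 2:
        raise core_exceptions.DeadlineExceeded("transient")
    return "done"

# Build the retrying coroutine up front, as __init__ does above...
operation = retries.retry_target_async(
    flaky_attempt,
    retries.if_exception_type(core_exceptions.DeadlineExceeded),
    retries.exponential_sleep_generator(0.01, 2, 60),
    5.0,  # overall operation timeout, in seconds
)

# ...and await it exactly once, as start() does. A coroutine cannot be
# re-awaited, so each _MutateRowsOperationAsync supports a single start().
print(asyncio.run(operation))  # -> done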

google/cloud/bigtable/data/_async/_read_rows.py

Lines changed: 28 additions & 25 deletions
@@ -15,7 +15,13 @@
 
 from __future__ import annotations
 
-from typing import Sequence
+from typing import (
+    TYPE_CHECKING,
+    AsyncGenerator,
+    AsyncIterable,
+    Awaitable,
+    Sequence,
+)
 
 from google.cloud.bigtable_v2.types import ReadRowsRequest as ReadRowsRequestPB
 from google.cloud.bigtable_v2.types import ReadRowsResponse as ReadRowsResponsePB
@@ -26,21 +32,22 @@
 from google.cloud.bigtable.data.read_rows_query import ReadRowsQuery
 from google.cloud.bigtable.data.exceptions import InvalidChunk
 from google.cloud.bigtable.data.exceptions import _RowSetComplete
-from google.cloud.bigtable.data.exceptions import _ResetRow
 from google.cloud.bigtable.data._helpers import _attempt_timeout_generator
 from google.cloud.bigtable.data._helpers import _make_metadata
 from google.cloud.bigtable.data._helpers import _retry_exception_factory
 
 from google.api_core import retry as retries
 from google.api_core.retry import exponential_sleep_generator
 
-from google.cloud.bigtable.data._sync.cross_sync import CrossSync
+if TYPE_CHECKING:
+    from google.cloud.bigtable.data._async.client import TableAsync
+
+
+class _ResetRow(Exception):
+    def __init__(self, chunk):
+        self.chunk = chunk
 
 
-@CrossSync.export_sync(
-    path="google.cloud.bigtable.data._sync._read_rows._ReadRowsOperation",
-    add_mapping_for_name="_ReadRowsOperation",
-)
 class _ReadRowsOperationAsync:
     """
     ReadRowsOperation handles the logic of merging chunks from a ReadRowsResponse stream
@@ -75,7 +82,7 @@ class _ReadRowsOperationAsync:
     def __init__(
         self,
         query: ReadRowsQuery,
-        table: "CrossSync.Table",
+        table: "TableAsync",
         operation_timeout: float,
         attempt_timeout: float,
         retryable_exceptions: Sequence[type[Exception]] = (),
@@ -101,22 +108,22 @@ def __init__(
         self._last_yielded_row_key: bytes | None = None
         self._remaining_count: int | None = self.request.rows_limit or None
 
-    def start_operation(self) -> CrossSync.Iterable[Row]:
+    def start_operation(self) -> AsyncGenerator[Row, None]:
         """
         Start the read_rows operation, retrying on retryable errors.
 
         Yields:
             Row: The next row in the stream
         """
-        return CrossSync.retry_target_stream(
+        return retries.retry_target_stream_async(
             self._read_rows_attempt,
             self._predicate,
             exponential_sleep_generator(0.01, 60, multiplier=2),
             self.operation_timeout,
             exception_factory=_retry_exception_factory,
         )
 
-    def _read_rows_attempt(self) -> CrossSync.Iterable[Row]:
+    def _read_rows_attempt(self) -> AsyncGenerator[Row, None]:
         """
         Attempt a single read_rows rpc call.
         This function is intended to be wrapped by retry logic,
@@ -152,10 +159,9 @@ def _read_rows_attempt(self) -> CrossSync.Iterable[Row]:
         chunked_stream = self.chunk_stream(gapic_stream)
         return self.merge_rows(chunked_stream)
 
-    @CrossSync.convert
     async def chunk_stream(
-        self, stream: CrossSync.Awaitable[CrossSync.Iterable[ReadRowsResponsePB]]
-    ) -> CrossSync.Iterable[ReadRowsResponsePB.CellChunk]:
+        self, stream: Awaitable[AsyncIterable[ReadRowsResponsePB]]
+    ) -> AsyncGenerator[ReadRowsResponsePB.CellChunk, None]:
         """
         process chunks out of raw read_rows stream
 
@@ -164,7 +170,7 @@ async def chunk_stream(
         Yields:
             ReadRowsResponsePB.CellChunk: the next chunk in the stream
         """
-        async for resp in CrossSync.rm_aio(await stream):
+        async for resp in await stream:
             # extract proto from proto-plus wrapper
             resp = resp._pb
 
@@ -205,12 +211,9 @@ async def chunk_stream(
                 current_key = None
 
     @staticmethod
-    @CrossSync.convert(
-        replace_symbols={"__aiter__": "__iter__", "__anext__": "__next__"}
-    )
     async def merge_rows(
-        chunks: CrossSync.Iterable[ReadRowsResponsePB.CellChunk] | None,
-    ) -> CrossSync.Iterable[Row]:
+        chunks: AsyncGenerator[ReadRowsResponsePB.CellChunk, None] | None
+    ) -> AsyncGenerator[Row, None]:
         """
         Merge chunks into rows
 
@@ -225,8 +228,8 @@ async def merge_rows(
         # For each row
         while True:
             try:
-                c = CrossSync.rm_aio(await it.__anext__())
-            except CrossSync.StopIteration:
+                c = await it.__anext__()
+            except StopAsyncIteration:
                 # stream complete
                 return
             row_key = c.row_key
@@ -274,7 +277,7 @@ async def merge_rows(
                         buffer = [value]
                         while c.value_size > 0:
                             # throws when premature end
-                            c = CrossSync.rm_aio(await it.__anext__())
+                            c = await it.__anext__()
 
                             t = c.timestamp_micros
                             cl = c.labels
@@ -306,7 +309,7 @@ async def merge_rows(
                     if c.commit_row:
                         yield Row(row_key, cells)
                         break
-                    c = CrossSync.rm_aio(await it.__anext__())
+                    c = await it.__anext__()
             except _ResetRow as e:
                 c = e.chunk
                 if (
@@ -319,7 +322,7 @@ async def merge_rows(
                 ):
                     raise InvalidChunk("reset row with data")
                 continue
-            except CrossSync.StopIteration:
+            except StopAsyncIteration:
                 raise InvalidChunk("premature end of stream")
 
     @staticmethod
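Note: after the revert, merge_rows drives the chunk iterator by hand with __anext__() and catches the builtin StopAsyncIteration, rather than routing through the CrossSync symbol replacement. Manual __anext__() calls let one loop body pull the next chunk at several different points, which a plain async for cannot express as directly. A minimal sketch of that consumption pattern (the string chunks are invented stand-ins for CellChunk messages):

import asyncio
from typing import AsyncGenerator

async def chunk_stream() -> AsyncGenerator[str, None]:
    # Hypothetical stand-in for the real chunk_stream.
    for chunk in ("chunk-1", "chunk-2"):
        yield chunk

async def merge() -> list[str]:
    it = chunk_stream().__aiter__()
    merged: list[str] = []
    while True:
        try:
            # Pull chunks one at a time, as merge_rows does.
            merged.append(await it.__anext__())
        except StopAsyncIteration:
            # stream complete
            return merged

print(asyncio.run(merge()))  # -> ['chunk-1', 'chunk-2']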
