
Commit 12bbcb5

feat(dependencies)!: Upgrade to google-cloud-datastore 2.x (#841)
* feat(dependencies)!: Upgrade to google-cloud-datastore >= 2.7.2

  This is the first version of google-cloud-datastore that does not support
  Python 3.6, which aligns it with our supported versions. Also upgrade some
  other minimum dependencies to align.

  BREAKING CHANGE: If you are using both the NDB and the base
  google-cloud-datastore classes in your codebase, you will have to update
  your use of those google-cloud-datastore classes. See
  https://github.com/googleapis/python-datastore/blob/main/UPGRADING.md for
  recommendations for upgrading to google-cloud-datastore 2.x.

* tweak deps

* fix: Update module imports

* fix: Fix enum namespaces

* fix: Update datastore stub creation

* fix: Update API capitalization/casing

  The new Datastore GRPC transport has different method naming conventions
  than the old stub did.

* fix: Correct access to SerializeToString, CopyFrom, and MergeFromString

* test: Fix tests
1 parent 8f5ec70 · commit 12bbcb5
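For downstream code making the same migration, the import change is mechanical: the generated protobuf modules moved from google.cloud.datastore_v1.proto to google.cloud.datastore_v1.types. A minimal sketch of the before/after, using the same aliases this commit uses so call sites stay unchanged:

    # google-cloud-datastore 1.x (old):
    # from google.cloud.datastore_v1.proto import datastore_pb2
    # from google.cloud.datastore_v1.proto import entity_pb2

    # google-cloud-datastore 2.x (new): message types now live in .types;
    # aliasing them to the old *_pb2 names keeps the rest of the code intact.
    from google.cloud.datastore_v1.types import datastore as datastore_pb2
    from google.cloud.datastore_v1.types import entity as entity_pb2
    from google.cloud.datastore_v1.types import query as query_pb2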

12 files changed: +171 additions, -145 deletions


google/cloud/ndb/_cache.py
Lines changed: 1 addition & 1 deletion

@@ -738,4 +738,4 @@ def global_cache_key(key):
     Returns:
         bytes: The cache key.
     """
-    return _PREFIX + key.to_protobuf().SerializeToString()
+    return _PREFIX + key.to_protobuf()._pb.SerializeToString()
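The change above reflects that 2.x messages are proto-plus wrappers: raw protobuf methods such as SerializeToString now live on the wrapped message reached through the _pb attribute. A minimal standalone sketch of the pattern (the prefix constant is an illustrative stand-in for _cache._PREFIX, not its actual value):

    from google.cloud.datastore import Key

    _PREFIX = b"NDB"  # illustrative stand-in for _cache._PREFIX

    def global_cache_key(key):
        # to_protobuf() returns a proto-plus wrapper; ._pb exposes the
        # underlying protobuf message, which still has SerializeToString().
        return _PREFIX + key.to_protobuf()._pb.SerializeToString()

    cache_key = global_cache_key(Key("Kind", 1234, project="demo-project"))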

google/cloud/ndb/_datastore_api.py
Lines changed: 20 additions & 20 deletions

@@ -20,8 +20,8 @@
 
 from google.api_core import exceptions as core_exceptions
 from google.cloud.datastore import helpers
-from google.cloud.datastore_v1.proto import datastore_pb2
-from google.cloud.datastore_v1.proto import entity_pb2
+from google.cloud.datastore_v1.types import datastore as datastore_pb2
+from google.cloud.datastore_v1.types import entity as entity_pb2
 
 from google.cloud.ndb import context as context_module
 from google.cloud.ndb import _batch
@@ -33,9 +33,9 @@
 from google.cloud.ndb import tasklets
 from google.cloud.ndb import utils
 
-EVENTUAL = datastore_pb2.ReadOptions.EVENTUAL
+EVENTUAL = datastore_pb2.ReadOptions.ReadConsistency.EVENTUAL
 EVENTUAL_CONSISTENCY = EVENTUAL  # Legacy NDB
-STRONG = datastore_pb2.ReadOptions.STRONG
+STRONG = datastore_pb2.ReadOptions.ReadConsistency.STRONG
 
 _DEFAULT_TIMEOUT = None
 _NOT_FOUND = object()
@@ -144,7 +144,7 @@ def lookup(key, options):
         if not key_locked:
             if result:
                 entity_pb = entity_pb2.Entity()
-                entity_pb.MergeFromString(result)
+                entity_pb._pb.MergeFromString(result)
 
             elif use_datastore:
                 lock = yield _cache.global_lock_for_read(cache_key, result)
@@ -165,7 +165,7 @@ def lookup(key, options):
     if use_global_cache and not key_locked:
         if entity_pb is not _NOT_FOUND:
             expires = context._global_cache_timeout(key, options)
-            serialized = entity_pb.SerializeToString()
+            serialized = entity_pb._pb.SerializeToString()
             yield _cache.global_compare_and_swap(
                 cache_key, serialized, expires=expires
             )
@@ -211,7 +211,7 @@ def add(self, key):
         Returns:
             tasklets.Future: A future for the eventual result.
         """
-        todo_key = key.to_protobuf().SerializeToString()
+        todo_key = key.to_protobuf()._pb.SerializeToString()
         future = tasklets.Future(info="Lookup({})".format(key))
         self.todo.setdefault(todo_key, []).append(future)
         return future
@@ -221,7 +221,7 @@ def idle_callback(self):
         keys = []
         for todo_key in self.todo.keys():
             key_pb = entity_pb2.Key()
-            key_pb.ParseFromString(todo_key)
+            key_pb._pb.ParseFromString(todo_key)
             keys.append(key_pb)
 
         read_options = get_read_options(self.options)
@@ -264,20 +264,20 @@ def lookup_callback(self, rpc):
         if results.deferred:
             next_batch = _batch.get_batch(type(self), self.options)
             for key in results.deferred:
-                todo_key = key.SerializeToString()
+                todo_key = key._pb.SerializeToString()
                 next_batch.todo.setdefault(todo_key, []).extend(self.todo[todo_key])
 
         # For all missing keys, set result to _NOT_FOUND and let callers decide
        # how to handle
         for result in results.missing:
-            todo_key = result.entity.key.SerializeToString()
+            todo_key = result.entity.key._pb.SerializeToString()
             for future in self.todo[todo_key]:
                 future.set_result(_NOT_FOUND)
 
         # For all found entities, set the result on their corresponding futures
         for result in results.found:
             entity = result.entity
-            todo_key = entity.key.SerializeToString()
+            todo_key = entity.key._pb.SerializeToString()
             for future in self.todo[todo_key]:
                 future.set_result(entity)
 
@@ -306,7 +306,7 @@ def _datastore_lookup(keys, read_options, retries=None, timeout=None):
         read_options=read_options,
     )
 
-    return make_call("Lookup", request, retries=retries, timeout=timeout)
+    return make_call("lookup", request, retries=retries, timeout=timeout)
 
 
 def get_read_options(options, default_read_consistency=None):
@@ -375,7 +375,7 @@ def put(entity, options):
             lock = yield _cache.global_lock_for_write(cache_key)
         else:
             expires = context._global_cache_timeout(entity.key, options)
-            cache_value = entity_pb.SerializeToString()
+            cache_value = entity_pb._pb.SerializeToString()
             yield _cache.global_set(cache_key, cache_value, expires=expires)
 
     if use_datastore:
@@ -725,7 +725,7 @@ def allocate_ids_callback(self, rpc, mutations, futures):
         # Update mutations with complete keys
         response = rpc.result()
         for mutation, key, future in zip(mutations, response.keys, futures):
-            mutation.upsert.key.CopyFrom(key)
+            mutation.upsert.key._pb.CopyFrom(key._pb)
             future.set_result(key)
 
     @tasklets.tasklet
@@ -863,9 +863,9 @@ def _datastore_commit(mutations, transaction, retries=None, timeout=None):
         :class:`google.cloud.datastore_v1.datastore_pb2.CommitResponse`
     """
     if transaction is None:
-        mode = datastore_pb2.CommitRequest.NON_TRANSACTIONAL
+        mode = datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
     else:
-        mode = datastore_pb2.CommitRequest.TRANSACTIONAL
+        mode = datastore_pb2.CommitRequest.Mode.TRANSACTIONAL
 
     client = context_module.get_context().client
     request = datastore_pb2.CommitRequest(
@@ -875,7 +875,7 @@ def _datastore_commit(mutations, transaction, retries=None, timeout=None):
         transaction=transaction,
     )
 
-    return make_call("Commit", request, retries=retries, timeout=timeout)
+    return make_call("commit", request, retries=retries, timeout=timeout)
 
 
 def allocate(keys, options):
@@ -992,7 +992,7 @@ def _datastore_allocate_ids(keys, retries=None, timeout=None):
     client = context_module.get_context().client
     request = datastore_pb2.AllocateIdsRequest(project_id=client.project, keys=keys)
 
-    return make_call("AllocateIds", request, retries=retries, timeout=timeout)
+    return make_call("allocate_ids", request, retries=retries, timeout=timeout)
 
 
 @tasklets.tasklet
@@ -1048,7 +1048,7 @@ def _datastore_begin_transaction(read_only, retries=None, timeout=None):
         project_id=client.project, transaction_options=options
     )
 
-    return make_call("BeginTransaction", request, retries=retries, timeout=timeout)
+    return make_call("begin_transaction", request, retries=retries, timeout=timeout)
 
 
 @tasklets.tasklet
@@ -1089,4 +1089,4 @@ def _datastore_rollback(transaction, retries=None, timeout=None):
         project_id=client.project, transaction=transaction
     )
 
-    return make_call("Rollback", request, retries=retries, timeout=timeout)
+    return make_call("rollback", request, retries=retries, timeout=timeout)
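The lowercased names passed to make_call above track the new GAPIC transport, which exposes RPCs as snake_case methods (lookup, commit, allocate_ids, begin_transaction, rollback) rather than the CamelCase attributes of the old generated stub. A minimal sketch of name-based dispatch against such a transport; this helper is an assumption for illustration, not NDB's actual make_call, which also layers in retries and tasklet futures:

    def call_rpc(stub, method_name, request, timeout=None):
        # Old stub (1.x) attributes:      stub.Lookup, stub.Commit, stub.RunQuery
        # New transport (2.x) attributes: stub.lookup, stub.commit, stub.run_query
        rpc = getattr(stub, method_name)
        return rpc(request, timeout=timeout)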

google/cloud/ndb/_datastore_query.py
Lines changed: 27 additions & 27 deletions

@@ -22,9 +22,9 @@
 
 from google.cloud import environment_vars
 
-from google.cloud.datastore_v1.proto import datastore_pb2
-from google.cloud.datastore_v1.proto import entity_pb2
-from google.cloud.datastore_v1.proto import query_pb2
+from google.cloud.datastore_v1.types import datastore as datastore_pb2
+from google.cloud.datastore_v1.types import entity as entity_pb2
+from google.cloud.datastore_v1.types import query as query_pb2
 from google.cloud.datastore import helpers, Key
 
 from google.cloud.ndb import context as context_module
@@ -38,24 +38,24 @@
 log = logging.getLogger(__name__)
 
 MoreResultsType = query_pb2.QueryResultBatch.MoreResultsType
-NO_MORE_RESULTS = MoreResultsType.Value("NO_MORE_RESULTS")
-NOT_FINISHED = MoreResultsType.Value("NOT_FINISHED")
-MORE_RESULTS_AFTER_LIMIT = MoreResultsType.Value("MORE_RESULTS_AFTER_LIMIT")
+NO_MORE_RESULTS = MoreResultsType.NO_MORE_RESULTS
+NOT_FINISHED = MoreResultsType.NOT_FINISHED
+MORE_RESULTS_AFTER_LIMIT = MoreResultsType.MORE_RESULTS_AFTER_LIMIT
 
 ResultType = query_pb2.EntityResult.ResultType
-RESULT_TYPE_FULL = ResultType.Value("FULL")
-RESULT_TYPE_KEY_ONLY = ResultType.Value("KEY_ONLY")
-RESULT_TYPE_PROJECTION = ResultType.Value("PROJECTION")
+RESULT_TYPE_FULL = ResultType.FULL
+RESULT_TYPE_KEY_ONLY = ResultType.KEY_ONLY
+RESULT_TYPE_PROJECTION = ResultType.PROJECTION
 
-DOWN = query_pb2.PropertyOrder.DESCENDING
-UP = query_pb2.PropertyOrder.ASCENDING
+DOWN = query_pb2.PropertyOrder.Direction.DESCENDING
+UP = query_pb2.PropertyOrder.Direction.ASCENDING
 
 FILTER_OPERATORS = {
-    "=": query_pb2.PropertyFilter.EQUAL,
-    "<": query_pb2.PropertyFilter.LESS_THAN,
-    "<=": query_pb2.PropertyFilter.LESS_THAN_OR_EQUAL,
-    ">": query_pb2.PropertyFilter.GREATER_THAN,
-    ">=": query_pb2.PropertyFilter.GREATER_THAN_OR_EQUAL,
+    "=": query_pb2.PropertyFilter.Operator.EQUAL,
+    "<": query_pb2.PropertyFilter.Operator.LESS_THAN,
+    "<=": query_pb2.PropertyFilter.Operator.LESS_THAN_OR_EQUAL,
+    ">": query_pb2.PropertyFilter.Operator.GREATER_THAN,
+    ">=": query_pb2.PropertyFilter.Operator.GREATER_THAN_OR_EQUAL,
 }
 
 _KEY_NOT_IN_CACHE = object()
@@ -77,7 +77,7 @@ def make_filter(name, op, value):
         property=query_pb2.PropertyReference(name=name),
         op=FILTER_OPERATORS[op],
     )
-    helpers._set_protobuf_value(filter_pb.value, value)
+    helpers._set_protobuf_value(filter_pb.value._pb, value)
     return filter_pb
 
 
@@ -92,7 +92,7 @@ def make_composite_and_filter(filter_pbs):
         query_pb2.CompositeFilter: The new composite filter.
     """
     return query_pb2.CompositeFilter(
-        op=query_pb2.CompositeFilter.AND,
+        op=query_pb2.CompositeFilter.Operator.AND,
         filters=[_filter_pb(filter_pb) for filter_pb in filter_pbs],
     )
 
@@ -683,7 +683,7 @@ def has_next_async(self):
             next_result = result_sets[0].next()
 
             # Check to see if it's a duplicate
-            hash_key = next_result.result_pb.entity.key.SerializeToString()
+            hash_key = next_result.result_pb.entity.key._pb.SerializeToString()
             if hash_key in self._seen_keys:
                 continue
 
@@ -811,9 +811,9 @@ def _compare(self, other):
                 ).flat_path
             else:
                 this_value_pb = self.result_pb.entity.properties[order.name]
-                this_value = helpers._get_value_from_value_pb(this_value_pb)
+                this_value = helpers._get_value_from_value_pb(this_value_pb._pb)
                 other_value_pb = other.result_pb.entity.properties[order.name]
-                other_value = helpers._get_value_from_value_pb(other_value_pb)
+                other_value = helpers._get_value_from_value_pb(other_value_pb._pb)
 
                 # Compare key paths if ordering by key property
                 if isinstance(this_value, Key):
@@ -935,19 +935,19 @@ def _query_to_protobuf(query):
         ancestor_pb = query.ancestor._key.to_protobuf()
         ancestor_filter_pb = query_pb2.PropertyFilter(
             property=query_pb2.PropertyReference(name="__key__"),
-            op=query_pb2.PropertyFilter.HAS_ANCESTOR,
+            op=query_pb2.PropertyFilter.Operator.HAS_ANCESTOR,
         )
-        ancestor_filter_pb.value.key_value.CopyFrom(ancestor_pb)
+        ancestor_filter_pb.value.key_value._pb.CopyFrom(ancestor_pb._pb)
 
         if filter_pb is None:
             filter_pb = ancestor_filter_pb
 
         elif isinstance(filter_pb, query_pb2.CompositeFilter):
-            filter_pb.filters.add(property_filter=ancestor_filter_pb)
+            filter_pb.filters._pb.add(property_filter=ancestor_filter_pb._pb)
 
         else:
             filter_pb = query_pb2.CompositeFilter(
-                op=query_pb2.CompositeFilter.AND,
+                op=query_pb2.CompositeFilter.Operator.AND,
                 filters=[
                     _filter_pb(filter_pb),
                     _filter_pb(ancestor_filter_pb),
@@ -969,7 +969,7 @@ def _query_to_protobuf(query):
         query_pb.offset = query.offset
 
     if query.limit:
-        query_pb.limit.value = query.limit
+        query_pb._pb.limit.value = query.limit
 
     return query_pb
 
@@ -1016,7 +1016,7 @@ def _datastore_run_query(query):
         read_options=read_options,
     )
     response = yield _datastore_api.make_call(
-        "RunQuery", request, timeout=query.timeout
+        "run_query", request, timeout=query.timeout
     )
     utils.logging_debug(log, response)
     raise tasklets.Return(response)
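The recurring pattern in this file: proto-plus nests message enums (PropertyFilter.Operator, PropertyOrder.Direction, CompositeFilter.Operator), and plain attribute access replaces the old EnumTypeWrapper Value("NAME") lookups. A minimal sketch of building a filter against the 2.x types, with an illustrative property name and value:

    from google.cloud.datastore_v1.types import query as query_pb2

    # Attribute access replaces MoreResultsType.Value("NOT_FINISHED"):
    NOT_FINISHED = query_pb2.QueryResultBatch.MoreResultsType.NOT_FINISHED

    # Enum values now sit one level deeper, under the nested Operator enum:
    filter_pb = query_pb2.PropertyFilter(
        property=query_pb2.PropertyReference(name="age"),
        op=query_pb2.PropertyFilter.Operator.GREATER_THAN_OR_EQUAL,
    )
    filter_pb.value.integer_value = 21  # proto-plus allows nested assignment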

google/cloud/ndb/client.py
Lines changed: 12 additions & 8 deletions

@@ -19,12 +19,13 @@
 import os
 import requests
 
-from google.api_core import client_info
+from google.api_core.gapic_v1 import client_info
 from google.cloud import environment_vars
 from google.cloud import _helpers
 from google.cloud import client as google_client
-from google.cloud.datastore_v1.gapic import datastore_client
-from google.cloud.datastore_v1.proto import datastore_pb2_grpc
+from google.cloud.datastore_v1.services.datastore.transports import (
+    grpc as datastore_grpc,
+)
 
 from google.cloud.ndb import __version__
 from google.cloud.ndb import context as context_module
@@ -35,7 +36,7 @@
     user_agent="google-cloud-ndb/{}".format(__version__)
 )
 
-DATASTORE_API_HOST = datastore_client.DatastoreClient.SERVICE_ADDRESS.rsplit(":", 1)[0]
+DATASTORE_API_HOST = "datastore.googleapis.com"
 
 
 def _get_gcd_project():
@@ -114,14 +115,17 @@ def __init__(self, project=None, namespace=None, credentials=None):
 
         if emulator:
             channel = grpc.insecure_channel(self.host)
-
         else:
-            user_agent = _CLIENT_INFO.to_user_agent()
+            user_agent = self.client_info.to_user_agent()
             channel = _helpers.make_secure_channel(
                 self._credentials, user_agent, self.host
             )
-
-        self.stub = datastore_pb2_grpc.DatastoreStub(channel)
+        self.stub = datastore_grpc.DatastoreGrpcTransport(
+            host=self.host,
+            credentials=credentials,
+            client_info=self.client_info,
+            channel=channel,
+        )
 
     @contextlib.contextmanager
     def context(

google/cloud/ndb/key.py
Lines changed: 3 additions & 3 deletions

@@ -204,7 +204,7 @@ class Key(object):
     >>> reference
     app: "example"
     path {
-      Element {
+      element {
         type: "Kind"
         id: 1337
       }
@@ -681,13 +681,13 @@ def reference(self):
         >>> key = ndb.Key("Trampoline", 88, project="xy", namespace="zt")
         >>> key.reference()
         app: "xy"
+        name_space: "zt"
         path {
-          Element {
+          element {
             type: "Trampoline"
             id: 88
           }
         }
-        name_space: "zt"
         <BLANKLINE>
         """
         if self._reference is None:
