Commit 1723a26

feat!: Leverage new generator, proto-plus, for google-cloud-datastore (#104)
This uses the new microgenerator as the underlying transport for the Cloud Datastore client. Files in services/, as well as tests/gapic, are generated.

Major changes: discontinues Python 2.7 support.

release-as: 2.0.0-dev1
1 parent b6bc2f7 commit 1723a26
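In broad strokes, the port replaces the old GAPIC modules with proto-plus message types and a services-based client, and switches every RPC to a single request argument. A hedged before/after sketch of the import-level change (the LookupRequest construction is only illustrative):

# Before (old GAPIC generator): raw protobuf modules and a channel-based client.
# from google.cloud.datastore_v1.proto import datastore_pb2
# from google.cloud.datastore_v1.gapic import datastore_client

# After (microgenerator / proto-plus): wrapped message types and a client
# under the services/ package.
from google.cloud.datastore_v1.types import datastore
from google.cloud.datastore_v1.services.datastore import client as datastore_client

# Proto-plus messages are constructed with plain keyword arguments.
request = datastore.LookupRequest(project_id="my-project")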

90 files changed: 11,473 additions and 11,732 deletions


.coveragerc

Lines changed: 10 additions & 12 deletions
@@ -14,25 +14,23 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-# Generated by synthtool. DO NOT EDIT!
 [run]
 branch = True
-omit =
-    google/cloud/__init__.py
-
 [report]
 fail_under = 100
 show_missing = True
+omit =
+    google/cloud/__init__.py
+    google/cloud/datastore_v1/__init__.py
+    google/cloud/datastore_admin_v1/__init__.py
+    */site-packages/*.py
 exclude_lines =
     # Re-enable the standard pragma
     pragma: NO COVER
     # Ignore debug-only repr
     def __repr__
-    # Ignore abstract methods
-    raise NotImplementedError
-omit =
-    */gapic/*.py
-    */proto/*.py
-    */core/*.py
-    */site-packages/*.py
-    google/cloud/__init__.py
+    # Ignore pkg_resources exceptions.
+    # This is added at the module level as a safeguard for if someone
+    # generates the code and tries to run it without pip installing. This
+    # makes it virtually impossible to test properly.
+    except pkg_resources.DistributionNotFound
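The new exclude_lines entry refers to the version-lookup guard that generated clients place at module level. A minimal sketch of that pattern, assuming the standard google.api_core client-info helper (the distribution name and variable names are illustrative):

import pkg_resources
from google.api_core import gapic_v1

try:
    # Normal case: the package is pip-installed, so its version is known.
    _version = pkg_resources.get_distribution("google-cloud-datastore").version
except pkg_resources.DistributionNotFound:
    # Code was generated and run without being installed; coverage excludes
    # this branch because it is impractical to exercise in tests.
    _version = None

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=_version)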

.kokoro/samples/python3.6/common.cfg

Lines changed: 6 additions & 0 deletions
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.6"
 }

+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py36"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-datastore/.kokoro/test-samples.sh"

.kokoro/samples/python3.7/common.cfg

Lines changed: 6 additions & 0 deletions
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.7"
 }

+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py37"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-datastore/.kokoro/test-samples.sh"

.kokoro/samples/python3.8/common.cfg

Lines changed: 6 additions & 0 deletions
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.8"
 }

+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py38"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-datastore/.kokoro/test-samples.sh"

README.rst

Lines changed: 1 addition & 5 deletions
@@ -53,11 +53,7 @@ dependencies.

 Supported Python Versions
 ^^^^^^^^^^^^^^^^^^^^^^^^^
-Python >= 3.5
-
-Deprecated Python Versions
-^^^^^^^^^^^^^^^^^^^^^^^^^^
-Python == 2.7. Python 2.7 support will be removed on January 1, 2020.
+Python >= 3.6


 Mac/Linux

docs/admin_client.rst

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 Datastore Admin Client
 ======================

-.. automodule:: google.cloud.datastore_admin_v1.gapic.datastore_admin_client
+.. automodule:: google.cloud.datastore_admin_v1.services.datastore_admin.client
     :members:
     :show-inheritance:
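For orientation, a hedged sketch of importing the admin client from its new location; the DatastoreAdminClient class name is assumed from the generated module documented above, not stated in this diff:

# New (microgenerator) location of the Datastore Admin client module.
from google.cloud.datastore_admin_v1.services.datastore_admin import client

# Credentials are resolved from the environment, as with the main client.
admin_client = client.DatastoreAdminClient()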

google/cloud/datastore/_gapic.py

Lines changed: 4 additions & 2 deletions
@@ -19,7 +19,8 @@

 from google.cloud._helpers import make_secure_channel
 from google.cloud._http import DEFAULT_USER_AGENT
-from google.cloud.datastore_v1.gapic import datastore_client
+from google.cloud.datastore_v1.services.datastore import client as datastore_client
+from google.cloud.datastore_v1.services.datastore.transports import grpc


 def make_datastore_api(client):
@@ -38,6 +39,7 @@ def make_datastore_api(client):
     else:
         channel = insecure_channel(host)

+    transport = grpc.DatastoreGrpcTransport(channel=channel)
     return datastore_client.DatastoreClient(
-        channel=channel, client_info=client._client_info
+        transport=transport, client_info=client._client_info
     )
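As the change above shows, a pre-built gRPC channel is no longer passed to the client directly; it is wrapped in a transport object first. A minimal sketch of that wiring, assuming an emulator endpoint purely for illustration:

import grpc as grpc_lib

from google.cloud.datastore_v1.services.datastore import client as datastore_client
from google.cloud.datastore_v1.services.datastore.transports import grpc

# Illustrative only: an insecure channel against a local emulator endpoint.
channel = grpc_lib.insecure_channel("localhost:8081")

# Wrap the channel in the generated gRPC transport, then hand the transport
# to the client instead of passing channel= directly.
transport = grpc.DatastoreGrpcTransport(channel=channel)
api = datastore_client.DatastoreClient(transport=transport)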

google/cloud/datastore/_http.py

Lines changed: 3 additions & 3 deletions
@@ -18,7 +18,7 @@

 from google.cloud import _http as connection_module
 from google.cloud import exceptions
-from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2
+from google.cloud.datastore_v1.types import datastore as _datastore_pb2


 DATASTORE_API_HOST = "datastore.googleapis.com"
@@ -108,9 +108,9 @@ def _rpc(http, project, method, base_url, client_info, request_pb, response_pb_c
     :rtype: :class:`google.protobuf.message.Message`
     :returns: The RPC message parsed from the response.
     """
-    req_data = request_pb.SerializeToString()
+    req_data = request_pb._pb.SerializeToString()
     response = _request(http, project, method, req_data, base_url, client_info)
-    return response_pb_cls.FromString(response)
+    return response_pb_cls.deserialize(response)


 def build_api_url(project, method, base_url):
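The serialization change above reflects how proto-plus wraps protobuf messages: the raw message is reached through ._pb when writing bytes, and the wrapper class rebuilds itself from bytes via deserialize. A small round-trip sketch (LookupRequest is used only as an example message):

from google.cloud.datastore_v1.types import datastore as _datastore_pb2

request = _datastore_pb2.LookupRequest(project_id="my-project")

# Serialize by reaching through the proto-plus wrapper to the raw protobuf.
wire_bytes = request._pb.SerializeToString()

# Deserialize via the proto-plus class method, which returns a wrapper again.
restored = _datastore_pb2.LookupRequest.deserialize(wire_bytes)
assert restored.project_id == "my-project"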

google/cloud/datastore/batch.py

Lines changed: 16 additions & 8 deletions
@@ -22,7 +22,7 @@
 """

 from google.cloud.datastore import helpers
-from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2
+from google.cloud.datastore_v1.types import datastore as _datastore_pb2


 class Batch(object):
@@ -219,7 +219,7 @@ def delete(self, key):
             raise ValueError("Key must be from same project as batch")

         key_pb = key.to_protobuf()
-        self._add_delete_key_pb().CopyFrom(key_pb)
+        self._add_delete_key_pb()._pb.CopyFrom(key_pb._pb)

     def begin(self):
         """Begins a batch.
@@ -242,9 +242,9 @@ def _commit(self, retry, timeout):
         This is called by :meth:`commit`.
         """
         if self._id is None:
-            mode = _datastore_pb2.CommitRequest.NON_TRANSACTIONAL
+            mode = _datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
         else:
-            mode = _datastore_pb2.CommitRequest.TRANSACTIONAL
+            mode = _datastore_pb2.CommitRequest.Mode.TRANSACTIONAL

         kwargs = {}
@@ -255,8 +255,15 @@ def _commit(self, retry, timeout):
             kwargs["timeout"] = timeout

         commit_response_pb = self._client._datastore_api.commit(
-            self.project, mode, self._mutations, transaction=self._id, **kwargs
+            request={
+                "project_id": self.project,
+                "mode": mode,
+                "transaction": self._id,
+                "mutations": self._mutations,
+            },
+            **kwargs,
         )
+
         _, updated_keys = _parse_commit_response(commit_response_pb)
         # If the back-end returns without error, we are guaranteed that
         # ``commit`` will return keys that match (length and
@@ -337,11 +344,11 @@ def _assign_entity_to_pb(entity_pb, entity):
     :param entity: The entity being updated within the batch / transaction.
     """
     bare_entity_pb = helpers.entity_to_protobuf(entity)
-    bare_entity_pb.key.CopyFrom(bare_entity_pb.key)
-    entity_pb.CopyFrom(bare_entity_pb)
+    bare_entity_pb._pb.key.CopyFrom(bare_entity_pb._pb.key)
+    entity_pb._pb.CopyFrom(bare_entity_pb._pb)


-def _parse_commit_response(commit_response_pb):
+def _parse_commit_response(commit_response):
     """Extract response data from a commit response.

     :type commit_response_pb: :class:`.datastore_pb2.CommitResponse`
@@ -352,6 +359,7 @@ def _parse_commit_response(commit_response_pb):
         :class:`.entity_pb2.Key` for each incomplete key
         that was completed in the commit.
     """
+    commit_response_pb = commit_response._pb
     mut_results = commit_response_pb.mutation_results
     index_updates = commit_response_pb.index_updates
     completed_keys = [
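Two patterns above recur throughout the port: enum values now live under their nested enum type (CommitRequest.Mode), and RPCs take a single request mapping with retry/timeout passed as separate keyword arguments. A hedged sketch of a non-transactional commit call (the project ID is a placeholder and the mutation list is left empty for brevity):

from google.cloud.datastore_v1.services.datastore import client as datastore_client
from google.cloud.datastore_v1.types import datastore as _datastore_pb2

datastore_api = datastore_client.DatastoreClient()

# Enum values hang off the nested Mode enum rather than the message class.
mode = _datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL

# All request fields travel in one mapping; timeout stays a plain keyword.
commit_response = datastore_api.commit(
    request={
        "project_id": "my-project",
        "mode": mode,
        "mutations": [],  # real calls carry Mutation messages here
    },
    timeout=30.0,
)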

google/cloud/datastore/client.py

Lines changed: 15 additions & 6 deletions
@@ -185,7 +185,12 @@ def _extended_lookup(
     while loop_num < _MAX_LOOPS:  # loop against possible deferred.
         loop_num += 1
         lookup_response = datastore_api.lookup(
-            project, key_pbs, read_options=read_options, **kwargs
+            request={
+                "project_id": project,
+                "keys": key_pbs,
+                "read_options": read_options,
+            },
+            **kwargs,
         )

         # Accumulate the new results.
@@ -535,7 +540,7 @@ def get_multi(
             helpers.key_from_protobuf(deferred_pb) for deferred_pb in deferred
         ]

-        return [helpers.entity_from_protobuf(entity_pb) for entity_pb in entity_pbs]
+        return [helpers.entity_from_protobuf(entity_pb._pb) for entity_pb in entity_pbs]

     def put(self, entity, retry=None, timeout=None):
         """Save an entity in the Cloud Datastore.
@@ -702,7 +707,8 @@ def allocate_ids(self, incomplete_key, num_ids, retry=None, timeout=None):
         kwargs = _make_retry_timeout_kwargs(retry, timeout)

         response_pb = self._datastore_api.allocate_ids(
-            incomplete_key.project, incomplete_key_pbs, **kwargs
+            request={"project_id": incomplete_key.project, "keys": incomplete_key_pbs},
+            **kwargs,
         )
         allocated_ids = [
             allocated_key_pb.path[-1].id for allocated_key_pb in response_pb.keys
@@ -871,8 +877,9 @@ def reserve_ids_sequential(self, complete_key, num_ids, retry=None, timeout=None
             key_pbs.append(key.to_protobuf())

         kwargs = _make_retry_timeout_kwargs(retry, timeout)
-        self._datastore_api.reserve_ids(complete_key.project, key_pbs, **kwargs)
-
+        self._datastore_api.reserve_ids(
+            request={"project_id": complete_key.project, "keys": key_pbs}, **kwargs
+        )
         return None

     def reserve_ids(self, complete_key, num_ids, retry=None, timeout=None):
@@ -921,6 +928,8 @@ def reserve_ids_multi(self, complete_keys, retry=None, timeout=None):

         kwargs = _make_retry_timeout_kwargs(retry, timeout)
         key_pbs = [key.to_protobuf() for key in complete_keys]
-        self._datastore_api.reserve_ids(complete_keys[0].project, key_pbs, **kwargs)
+        self._datastore_api.reserve_ids(
+            request={"project_id": complete_keys[0].project, "keys": key_pbs}, **kwargs
+        )

         return None
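The same request-mapping convention applies to lookup, allocate_ids, and reserve_ids above. A hedged sketch of allocating IDs for an incomplete key; the types.entity module path and the Key.PathElement nesting are assumed from the generated surface rather than stated in this diff:

from google.cloud.datastore_v1.services.datastore import client as datastore_client
from google.cloud.datastore_v1.types import entity as entity_types

datastore_api = datastore_client.DatastoreClient()

# An incomplete key: the final path element carries a kind but no id/name.
incomplete_key = entity_types.Key(
    partition_id=entity_types.PartitionId(project_id="my-project"),
    path=[entity_types.Key.PathElement(kind="Task")],
)

# Positional (project, keys) arguments become a single request mapping.
response = datastore_api.allocate_ids(
    request={"project_id": "my-project", "keys": [incomplete_key]}
)
allocated_ids = [key.path[-1].id for key in response.keys]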
