86 changes: 23 additions & 63 deletions docs/bigquery-usage.rst
@@ -16,33 +16,15 @@ Authentication / Configuration
and
:meth:`from_service_account_p12 <gcloud.bigquery.client.Client.from_service_account_p12>`.

- After setting ``GOOGLE_APPLICATION_CREDENTIALS`` and ``GCLOUD_PROJECT``
environment variables, create an instance of
- After setting :envvar:`GOOGLE_APPLICATION_CREDENTIALS` and
:envvar:`GCLOUD_PROJECT` environment variables, create an instance of
:class:`Client <gcloud.bigquery.client.Client>`.

.. doctest::

>>> from gcloud import bigquery
>>> client = bigquery.Client()

- Override the credentials inferred from the environment by passing explicit
``credentials`` to one of the alternative ``classmethod`` factories,
:meth:`gcloud.bigquery.client.Client.from_service_account_json`:

.. doctest::

>>> from gcloud import bigquery
>>> client = bigquery.Client.from_service_account_json('/path/to/creds.json')

or :meth:`gcloud.bigquery.client.Client.from_service_account_p12`:

.. doctest::

>>> from gcloud import bigquery
>>> client = bigquery.Client.from_service_account_p12(
... '/path/to/creds.p12', 'jrandom@example.com')
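
Putting the environment-based configuration described above together, a minimal setup might look like the following sketch (the key-file path and project name here are placeholders)::

    import os

    from gcloud import bigquery

    # Placeholder values; point these at your own key file and project.
    os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = '/path/to/creds.json'
    os.environ['GCLOUD_PROJECT'] = 'my-project'

    # Project and credentials are inferred from the environment variables.
    client = bigquery.Client()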


Projects
--------

@@ -83,54 +65,35 @@ policies to tables as they are created:
Dataset operations
~~~~~~~~~~~~~~~~~~

Create a new dataset for the client's project:

.. doctest::

>>> from gcloud import bigquery
>>> client = bigquery.Client()
>>> dataset = client.dataset('dataset_name')
>>> dataset.create() # API request

Check for the existence of a dataset:
List datasets for the client's project:

.. doctest::
.. literalinclude:: bigquery_snippets.py
:start-after: [START client_list_datasets]
:end-before: [END client_list_datasets]

>>> from gcloud import bigquery
>>> client = bigquery.Client()
>>> dataset = client.dataset('dataset_name')
>>> dataset.exists() # API request
True
Create a new dataset for the client's project:

List datasets for the client's project:
.. literalinclude:: bigquery_snippets.py
:start-after: [START dataset_create]
:end-before: [END dataset_create]

.. doctest::
Check for the existence of a dataset:

>>> from gcloud import bigquery
>>> client = bigquery.Client()
>>> datasets, next_page_token = client.list_datasets() # API request
>>> [dataset.name for dataset in datasets]
['dataset_name']
.. literalinclude:: bigquery_snippets.py
:start-after: [START dataset_exists]
:end-before: [END dataset_exists]

Refresh metadata for a dataset (to pick up changes made by another client):

.. doctest::

>>> from gcloud import bigquery
>>> client = bigquery.Client()
>>> dataset = client.dataset('dataset_name')
>>> dataset.reload() # API request
.. literalinclude:: bigquery_snippets.py
:start-after: [START dataset_reload]
:end-before: [END dataset_reload]

Patch metadata for a dataset:

.. doctest::

>>> from gcloud import bigquery
>>> client = bigquery.Client()
>>> dataset = client.dataset('dataset_name')
>>> one_day_ms = 24 * 60 * 60 * 1000
>>> dataset.patch(description='Description goes here',
... default_table_expiration_ms=one_day_ms) # API request
.. literalinclude:: bigquery_snippets.py
:start-after: [START dataset_patch]
:end-before: [END dataset_patch]

Replace the ACL for a dataset, and update all writeable fields:

@@ -147,12 +110,9 @@

Delete a dataset:

.. doctest::

>>> from gcloud import bigquery
>>> client = bigquery.Client()
>>> dataset = client.dataset('dataset_name')
>>> dataset.delete() # API request
.. literalinclude:: bigquery_snippets.py
:start-after: [START dataset_delete]
:end-before: [END dataset_delete]


Tables
198 changes: 198 additions & 0 deletions docs/bigquery_snippets.py
@@ -0,0 +1,198 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Testable usage examples for Google Cloud BigQuery API wrapper

Each example function takes a ``client`` argument (which must be an instance
of :class:`gcloud.bigquery.client.Client`) and uses it to perform a task with
the API.

To facilitate running the examples as system tests, each example is also passed
a ``to_delete`` list; the function adds to the list any objects created which
need to be deleted during teardown.
"""

import time

from gcloud.bigquery.client import Client


def snippet(func):
"""Mark ``func`` as a snippet example function."""
func._snippet = True
return func


def _millis():
    # Milliseconds since the epoch, used to build unique dataset names.
    return time.time() * 1000


@snippet
def client_list_datasets(client, to_delete): # pylint: disable=unused-argument
"""List datasets for a project."""

    def do_something_with(dataset):  # pylint: disable=unused-argument
        pass

# [START client_list_datasets]
datasets, token = client.list_datasets() # API request
while True:
for dataset in datasets:
do_something_with(dataset)
if token is None:
break
datasets, token = client.list_datasets(page_token=token) # API request
# [END client_list_datasets]


@snippet
def dataset_create(client, to_delete):
"""Create a dataset."""
DATASET_NAME = 'dataset_create_%d' % (_millis(),)

# [START dataset_create]
dataset = client.dataset(DATASET_NAME)
dataset.create() # API request
# [END dataset_create]

to_delete.append(dataset)


@snippet
def dataset_exists(client, to_delete):
"""Test existence of a dataset."""
DATASET_NAME = 'dataset_exists_%d' % (_millis(),)
dataset = client.dataset(DATASET_NAME)
to_delete.append(dataset)

# [START dataset_exists]
assert not dataset.exists() # API request
dataset.create() # API request
assert dataset.exists() # API request

# [END dataset_exists]


@snippet
def dataset_reload(client, to_delete):
"""Reload a dataset's metadata."""
DATASET_NAME = 'dataset_reload_%d' % (_millis(),)
ORIGINAL_DESCRIPTION = 'Original description'
LOCALLY_CHANGED_DESCRIPTION = 'Locally-changed description'
dataset = client.dataset(DATASET_NAME)
dataset.description = ORIGINAL_DESCRIPTION
dataset.create()
to_delete.append(dataset)

# [START dataset_reload]
assert dataset.description == ORIGINAL_DESCRIPTION
dataset.description = LOCALLY_CHANGED_DESCRIPTION
assert dataset.description == LOCALLY_CHANGED_DESCRIPTION
dataset.reload() # API request
assert dataset.description == ORIGINAL_DESCRIPTION

# [END dataset_reload]


@snippet
def dataset_patch(client, to_delete):
"""Patch a dataset's metadata."""
DATASET_NAME = 'dataset_patch_%d' % (_millis(),)
ORIGINAL_DESCRIPTION = 'Original description'
PATCHED_DESCRIPTION = 'Patched description'
dataset = client.dataset(DATASET_NAME)
dataset.description = ORIGINAL_DESCRIPTION
dataset.create()
to_delete.append(dataset)

# [START dataset_patch]
ONE_DAY_MS = 24 * 60 * 60 * 1000
assert dataset.description == ORIGINAL_DESCRIPTION
dataset.patch(
description=PATCHED_DESCRIPTION,
default_table_expiration_ms=ONE_DAY_MS
) # API request
assert dataset.description == PATCHED_DESCRIPTION
assert dataset.default_table_expiration_ms == ONE_DAY_MS
# [END dataset_patch]


@snippet
def dataset_update(client, to_delete):
"""Update a dataset's metadata."""
DATASET_NAME = 'dataset_update_%d' % (_millis(),)
ORIGINAL_DESCRIPTION = 'Original description'
UPDATED_DESCRIPTION = 'Updated description'
dataset = client.dataset(DATASET_NAME)
dataset.description = ORIGINAL_DESCRIPTION
dataset.create()
to_delete.append(dataset)
dataset.reload()

# [START dataset_update]
from gcloud.bigquery import AccessGrant
assert dataset.description == ORIGINAL_DESCRIPTION
assert dataset.default_table_expiration_ms is None
grant = AccessGrant(
role='READER', entity_type='domain', entity_id='example.com')
assert grant not in dataset.access_grants
ONE_DAY_MS = 24 * 60 * 60 * 1000
dataset.description = UPDATED_DESCRIPTION
dataset.default_table_expiration_ms = ONE_DAY_MS
grants = list(dataset.access_grants)
grants.append(grant)
dataset.access_grants = grants
dataset.update() # API request
assert dataset.description == UPDATED_DESCRIPTION
assert dataset.default_table_expiration_ms == ONE_DAY_MS
assert grant in dataset.access_grants
# [END dataset_update]


@snippet
def dataset_delete(client, to_delete): # pylint: disable=unused-argument
"""Delete a dataset."""
DATASET_NAME = 'dataset_delete_%d' % (_millis(),)
dataset = client.dataset(DATASET_NAME)
dataset.create()

# [START dataset_delete]
assert dataset.exists() # API request
    dataset.delete()  # API request
assert not dataset.exists() # API request
# [END dataset_delete]


def _find_examples():
funcs = [obj for obj in globals().values()
if getattr(obj, '_snippet', False)]
    # Iterate in definition order, i.e. the order the snippets appear above.
    for func in sorted(funcs, key=lambda f: f.__code__.co_firstlineno):
        yield func


def main():
client = Client()
for example in _find_examples():
to_delete = []
        print('%-25s: %s' % (
            example.__name__, example.__doc__))
try:
example(client, to_delete)
except AssertionError as e:
print(' FAIL: %s' % (e,))
except Exception as e: # pylint: disable=broad-except
print(' ERROR: %r' % (e,))
for item in to_delete:
item.delete()


if __name__ == '__main__':
main()
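
The module docstring above notes that each example is written so it can double as a system test. A rough sketch of a driver for that use, reusing only names defined in this file (the ``bigquery_snippets`` import path is an assumption and depends on ``docs/`` being importable), could look like:

    from gcloud.bigquery.client import Client

    import bigquery_snippets


    def test_bigquery_snippets():
        client = Client()
        for example in bigquery_snippets._find_examples():
            to_delete = []
            try:
                example(client, to_delete)
            finally:
                # Tear down whatever the example registered, even if it failed.
                for item in to_delete:
                    item.delete()
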
2 changes: 1 addition & 1 deletion docs/pubsub_snippets.py
@@ -18,7 +18,7 @@
of :class:`gcloud.pubsub.client.Client`) and uses it to perform a task with
the API.

To facility running the examples as system tests, each example is also passed
To facilitate running the examples as system tests, each example is also passed
a ``to_delete`` list; the function adds to the list any objects created which
need to be deleted during teardown.
"""