@@ -251,6 +251,26 @@ async def create_read_session(
are created and do not require manual clean-up by the
caller.


.. code-block::

from google.cloud import bigquery_storage_v1

def sample_create_read_session():
# Create a client
client = bigquery_storage_v1.BigQueryReadClient()

# Initialize request argument(s)
request = bigquery_storage_v1.CreateReadSessionRequest(
parent="parent_value",
)

# Make the request
response = client.create_read_session(request=request)

# Handle the response
print(response)

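A read session is usually created against a specific table and wire format. A minimal sketch of a fuller request, assuming placeholder ``my-project``, ``my_dataset``, and ``my_table`` IDs:

.. code-block::

    from google.cloud import bigquery_storage_v1

    def sample_create_read_session_for_table():
        # Create a client
        client = bigquery_storage_v1.BigQueryReadClient()

        # Describe the table to read and the wire format.
        # "my-project", "my_dataset", and "my_table" are placeholders.
        read_session = bigquery_storage_v1.ReadSession(
            table="projects/my-project/datasets/my_dataset/tables/my_table",
            data_format=bigquery_storage_v1.DataFormat.ARROW,
        )

        request = bigquery_storage_v1.CreateReadSessionRequest(
            parent="projects/my-project",
            read_session=read_session,
            max_stream_count=1,
        )

        # Make the request
        response = client.create_read_session(request=request)

        # Handle the response
        print(response)
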
Args:
request (Union[google.cloud.bigquery_storage_v1.types.CreateReadSessionRequest, dict]):
The request object. Request message for
@@ -368,6 +388,27 @@ def read_rows(
Each request also returns a set of stream statistics
reflecting the current state of the stream.


.. code-block::

from google.cloud import bigquery_storage_v1

def sample_read_rows():
# Create a client
client = bigquery_storage_v1.BigQueryReadClient()

# Initialize request argument(s)
request = bigquery_storage_v1.ReadRowsRequest(
read_stream="read_stream_value",
)

# Make the request
stream = client.read_rows(request=request)

# Handle the response
for response in stream:
print(response)

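The stream name comes from a session created with ``create_read_session``, and the optional ``offset`` field resumes a stream partway through. A minimal sketch, assuming ``session`` is a ``ReadSession`` returned by ``create_read_session`` with at least one stream:

.. code-block::

    from google.cloud import bigquery_storage_v1

    def sample_resume_read_rows(session):
        # Create a client
        client = bigquery_storage_v1.BigQueryReadClient()

        # `session` is assumed to come from create_read_session;
        # offset=0 starts the stream at its first row.
        request = bigquery_storage_v1.ReadRowsRequest(
            read_stream=session.streams[0].name,
            offset=0,
        )

        # Make the request
        stream = client.read_rows(request=request)

        # Handle the response
        for response in stream:
            print(response.row_count)
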
Args:
request (Union[google.cloud.bigquery_storage_v1.types.ReadRowsRequest, dict]):
The request object. Request message for `ReadRows`.
@@ -470,6 +511,26 @@ async def split_read_stream(
original[0-j] = primary[0-j] and original[j-n] = residual[0-m]
once the streams have been read to completion.


.. code-block::

from google.cloud import bigquery_storage_v1

def sample_split_read_stream():
# Create a client
client = bigquery_storage_v1.BigQueryReadClient()

# Initialize request argument(s)
request = bigquery_storage_v1.SplitReadStreamRequest(
name="name_value",
)

# Make the request
response = client.split_read_stream(request=request)

# Handle the response
print(response)

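The optional ``fraction`` field controls where the split point lands. A minimal sketch, assuming ``stream_name`` names an existing read stream:

.. code-block::

    from google.cloud import bigquery_storage_v1

    def sample_split_read_stream_in_half(stream_name):
        # Create a client
        client = bigquery_storage_v1.BigQueryReadClient()

        # `stream_name` is assumed to name an existing read stream;
        # fraction=0.5 asks for a split at roughly the halfway point.
        request = bigquery_storage_v1.SplitReadStreamRequest(
            name=stream_name,
            fraction=0.5,
        )

        # Make the request
        response = client.split_read_stream(request=request)

        # Handle the response
        print(response.primary_stream.name)
        print(response.remainder_stream.name)
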
Args:
request (Union[google.cloud.bigquery_storage_v1.types.SplitReadStreamRequest, dict]):
The request object. Request message for
64 changes: 64 additions & 0 deletions google/cloud/bigquery_storage_v1/services/big_query_read/client.py
@@ -471,6 +471,27 @@ def create_read_session(
are created and do not require manual clean-up by the
caller.



.. code-block::

from google.cloud import bigquery_storage_v1

def sample_create_read_session():
# Create a client
client = bigquery_storage_v1.BigQueryReadClient()

# Initialize request argument(s)
request = bigquery_storage_v1.CreateReadSessionRequest(
parent="parent_value",
)

# Make the request
response = client.create_read_session(request=request)

# Handle the response
print(response)

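The generated method also accepts flattened ``parent``, ``read_session``, and ``max_stream_count`` arguments in place of a request object. A minimal sketch, again with placeholder IDs:

.. code-block::

    from google.cloud import bigquery_storage_v1

    def sample_create_read_session_flattened():
        # Create a client
        client = bigquery_storage_v1.BigQueryReadClient()

        # "my-project", "my_dataset", and "my_table" are placeholders;
        # the client assembles the CreateReadSessionRequest internally.
        response = client.create_read_session(
            parent="projects/my-project",
            read_session=bigquery_storage_v1.ReadSession(
                table="projects/my-project/datasets/my_dataset/tables/my_table",
            ),
            max_stream_count=1,
        )

        # Handle the response
        print(response.name)
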
Args:
request (Union[google.cloud.bigquery_storage_v1.types.CreateReadSessionRequest, dict]):
The request object. Request message for
@@ -578,6 +599,28 @@ def read_rows(
Each request also returns a set of stream statistics
reflecting the current state of the stream.



.. code-block::

from google.cloud import bigquery_storage_v1

def sample_read_rows():
# Create a client
client = bigquery_storage_v1.BigQueryReadClient()

# Initialize request argument(s)
request = bigquery_storage_v1.ReadRowsRequest(
read_stream="read_stream_value",
)

# Make the request
stream = client.read_rows(request=request)

# Handle the response
for response in stream:
print(response)

Args:
request (Union[google.cloud.bigquery_storage_v1.types.ReadRowsRequest, dict]):
The request object. Request message for `ReadRows`.
@@ -671,6 +714,27 @@ def split_read_stream(
original[0-j] = primary[0-j] and original[j-n] = residual[0-m]
once the streams have been read to completion.



.. code-block::

from google.cloud import bigquery_storage_v1

def sample_split_read_stream():
# Create a client
client = bigquery_storage_v1.BigQueryReadClient()

# Initialize request argument(s)
request = bigquery_storage_v1.SplitReadStreamRequest(
name="name_value",
)

# Make the request
response = client.split_read_stream(request=request)

# Handle the response
print(response)

Args:
request (Union[google.cloud.bigquery_storage_v1.types.SplitReadStreamRequest, dict]):
The request object. Request message for
@@ -43,7 +43,6 @@ class BigQueryReadTransport(abc.ABC):

AUTH_SCOPES = (
"https://www.googleapis.com/auth/bigquery",
"https://www.googleapis.com/auth/bigquery.readonly",
"https://www.googleapis.com/auth/cloud-platform",
)

@@ -239,6 +239,26 @@ async def create_write_stream(
stream is considered committed as soon as an acknowledgement is
received.


.. code-block::

from google.cloud import bigquery_storage_v1

def sample_create_write_stream():
# Create a client
client = bigquery_storage_v1.BigQueryWriteClient()

# Initialize request argument(s)
request = bigquery_storage_v1.CreateWriteStreamRequest(
parent="parent_value",
)

# Make the request
response = client.create_write_stream(request=request)

# Handle the response
print(response)

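The ``parent`` is the table path and the stream's ``type_`` sets its commit semantics. A minimal sketch of creating a pending stream, with placeholder IDs:

.. code-block::

    from google.cloud import bigquery_storage_v1

    def sample_create_pending_write_stream():
        # Create a client
        client = bigquery_storage_v1.BigQueryWriteClient()

        # "my-project", "my_dataset", and "my_table" are placeholders.
        # Data in a PENDING stream stays invisible until the stream is
        # committed via BatchCommitWriteStreams.
        request = bigquery_storage_v1.CreateWriteStreamRequest(
            parent="projects/my-project/datasets/my_dataset/tables/my_table",
            write_stream=bigquery_storage_v1.WriteStream(
                type_=bigquery_storage_v1.WriteStream.Type.PENDING,
            ),
        )

        # Make the request
        response = client.create_write_stream(request=request)

        # Handle the response
        print(response.name)
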
Args:
request (Union[google.cloud.bigquery_storage_v1.types.CreateWriteStreamRequest, dict]):
The request object. Request message for
@@ -361,6 +381,37 @@ def append_rows(
rpc), and the stream is explicitly committed via the
``BatchCommitWriteStreams`` rpc.


.. code-block::

from google.cloud import bigquery_storage_v1

def sample_append_rows():
# Create a client
client = bigquery_storage_v1.BigQueryWriteClient()

# Initialize request argument(s)
request = bigquery_storage_v1.AppendRowsRequest(
write_stream="write_stream_value",
)

# This method expects an iterator of
# 'bigquery_storage_v1.AppendRowsRequest' objects.
# Here we create a generator that yields a single `request`
# for demonstration purposes.
requests = [request]

def request_generator():
for request in requests:
yield request

# Make the request
stream = client.append_rows(requests=request_generator())

# Handle the response
for response in stream:
print(response)

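Each request on the stream carries serialized protobuf rows, and the first request on a connection must also carry the writer schema. A sketch of that shape, assuming ``descriptor_proto`` (a ``DescriptorProto`` for the row message) and ``serialized_rows`` (a list of encoded row bytes) already exist:

.. code-block::

    from google.cloud import bigquery_storage_v1

    def build_first_append_request(stream_name, descriptor_proto, serialized_rows):
        # `descriptor_proto` and `serialized_rows` are assumed to be
        # prepared by the caller. The writer schema is required on the
        # first request of a connection and may be omitted afterwards.
        proto_data = bigquery_storage_v1.AppendRowsRequest.ProtoData(
            writer_schema=bigquery_storage_v1.ProtoSchema(
                proto_descriptor=descriptor_proto,
            ),
            rows=bigquery_storage_v1.ProtoRows(
                serialized_rows=serialized_rows,
            ),
        )
        return bigquery_storage_v1.AppendRowsRequest(
            write_stream=stream_name,
            proto_rows=proto_data,
        )

Requests built this way would then be fed to ``append_rows`` through a generator, as in the sample above.
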
Args:
requests (AsyncIterator[`google.cloud.bigquery_storage_v1.types.AppendRowsRequest`]):
The request object AsyncIterator. Request message for `AppendRows`.
@@ -417,6 +468,25 @@ async def get_write_stream(
) -> stream.WriteStream:
r"""Gets information about a write stream.

.. code-block::

from google.cloud import bigquery_storage_v1

def sample_get_write_stream():
# Create a client
client = bigquery_storage_v1.BigQueryWriteClient()

# Initialize request argument(s)
request = bigquery_storage_v1.GetWriteStreamRequest(
name="name_value",
)

# Make the request
response = client.get_write_stream(request=request)

# Handle the response
print(response)

Args:
request (Union[google.cloud.bigquery_storage_v1.types.GetWriteStreamRequest, dict]):
The request object. Request message for
@@ -500,6 +570,26 @@ async def finalize_write_stream(
r"""Finalize a write stream so that no new data can be appended to
the stream. Finalize is not supported on the '_default' stream.


.. code-block::

from google.cloud import bigquery_storage_v1

def sample_finalize_write_stream():
# Create a client
client = bigquery_storage_v1.BigQueryWriteClient()

# Initialize request argument(s)
request = bigquery_storage_v1.FinalizeWriteStreamRequest(
name="name_value",
)

# Make the request
response = client.finalize_write_stream(request=request)

# Handle the response
print(response)

Args:
request (Union[google.cloud.bigquery_storage_v1.types.FinalizeWriteStreamRequest, dict]):
The request object. Request message for invoking
@@ -584,6 +674,27 @@ async def batch_commit_write_streams(
multiple times. Once a stream is committed, data in the stream
becomes available for read operations.


.. code-block::

from google.cloud import bigquery_storage_v1

def sample_batch_commit_write_streams():
# Create a client
client = bigquery_storage_v1.BigQueryWriteClient()

# Initialize request argument(s)
request = bigquery_storage_v1.BatchCommitWriteStreamsRequest(
parent="parent_value",
write_streams=['write_streams_value_1', 'write_streams_value_2'],
)

# Make the request
response = client.batch_commit_write_streams(request=request)

# Handle the response
print(response)

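A commit either succeeds for every listed stream or reports per-stream errors. A minimal sketch of checking the outcome, assuming ``parent`` and ``stream_names`` are already known:

.. code-block::

    from google.cloud import bigquery_storage_v1

    def sample_commit_and_check(parent, stream_names):
        # Create a client
        client = bigquery_storage_v1.BigQueryWriteClient()

        # `parent` and `stream_names` are assumed to come from earlier
        # CreateWriteStream calls.
        request = bigquery_storage_v1.BatchCommitWriteStreamsRequest(
            parent=parent,
            write_streams=stream_names,
        )

        # Make the request
        response = client.batch_commit_write_streams(request=request)

        # If any stream failed, stream_errors is populated and no
        # commit_time is returned.
        if response.stream_errors:
            for error in response.stream_errors:
                print(error.error_message)
        else:
            print(response.commit_time)
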
Args:
request (Union[google.cloud.bigquery_storage_v1.types.BatchCommitWriteStreamsRequest, dict]):
The request object. Request message for
@@ -673,6 +784,26 @@ async def flush_rows(
Flush is not supported on the \_default stream, since it is not
BUFFERED.


.. code-block::

from google.cloud import bigquery_storage_v1

def sample_flush_rows():
# Create a client
client = bigquery_storage_v1.BigQueryWriteClient()

# Initialize request argument(s)
request = bigquery_storage_v1.FlushRowsRequest(
write_stream="write_stream_value",
)

# Make the request
response = client.flush_rows(request=request)

# Handle the response
print(response)

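Flushing advances the point up to which rows in a BUFFERED stream become visible. A minimal sketch, assuming ``stream_name`` is a BUFFERED stream and ``row_offset`` was returned by a prior append:

.. code-block::

    from google.cloud import bigquery_storage_v1

    def sample_flush_up_to(stream_name, row_offset):
        # Create a client
        client = bigquery_storage_v1.BigQueryWriteClient()

        # `stream_name` and `row_offset` are assumed to come from
        # earlier CreateWriteStream and AppendRows calls; rows up to
        # (and including) the offset become readable.
        request = bigquery_storage_v1.FlushRowsRequest(
            write_stream=stream_name,
            offset=row_offset,
        )

        # Make the request
        response = client.flush_rows(request=request)

        # Handle the response
        print(response.offset)
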
Args:
request (Union[google.cloud.bigquery_storage_v1.types.FlushRowsRequest, dict]):
The request object. Request message for `FlushRows`.