Contributor

This file will be used for gRPC; hence, can you write one system test for zonal buckets?

Contributor Author

@jules add a system test in test_zonal.py

Contributor

Added a system test for object contexts in test_zonal.py.
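For reference, a minimal sketch of what such a test could look like, assuming pytest-style fixtures (`storage_client`, `blobs_to_delete`) like the ones the existing system tests use; the fixture names and bucket wiring are illustrative, not the exact test that landed:

import uuid

from google.cloud.storage.blob import ObjectContexts, ObjectCustomContextPayload


def test_object_contexts_round_trip(storage_client, blobs_to_delete):
    # Fixture names are hypothetical; adapt to the harness in test_zonal.py.
    bucket = storage_client.bucket("my-zonal-bucket")  # pre-provisioned zonal bucket
    blob = bucket.blob(f"ctx-object-{uuid.uuid4().hex}")
    blob.contexts = ObjectContexts(
        blob, custom={"env": ObjectCustomContextPayload(value="systest")}
    )
    blob.upload_from_string(b"payload")
    blobs_to_delete.append(blob)

    blob.reload()
    assert blob.contexts.custom["env"].value == "systest"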

@@ -87,4 +87,14 @@ def blob_to_proto(blob):
retain_until_time=retain_until_time_proto,
)

contexts = getattr(blob, "contexts", None)
if contexts:
custom_contexts = {}
for key, payload in contexts.custom.items():
custom_contexts[key] = _storage_v2.ObjectCustomContextPayload(
value=payload.value
)

resource_params["contexts"] = _storage_v2.ObjectContexts(custom=custom_contexts)

return _storage_v2.Object(**resource_params)
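A rough sketch of the shape this hunk converts, using an anonymous client so no credentials are needed; the bucket, object, and context key names are made up:

from google.cloud.storage import Client
from google.cloud.storage.blob import ObjectContexts, ObjectCustomContextPayload

client = Client.create_anonymous_client()
blob = client.bucket("example-bucket").blob("example-object")
blob.contexts = ObjectContexts(
    blob, custom={"team": ObjectCustomContextPayload(value="storage")}
)

# blob_to_proto(blob) would then populate the gRPC message roughly as:
#   _storage_v2.Object(
#       ...,
#       contexts=_storage_v2.ObjectContexts(
#           custom={"team": _storage_v2.ObjectCustomContextPayload(value="storage")}
#       ),
#   )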
195 changes: 195 additions & 0 deletions packages/google-cloud-storage/google/cloud/storage/blob.py
@@ -105,6 +105,7 @@
"name",
"retention",
"storageClass",
"contexts",
)
_READ_LESS_THAN_SIZE = (
"Size {:d} was specified but the file-like object only had {:d} bytes remaining."
@@ -3849,6 +3850,7 @@ def compose(
if_metageneration_match=None,
if_source_generation_match=None,
retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
destination_contexts=None,
):
"""Concatenate source blobs into this one.

@@ -3908,6 +3910,12 @@
Change the value to ``DEFAULT_RETRY`` or another `google.api_core.retry.Retry` object
to enable retries regardless of generation precondition setting.
See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout).

:type destination_contexts: :class:`~google.cloud.storage.blob.ObjectContexts`
:param destination_contexts:
(Optional) New contexts to set for the destination object.
See: https://docs.cloud.google.com/storage/docs/use-object-contexts#manage_object_contexts_during_object_operations

"""
with create_trace_span(name="Storage.Blob.compose"):
sources_len = len(sources)
@@ -3959,6 +3967,14 @@

source_objects.append(source_object)

if destination_contexts is not None:
if isinstance(destination_contexts, ObjectContexts):
self.contexts = destination_contexts
else:
raise ValueError(
"destination_contexts must be an ObjectContexts object"
)

request = {
"sourceObjects": source_objects,
"destination": self._properties.copy(),
@@ -3998,6 +4014,7 @@ def rewrite(
if_source_metageneration_not_match=None,
timeout=_DEFAULT_TIMEOUT,
retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
destination_contexts=None,
):
"""Rewrite source blob into this one.

@@ -4081,6 +4098,11 @@
to enable retries regardless of generation precondition setting.
See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout).

:type destination_contexts: :class:`~google.cloud.storage.blob.ObjectContexts`
:param destination_contexts:
(Optional) New contexts to set for the destination object.
See: https://docs.cloud.google.com/storage/docs/use-object-contexts#manage_object_contexts_during_object_operations

:rtype: tuple
:returns: ``(token, bytes_rewritten, total_bytes)``, where ``token``
is a rewrite token (``None`` if the rewrite is complete),
@@ -4126,6 +4148,14 @@
if_source_metageneration_not_match=if_source_metageneration_not_match,
)

if destination_contexts is not None:
if isinstance(destination_contexts, ObjectContexts):
self.contexts = destination_contexts
else:
raise ValueError(
"destination_contexts must be an ObjectContexts object"
)

path = f"{source.path}/rewriteTo{self.path}"
api_response = client._post_resource(
path,
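Usage sketch: `rewrite` is token-based, so the new contexts are passed on every call until the rewrite completes (object names are illustrative):

source = bucket.blob("source.txt")
dest = bucket.blob("dest.txt")
contexts = ObjectContexts(
    dest, custom={"owner": ObjectCustomContextPayload(value="ml-team")}
)

token, bytes_rewritten, total_bytes = dest.rewrite(
    source, destination_contexts=contexts
)
while token is not None:
    # Resume with the returned token until the server reports completion.
    token, bytes_rewritten, total_bytes = dest.rewrite(
        source, token=token, destination_contexts=contexts
    )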
@@ -5008,6 +5038,29 @@ def retention(self):
info = self._properties.get("retention", {})
return Retention.from_api_repr(info, self)

@property
def contexts(self):
"""Retrieve the contexts for this object.

:rtype: :class:`ObjectContexts`
:returns: an instance for managing the object's contexts.
"""
info = self._properties.get("contexts") or {}
return ObjectContexts.from_api_repr(info, self)

@contexts.setter
def contexts(self, value):
"""Update the contexts for this object.

:type value: :class:`ObjectContexts` or dict or None
:param value: the new contexts for the object.
"""
if value is None:
self._properties["contexts"] = None
else:
self._properties["contexts"] = value
self._patch_property("contexts", value)
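A short sketch of how these accessors are meant to be used; `patch()` persisting the change follows from `_patch_property` marking the field as dirty:

blob.reload()  # populate blob._properties from the API
env = blob.contexts.custom.get("env")
if env is not None:
    print(env.value, env.create_time)

# Assignment goes through the setter, so the field is marked as changed
# and a subsequent patch() sends it to the API.
blob.contexts = ObjectContexts(
    blob, custom={"env": ObjectCustomContextPayload(value="prod")}
)
blob.patch()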

@property
def soft_delete_time(self):
"""If this object has been soft-deleted, returns the time at which it became soft-deleted.
@@ -5300,3 +5353,145 @@ def retention_expiration_time(self):
retention_expiration_time = self.get("retentionExpirationTime")
if retention_expiration_time is not None:
return _rfc3339_nanos_to_datetime(retention_expiration_time)


class ObjectCustomContextPayload(dict):
"""Payload for a custom context.

:type value: str or ``NoneType``
:param value: (Optional) The value of the custom context.

:type create_time: :class:`datetime.datetime` or ``NoneType``
:param create_time: (Optional) Creation time of the custom context.

:type update_time: :class:`datetime.datetime` or ``NoneType``
:param update_time: (Optional) Last update time of the custom context.
"""

def __init__(self, value=None, create_time=None, update_time=None):
data = {"value": value}
if create_time is not None:
data["createTime"] = _datetime_to_rfc3339(create_time)
if update_time is not None:
data["updateTime"] = _datetime_to_rfc3339(update_time)
super(ObjectCustomContextPayload, self).__init__(data)
self._contexts = None

@property
def value(self):
"""The value of the custom context.

:rtype: str or ``NoneType``
:returns: The value of the custom context.
"""
return self.get("value")

@value.setter
def value(self, value):
self["value"] = value
if hasattr(self, "_contexts") and self._contexts and self._contexts.blob:
self._contexts.blob._patch_property("contexts", self._contexts)

@property
def create_time(self):
"""Creation time of the custom context.

:rtype: :class:`datetime.datetime` or ``NoneType``
:returns: Datetime object parsed from RFC3339 valid timestamp.
"""
create_time = self.get("createTime")
if create_time is not None:
return _rfc3339_nanos_to_datetime(create_time)

@property
def update_time(self):
"""Last update time of the custom context.

:rtype: :class:`datetime.datetime` or ``NoneType``
:returns: Datetime object parsed from RFC3339 valid timestamp.
"""
update_time = self.get("updateTime")
if update_time is not None:
return _rfc3339_nanos_to_datetime(update_time)


class ObjectContexts(dict):
"""Container for an object's contexts.

:type blob: :class:`Blob`
:param blob: blob to which these contexts apply.

:type custom: dict or ``NoneType``
:param custom: (Optional) Custom contexts mapping.
"""

def __init__(self, blob, custom=None):
data = {}
if custom is not None:
if not isinstance(custom, dict):
raise ValueError(
"custom must be a dictionary mapping keys to ObjectCustomContextPayload instances"
)
for payload in custom.values():
if not isinstance(payload, ObjectCustomContextPayload):
raise ValueError(
"All values in custom must be ObjectCustomContextPayload instances"
)
data["custom"] = custom
super(ObjectContexts, self).__init__(data)
self._blob = blob
if custom is not None:
for payload in custom.values():
payload._contexts = self

@classmethod
def from_api_repr(cls, resource, blob):
"""Factory: construct instance from resource.

:type resource: dict
:param resource: mapping as returned from API call.

:type blob: :class:`Blob`
:param blob: Blob to which these contexts apply.

:rtype: :class:`ObjectContexts`
:returns: ObjectContexts instance created from resource.
"""
instance = cls(blob)
custom = {}
for key, payload_resource in resource.get("custom", {}).items():
payload = ObjectCustomContextPayload()
payload.update(payload_resource)
payload._contexts = instance
custom[key] = payload
instance["custom"] = custom
return instance

@property
def blob(self):
"""Blob for which these contexts apply to.

:rtype: :class:`Blob`
:returns: the instance's blob.
"""
return self._blob

@property
def custom(self):
"""Custom contexts mapping.

:rtype: dict
:returns: Mapping of keys to :class:`ObjectCustomContextPayload` instances.
"""
if "custom" not in self:
self["custom"] = {}
return self["custom"]

@custom.setter
def custom(self, value):
if not isinstance(value, dict):
raise ValueError(
"custom must be a dictionary mapping keys to ObjectCustomContextPayload instances"
)
self["custom"] = value
self.blob._patch_property("contexts", self)
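The `_contexts` back-reference set in `__init__` and `from_api_repr` is what makes in-place payload mutation visible to the blob, as in this sketch:

contexts = ObjectContexts(
    blob, custom={"env": ObjectCustomContextPayload(value="dev")}
)
blob.contexts = contexts

# Mutating the payload value re-patches "contexts" on the owning blob,
# so the change is picked up by the next patch() without reassignment.
contexts.custom["env"].value = "prod"
blob.patch()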
27 changes: 26 additions & 1 deletion packages/google-cloud-storage/google/cloud/storage/bucket.py
@@ -42,6 +42,7 @@
from google.cloud.storage._signing import generate_signed_url_v2, generate_signed_url_v4
from google.cloud.storage.acl import BucketACL, DefaultObjectACL
from google.cloud.storage.blob import Blob, _quote
from google.cloud.storage.blob import ObjectContexts
from google.cloud.storage.constants import (
_DEFAULT_TIMEOUT,
ARCHIVE_STORAGE_CLASS,
@@ -1423,6 +1424,7 @@ def list_blobs(
include_folders_as_prefixes=None,
soft_deleted=None,
page_size=None,
filter_=None,
):
"""Return an iterator used to find blobs in the bucket.

@@ -1516,6 +1518,11 @@
Note ``soft_deleted`` and ``versions`` cannot be set to True simultaneously. See:
https://cloud.google.com/storage/docs/soft-delete

:type filter_: str
:param filter_:
(Optional) Filter string used to filter objects. See:
https://docs.cloud.google.com/storage/docs/listing-objects#filter-by-object-contexts-syntax

Contributor Author
@jules modify the docblock and add the link

:type page_size: int
:param page_size:
(Optional) Maximum number of blobs to return in each page.
@@ -1545,6 +1552,7 @@
match_glob=match_glob,
include_folders_as_prefixes=include_folders_as_prefixes,
soft_deleted=soft_deleted,
filter_=filter_,
)
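A hedged sketch of the new parameter; the filter grammar is defined in the linked docs, and the expression below is illustrative, not verified:

# List only objects whose custom context "env" equals "prod" (example
# expression; see the filter-syntax docs linked above for the real grammar).
for blob in bucket.list_blobs(filter_='contexts.custom.env="prod"'):
    print(blob.name)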

def list_notifications(
@@ -1972,6 +1980,7 @@ def copy_blob(
if_source_metageneration_not_match=None,
timeout=_DEFAULT_TIMEOUT,
retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
destination_contexts=None,
):
"""Copy the given blob to the given bucket, optionally with a new name.

@@ -2065,6 +2074,10 @@
to enable retries regardless of generation precondition setting.
See [Configuring Retries](https://cloud.google.com/python/docs/reference/storage/latest/retry_timeout).

:type destination_contexts: :class:`~google.cloud.storage.blob.ObjectContexts`
:param destination_contexts:
(Optional) New contexts to set for the destination object.
See: https://docs.cloud.google.com/storage/docs/use-object-contexts#manage_object_contexts_during_object_operations

:rtype: :class:`google.cloud.storage.blob.Blob`
:returns: The new Blob.
"""
@@ -2094,10 +2107,22 @@
new_name = blob.name

new_blob = Blob(bucket=destination_bucket, name=new_name)

if destination_contexts is not None:
if isinstance(destination_contexts, ObjectContexts):
new_blob.contexts = destination_contexts
else:
raise ValueError(
"destination_contexts must be an ObjectContexts object"
)
request_body = new_blob._properties.copy()
else:
request_body = None

api_path = blob.path + "/copyTo" + new_blob.path
copy_result = client._post_resource(
api_path,
None,
request_body,
query_params=query_params,
timeout=timeout,
retry=retry,
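Usage sketch (bucket and object names are made up); `ObjectContexts` needs a blob to attach to, so a placeholder for the destination object works:

placeholder = destination_bucket.blob("copy.txt")
new_blob = bucket.copy_blob(
    bucket.blob("source.txt"),
    destination_bucket,
    new_name="copy.txt",
    destination_contexts=ObjectContexts(
        placeholder, custom={"origin": ObjectCustomContextPayload(value="copy")}
    ),
)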
@@ -1291,6 +1291,7 @@ def list_blobs(
match_glob=None,
include_folders_as_prefixes=None,
soft_deleted=None,
filter_=None,
):
"""Return an iterator used to find blobs in the bucket.

@@ -1400,6 +1401,10 @@
Note ``soft_deleted`` and ``versions`` cannot be set to True simultaneously. See:
https://cloud.google.com/storage/docs/soft-delete

filter_ (str):
(Optional) Filter string used to filter objects. See:
https://docs.cloud.google.com/storage/docs/listing-objects#filter-by-object-contexts-syntax

Contributor Author
@jules modify the docblock and add the link

Returns:
Iterator of all :class:`~google.cloud.storage.blob.Blob`
in this bucket matching the arguments. The RPC call
@@ -1443,6 +1448,9 @@ def list_blobs(
if soft_deleted is not None:
extra_params["softDeleted"] = soft_deleted

if filter_ is not None:
extra_params["filter"] = filter_

if bucket.user_project is not None:
extra_params["userProject"] = bucket.user_project
