Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 37 additions & 0 deletions storage/gcloud/aio/storage/storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -484,6 +484,43 @@ async def upload_from_filename(
contents = await file_object.read()
return await self.upload(bucket, object_name, contents, **kwargs)

# https://cloud.google.com/storage/docs/json_api/v1/objects/compose
async def compose(
        self, bucket: str, object_name: str,
        source_object_names: List[str], *,
        content_type: Optional[str] = None,
        params: Optional[Dict[str, str]] = None,
        headers: Optional[Dict[str, Any]] = None,
        session: Optional[Session] = None,
        timeout: int = DEFAULT_TIMEOUT,
) -> Dict[str, Any]:
    """Concatenate existing objects in ``bucket`` into one composite object.

    :param bucket: name of the bucket containing both the source objects
        and the destination object.
    :param object_name: name of the destination (composite) object.
    :param source_object_names: names of the source objects, in the order
        they should be concatenated. The JSON API accepts between 1 and
        32 components per request.
    :param content_type: if given, set as the composite object's
        ``Content-Type`` metadata.
    :param params: extra query parameters for the request.
    :param headers: extra request headers; auth and body headers are
        added on top of these.
    :param session: optional session to use instead of the client's own.
    :param timeout: request timeout, in seconds.
    :return: the object resource of the new composite object, as parsed
        from the API's JSON response.
    :raises ValueError: if ``source_object_names`` is empty or has more
        than 32 entries (the API's documented per-request limit).
    """
    # Fail fast on inputs the API is documented to reject, instead of
    # paying for a network round trip to get an opaque 4xx back.
    if not 1 <= len(source_object_names) <= 32:
        raise ValueError(
            'compose requires between 1 and 32 source objects, got '
            f'{len(source_object_names)}',
        )

    url = (
        f'{self._api_root_read}/{bucket}/o/'
        f'{quote(object_name, safe="")}/compose'
    )
    # Copy rather than mutate, so a caller-supplied headers dict is not
    # modified as a side effect of this call.
    headers = {**(headers or {})}
    headers.update(await self._headers())
    params = params or {}

    # Source object names go in the JSON body, so they are not URL-quoted.
    payload: Dict[str, Any] = {
        'sourceObjects': [{'name': name} for name in source_object_names],
    }
    if content_type:
        payload['destination'] = {'contentType': content_type}
    body = json.dumps(payload).encode('utf-8')
    headers.update({
        'Content-Length': str(len(body)),
        'Content-Type': 'application/json; charset=UTF-8',
    })

    s = AioSession(session) if session else self.session
    resp = await s.post(
        url, headers=headers, params=params, timeout=timeout,
        data=body,
    )
    data: Dict[str, Any] = await resp.json(content_type=None)
    return data

@staticmethod
def _get_stream_len(stream: IO[AnyStr]) -> int:
current = stream.tell()
Expand Down
2 changes: 1 addition & 1 deletion storage/pyproject.rest.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "gcloud-rest-storage"
version = "9.4.0"
version = "9.5.0"
description = "Python Client for Google Cloud Storage"
readme = "README.rst"

Expand Down
2 changes: 1 addition & 1 deletion storage/pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "gcloud-aio-storage"
version = "9.4.0"
version = "9.5.0"
description = "Python Client for Google Cloud Storage"
readme = "README.rst"

Expand Down
55 changes: 55 additions & 0 deletions storage/tests/integration/compose_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
import uuid

import pytest
from gcloud.aio.auth import BUILD_GCLOUD_REST # pylint: disable=no-name-in-module
from gcloud.aio.storage import Storage

# Selectively load libraries based on the package
if BUILD_GCLOUD_REST:
from requests import Session
else:
from aiohttp import ClientSession as Session


@pytest.mark.asyncio
@pytest.mark.parametrize(
    'shard_data,expected_data,content_type,file_extension', [
        (['foo ', 'bar'], b'foo bar', 'text/plain', 'txt'),
        (['{"foo":', '1,', '"bar":2}'],
         b'{"foo":1,"bar":2}', 'application/json', 'json'),
    ],
)
async def test_compose(
    bucket_name, creds, shard_data,
    expected_data, content_type, file_extension,
):
    """Upload shards, compose them into one object, and verify the result."""
    def make_object_name():
        return f'{uuid.uuid4().hex}/{uuid.uuid4().hex}.{file_extension}'

    source_names = [make_object_name() for _ in shard_data]
    destination_name = make_object_name()

    async with Session() as session:
        storage = Storage(service_file=creds, session=session)

        # Upload each shard as its own object.
        for name, datum in zip(source_names, shard_data):
            await storage.upload(
                bucket_name,
                name,
                datum,
                metadata={
                    'Content-Disposition': 'inline',
                },
            )

        # Compose the shards into a single destination object.
        result = await storage.compose(
            bucket_name,
            destination_name,
            source_names,
            content_type=content_type,
        )

        assert result['name'] == destination_name
        assert result['contentType'] == content_type

        # The composite's contents must be the shards' bytes, in order.
        fetched = await storage.download(bucket_name, result['name'])
        assert fetched == expected_data