From bce5c01ec3b4996849a46aebf7d94e8f160cf0cd Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 17 Apr 2018 09:29:34 +0300 Subject: [PATCH 01/45] src/main completed --- plugins/repository-gcs/build.gradle | 33 +- .../gcs/GoogleCloudStorageBlobStore.java | 362 +++++++----------- .../gcs/GoogleCloudStorageClientSettings.java | 76 ++-- .../gcs/GoogleCloudStoragePlugin.java | 76 +--- .../gcs/GoogleCloudStorageRepository.java | 18 +- .../gcs/GoogleCloudStorageService.java | 134 +++---- .../repositories/gcs/MockStorage.java | 46 +-- 7 files changed, 274 insertions(+), 471 deletions(-) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index bf2768a4312d8..98f5bdc52cbc6 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -27,20 +27,27 @@ esplugin { classname 'org.elasticsearch.repositories.gcs.GoogleCloudStoragePlugin' } -versions << [ - 'google': '1.23.0', -] +//versions << [ +// 'google': '1.23.0', +//] dependencies { - compile "com.google.apis:google-api-services-storage:v1-rev115-${versions.google}" - compile "com.google.api-client:google-api-client:${versions.google}" - compile "com.google.oauth-client:google-oauth-client:${versions.google}" - compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" - compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" - compile "commons-logging:commons-logging:${versions.commonslogging}" - compile "commons-codec:commons-codec:${versions.commonscodec}" - compile "com.google.http-client:google-http-client:${versions.google}" - compile "com.google.http-client:google-http-client-jackson2:${versions.google}" + compile 'com.google.cloud:google-cloud-storage:1.25.0' + compile 'com.google.cloud:google-cloud-core:1.25.0' + compile 'com.google.cloud:google-cloud-core-http:1.25.0' + compile 'com.google.api:gax:1.24.0' + compile 'org.threeten:threetenbp:1.3.6' + compile 'com.google.auth:google-auth-library-oauth2-http:0.9.1' + compile 'com.google.auth:google-auth-library-credentials:0.9.1' +// compile "com.google.apis:google-api-services-storage:v1-rev115-${versions.google}" +// compile "com.google.api-client:google-api-client:${versions.google}" +// compile "com.google.oauth-client:google-oauth-client:${versions.google}" +// compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" +// compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" +// compile "commons-logging:commons-logging:${versions.commonslogging}" +// compile "commons-codec:commons-codec:${versions.commonscodec}" +// compile "com.google.http-client:google-http-client:${versions.google}" +// compile "com.google.http-client:google-http-client-jackson2:${versions.google}" } dependencyLicenses { @@ -105,5 +112,5 @@ integTestCluster { 'bin/elasticsearch-keystore', 'add-file', 'gcs.client.integration_test.credentials_file', "${serviceAccountFile.absolutePath}" /* Use a closure on the string to delay evaluation until tests are executed */ - setting 'gcs.client.integration_test.endpoint', "http://${ -> googleCloudStorageFixture.addressAndPort }" + setting 'gcs.client.integration_test.host', "http://${ -> googleCloudStorageFixture.addressAndPort }" } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 225411f86dc49..ea9f540a704a6 100644 --- 
a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -19,16 +19,16 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.batch.BatchRequest; -import com.google.api.client.googleapis.batch.json.JsonBatchCallback; -import com.google.api.client.googleapis.json.GoogleJsonError; -import com.google.api.client.googleapis.json.GoogleJsonResponseException; -import com.google.api.client.http.HttpHeaders; -import com.google.api.client.http.InputStreamContent; -import com.google.api.services.storage.Storage; -import com.google.api.services.storage.model.Bucket; -import com.google.api.services.storage.model.Objects; -import com.google.api.services.storage.model.StorageObject; +import com.google.cloud.ReadChannel; +import com.google.cloud.WriteChannel; +import com.google.cloud.storage.Blob; +import com.google.cloud.storage.BlobId; +import com.google.cloud.storage.BlobInfo; +import com.google.cloud.storage.Bucket; +import com.google.cloud.storage.CopyWriter; +import com.google.cloud.storage.Storage; +import com.google.cloud.storage.Storage.BlobListOption; +import com.google.cloud.storage.Storage.CopyRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; @@ -36,42 +36,28 @@ import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.BlobStoreException; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; +import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.CountDown; - import java.io.IOException; import java.io.InputStream; +import java.nio.BufferUnderflowException; +import java.nio.ByteBuffer; import java.nio.file.NoSuchFileException; -import java.util.ArrayList; import java.util.Collection; -import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Spliterator; -import java.util.function.Consumer; -import java.util.function.Function; import java.util.stream.Collectors; -import java.util.stream.Stream; -import java.util.stream.StreamSupport; - -import static java.net.HttpURLConnection.HTTP_NOT_FOUND; class GoogleCloudStorageBlobStore extends AbstractComponent implements BlobStore { - /** - * Google Cloud Storage batch requests are limited to 1000 operations - **/ - private static final int MAX_BATCHING_REQUESTS = 999; - - private final Storage client; + private final Storage storage; private final String bucket; - GoogleCloudStorageBlobStore(Settings settings, String bucket, Storage storageClient) { + GoogleCloudStorageBlobStore(Settings settings, String bucket, Storage storage) { super(settings); this.bucket = bucket; - this.client = storageClient; - + this.storage = storage; if (doesBucketExist(bucket) == false) { throw new BlobStoreException("Bucket [" + bucket + "] does not exist"); } @@ -100,21 +86,13 @@ public void close() { boolean doesBucketExist(String bucketName) { try { return SocketAccess.doPrivilegedIOException(() -> { - try { - Bucket bucket = client.buckets().get(bucketName).execute(); - if (bucket != null) { - return Strings.hasText(bucket.getId()); - } - } catch (GoogleJsonResponseException e) { - GoogleJsonError error = e.getDetails(); - if 
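
For reference, the listing call adopted above can be exercised on its own as in the sketch below (not part of the patch; the bucket name and prefix are hypothetical, and in the plugin the Storage instance comes from GoogleCloudStorageService rather than the default options). Page#iterateAll() follows pagination by itself, which is what later allows this patch to drop the old StorageObjectsSpliterator.

    import com.google.cloud.storage.Blob;
    import com.google.cloud.storage.Storage;
    import com.google.cloud.storage.Storage.BlobListOption;
    import com.google.cloud.storage.StorageOptions;

    public class ListBlobsSketch {
        public static void main(String[] args) {
            // Default credential/project resolution; the plugin builds its Storage
            // instance through GoogleCloudStorageService#createClient instead.
            final Storage storage = StorageOptions.getDefaultInstance().getService();
            // iterateAll() transparently fetches further pages, replacing the
            // manual page-token handling of the old StorageObjectsSpliterator.
            for (Blob blob : storage.list("my-bucket", BlobListOption.prefix("base/path/")).iterateAll()) {
                System.out.println(blob.getName() + " (" + blob.getSize() + " bytes)");
            }
        }
    }
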
((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) { - return false; - } - throw e; + final Bucket bucket = storage.get(bucketName); + if (bucket != null) { + return Strings.hasText(bucket.getName()); } return false; }); - } catch (IOException e) { + } catch (final Exception e) { throw new BlobStoreException("Unable to check if bucket [" + bucketName + "] exists", e); } } @@ -125,33 +103,31 @@ boolean doesBucketExist(String bucketName) { * @param path base path of the blobs to list * @return a map of blob names and their metadata */ - Map listBlobs(String path) throws IOException { - return SocketAccess.doPrivilegedIOException(() -> listBlobsByPath(bucket, path, path)); + Map listBlobs(String prefix) throws IOException { + return listBlobsByPrefix(prefix, ""); } /** - * List all blobs in the bucket which have a prefix + * List all blobs in the bucket which have a prefix. * - * @param path base path of the blobs to list - * @param prefix prefix of the blobs to list - * @return a map of blob names and their metadata + * @param path + * base path of the blobs to list. This path is removed from the + * names of the blobs returned. + * @param prefix + * prefix of the blobs to list. + * @return a map of blob names and their metadata. */ Map listBlobsByPrefix(String path, String prefix) throws IOException { - return SocketAccess.doPrivilegedIOException(() -> listBlobsByPath(bucket, buildKey(path, prefix), path)); - } - - /** - * Lists all blobs in a given bucket - * - * @param bucketName name of the bucket - * @param path base path of the blobs to list - * @param pathToRemove if true, this path part is removed from blob name - * @return a map of blob names and their metadata - */ - private Map listBlobsByPath(String bucketName, String path, String pathToRemove) throws IOException { - return blobsStream(client, bucketName, path, MAX_BATCHING_REQUESTS) - .map(new BlobMetaDataConverter(pathToRemove)) - .collect(Collectors.toMap(PlainBlobMetaData::name, Function.identity())); + final String pathPrefix = buildKey(path, prefix); + final MapBuilder mapBuilder = MapBuilder.newMapBuilder(); + SocketAccess.doPrivilegedVoidIOException(() -> { + storage.get(bucket).list(BlobListOption.prefix(pathPrefix)).iterateAll().forEach(blob -> { + assert blob.getName().startsWith(path); + final String suffixName = blob.getName().substring(path.length()); + mapBuilder.put(suffixName, new PlainBlobMetaData(suffixName, blob.getSize())); + }); + }); + return mapBuilder.immutableMap(); } /** @@ -161,17 +137,10 @@ private Map listBlobsByPath(String bucketName, String path * @return true if the blob exists, false otherwise */ boolean blobExists(String blobName) throws IOException { - try { - StorageObject blob = SocketAccess.doPrivilegedIOException(() -> client.objects().get(bucket, blobName).execute()); - if (blob != null) { - return Strings.hasText(blob.getId()); - } - } catch (GoogleJsonResponseException e) { - GoogleJsonError error = e.getDetails(); - if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) { - return false; - } - throw e; + final BlobId blobId = BlobId.of(bucket, blobName); + final Blob blob = SocketAccess.doPrivilegedIOException(() -> storage.get(blobId)); + if (blob != null) { + return Strings.hasText(blob.getName()); } return false; } @@ -183,18 +152,43 @@ boolean blobExists(String blobName) throws IOException { * @return an InputStream */ InputStream readBlob(String blobName) throws IOException { - try { - 
return SocketAccess.doPrivilegedIOException(() -> { - Storage.Objects.Get object = client.objects().get(bucket, blobName); - return object.executeMediaAsInputStream(); + final BlobId blobId = BlobId.of(bucket, blobName); + final ReadChannel reader = SocketAccess.doPrivilegedIOException(() -> + { + final Blob blob = storage.get(blobId); + if (blob == null) { + return null; + } + return blob.reader(); }); - } catch (GoogleJsonResponseException e) { - GoogleJsonError error = e.getDetails(); - if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) { - throw new NoSuchFileException(e.getMessage()); - } - throw e; + if (reader == null) { + throw new IOException("Blob [" + blobName + "] does not exit."); } + final ByteBuffer buffer = ByteBuffer.allocate(64 * 1024); + // first read pull data + buffer.flip(); + return new InputStream() { + + @Override + public int read() throws IOException { + try { + return buffer.get(); + } catch (final BufferUnderflowException e) { + // pull another chunck + buffer.clear(); + if (SocketAccess.doPrivilegedIOException(() -> reader.read(buffer)) < 0) { + return -1; + } + buffer.flip(); + return read(); + } + } + + @Override + public void close() throws IOException { + reader.close(); + } + }; } /** @@ -204,13 +198,23 @@ InputStream readBlob(String blobName) throws IOException { * @param blobSize expected size of the blob to be written */ void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { + final BlobInfo blobInfo = BlobInfo.newBuilder(bucket, blobName).build(); + final byte[] buffer = new byte[64 * 1024]; SocketAccess.doPrivilegedVoidIOException(() -> { - InputStreamContent stream = new InputStreamContent(null, inputStream); - stream.setLength(blobSize); - - Storage.Objects.Insert insert = client.objects().insert(bucket, null, stream); - insert.setName(blobName); - insert.execute(); + long bytesWritten = 0; + try (WriteChannel writer = storage.writer(blobInfo)) { + int limit; + while ((limit = inputStream.read(buffer)) >= 0) { + try { + final int bs = writer.write(ByteBuffer.wrap(buffer, 0, limit)); + assert bs == limit : "Write should return only when all bytes have been written"; + bytesWritten += limit; + } catch (final Exception e) { + throw new IOException("Failed to write blob [" + blobName + "] into bucket [" + bucket + "].", e); + } + } + } + assert blobSize == bytesWritten : "InputStream unexpected size, expected [" + blobSize + "] got [" + bytesWritten + "]"; }); } @@ -220,10 +224,11 @@ void writeBlob(String blobName, InputStream inputStream, long blobSize) throws I * @param blobName name of the blob */ void deleteBlob(String blobName) throws IOException { - if (!blobExists(blobName)) { + final BlobId blobId = BlobId.of(bucket, blobName); + final boolean deleted = SocketAccess.doPrivilegedIOException(() -> storage.delete(blobId)); + if (deleted == false) { throw new NoSuchFileException("Blob [" + blobName + "] does not exist"); } - SocketAccess.doPrivilegedIOException(() -> client.objects().delete(bucket, blobName).execute()); } /** @@ -232,7 +237,7 @@ void deleteBlob(String blobName) throws IOException { * @param prefix prefix of the buckets to delete */ void deleteBlobsByPrefix(String prefix) throws IOException { - deleteBlobs(listBlobsByPath(bucket, prefix, null).keySet()); + deleteBlobs(listBlobsByPrefix("", prefix).keySet()); } /** @@ -241,57 +246,22 @@ void deleteBlobsByPrefix(String prefix) throws IOException { * @param blobNames names of the bucket to delete 
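
The hand-rolled ByteBuffer-backed InputStream in readBlob above could also be expressed with java.nio.channels.Channels, since ReadChannel and WriteChannel are ordinary NIO channels. The following is only a sketch of that alternative under the same assumptions (same storage/bucket fields; SocketAccess privilege wrapping omitted for brevity), not what the patch implements:

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;
    import java.nio.channels.Channels;
    import java.nio.file.NoSuchFileException;

    import com.google.cloud.storage.Blob;
    import com.google.cloud.storage.BlobId;
    import com.google.cloud.storage.BlobInfo;
    import com.google.cloud.storage.Storage;

    class ChannelAdapterSketch {
        private final Storage storage;
        private final String bucket;

        ChannelAdapterSketch(Storage storage, String bucket) {
            this.storage = storage;
            this.bucket = bucket;
        }

        // ReadChannel implements ReadableByteChannel, so Channels.newInputStream
        // can stand in for the buffer-juggling anonymous InputStream above.
        InputStream readBlob(String blobName) throws IOException {
            final Blob blob = storage.get(BlobId.of(bucket, blobName));
            if (blob == null) {
                throw new NoSuchFileException("Blob [" + blobName + "] does not exist");
            }
            return Channels.newInputStream(blob.reader());
        }

        // WriteChannel implements WritableByteChannel, so the write loop becomes
        // a plain stream copy; the 64 KB buffer mirrors the one used above.
        void writeBlob(String blobName, InputStream in) throws IOException {
            final BlobInfo blobInfo = BlobInfo.newBuilder(bucket, blobName).build();
            try (OutputStream out = Channels.newOutputStream(storage.writer(blobInfo))) {
                final byte[] buffer = new byte[64 * 1024];
                int read;
                while ((read = in.read(buffer)) >= 0) {
                    out.write(buffer, 0, read);
                }
            }
        }
    }
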
*/ void deleteBlobs(Collection blobNames) throws IOException { - if (blobNames == null || blobNames.isEmpty()) { - return; - } - - if (blobNames.size() == 1) { - deleteBlob(blobNames.iterator().next()); + if ((blobNames == null) || blobNames.isEmpty()) { return; } - final List deletions = new ArrayList<>(Math.min(MAX_BATCHING_REQUESTS, blobNames.size())); - final Iterator blobs = blobNames.iterator(); - - SocketAccess.doPrivilegedVoidIOException(() -> { - while (blobs.hasNext()) { - // Create a delete request for each blob to delete - deletions.add(client.objects().delete(bucket, blobs.next())); - - if (blobs.hasNext() == false || deletions.size() == MAX_BATCHING_REQUESTS) { - try { - // Deletions are executed using a batch request - BatchRequest batch = client.batch(); - - // Used to track successful deletions - CountDown countDown = new CountDown(deletions.size()); - - for (Storage.Objects.Delete delete : deletions) { - // Queue the delete request in batch - delete.queue(batch, new JsonBatchCallback() { - @Override - public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) throws IOException { - logger.error("failed to delete blob [{}] in bucket [{}]: {}", delete.getObject(), delete.getBucket(), e - .getMessage()); - } - - @Override - public void onSuccess(Void aVoid, HttpHeaders responseHeaders) throws IOException { - countDown.countDown(); - } - }); - } - - batch.execute(); - - if (countDown.isCountedDown() == false) { - throw new IOException("Failed to delete all [" + deletions.size() + "] blobs"); - } - } finally { - deletions.clear(); - } - } + final List blobIdsToDelete = blobNames.stream().map(blobName -> BlobId.of(bucket, blobName)).collect(Collectors.toList()); + final List deletedStatuses = storage.delete(blobIdsToDelete); + assert blobIdsToDelete.size() == deletedStatuses.size(); + boolean failed = false; + for (int i = 0; i < blobIdsToDelete.size(); i++) { + if (deletedStatuses.get(i) == false) { + logger.error("Failed to delete blob [{}] in bucket [{}].", blobIdsToDelete.get(i).getName(), bucket); + failed = true; } - }); + } + if (failed) { + throw new IOException("Failed to delete all [" + blobIdsToDelete.size() + "] blobs."); + } } /** @@ -300,104 +270,30 @@ public void onSuccess(Void aVoid, HttpHeaders responseHeaders) throws IOExceptio * @param sourceBlob name of the blob to move * @param targetBlob new name of the blob in the target bucket */ - void moveBlob(String sourceBlob, String targetBlob) throws IOException { - SocketAccess.doPrivilegedIOException(() -> { + void moveBlob(String sourceBlobName, String targetBlobName) throws IOException { + final BlobId sourceBlobId = BlobId.of(bucket, sourceBlobName); + final BlobId targetBlobId = BlobId.of(bucket, targetBlobName); + final CopyRequest request = CopyRequest.newBuilder() + .setSource(sourceBlobId) + .setTarget(targetBlobId) + .build(); + SocketAccess.doPrivilegedVoidIOException(() -> { // There's no atomic "move" in GCS so we need to copy and delete - client.objects().copy(bucket, sourceBlob, bucket, targetBlob, null).execute(); - client.objects().delete(bucket, sourceBlob).execute(); - return null; + final CopyWriter copyWriter = storage.copy(request); + while (!copyWriter.isDone()) { + copyWriter.copyChunk(); + } + final Blob destBlob = copyWriter.getResult(); + final boolean deleted = storage.delete(sourceBlobId); + if ((deleted == false) || (destBlob.reload() == null)) { + throw new IOException("Failed to move source [" + sourceBlobName + "] to target [" + targetBlobName + "]."); + } }); } - 
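
One hedged observation on the bulk delete a few lines above: unlike the other network calls in this class, storage.delete(blobIdsToDelete) is not wrapped in the SocketAccess privilege helper. If the wrapper is required here as well, the call would look like this sketch (same fields and helper as the surrounding class):

    // Sketch only; assumes the SocketAccess helper used elsewhere in this class.
    final List<Boolean> deletedStatuses =
            SocketAccess.doPrivilegedIOException(() -> storage.delete(blobIdsToDelete));
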
private String buildKey(String keyPath, String s) { + private static String buildKey(String keyPath, String s) { assert s != null; return keyPath + s; } - /** - * Converts a {@link StorageObject} to a {@link PlainBlobMetaData} - */ - class BlobMetaDataConverter implements Function { - - private final String pathToRemove; - - BlobMetaDataConverter(String pathToRemove) { - this.pathToRemove = pathToRemove; - } - - @Override - public PlainBlobMetaData apply(StorageObject storageObject) { - String blobName = storageObject.getName(); - if (Strings.hasLength(pathToRemove)) { - blobName = blobName.substring(pathToRemove.length()); - } - return new PlainBlobMetaData(blobName, storageObject.getSize().longValue()); - } - } - - /** - * Spliterator can be used to list storage objects stored in a bucket. - */ - static class StorageObjectsSpliterator implements Spliterator { - - private final Storage.Objects.List list; - - StorageObjectsSpliterator(Storage client, String bucketName, String prefix, long pageSize) throws IOException { - list = SocketAccess.doPrivilegedIOException(() -> client.objects().list(bucketName)); - list.setMaxResults(pageSize); - if (prefix != null) { - list.setPrefix(prefix); - } - } - - @Override - public boolean tryAdvance(Consumer action) { - try { - // Retrieves the next page of items - Objects objects = SocketAccess.doPrivilegedIOException(list::execute); - - if ((objects == null) || (objects.getItems() == null) || (objects.getItems().isEmpty())) { - return false; - } - - // Consumes all the items - objects.getItems().forEach(action::accept); - - // Sets the page token of the next page, - // null indicates that all items have been consumed - String next = objects.getNextPageToken(); - if (next != null) { - list.setPageToken(next); - return true; - } - - return false; - } catch (Exception e) { - throw new BlobStoreException("Exception while listing objects", e); - } - } - - @Override - public Spliterator trySplit() { - return null; - } - - @Override - public long estimateSize() { - return Long.MAX_VALUE; - } - - @Override - public int characteristics() { - return 0; - } - } - - /** - * Returns a {@link Stream} of {@link StorageObject}s that are stored in a given bucket. 
- */ - static Stream blobsStream(Storage client, String bucketName, String prefix, long pageSize) throws IOException { - return StreamSupport.stream(new StorageObjectsSpliterator(client, bucketName, prefix, pageSize), false); - } - } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java index 03295c18c8ae6..fe4ff20cb87aa 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; -import com.google.api.services.storage.StorageScopes; +import com.google.auth.oauth2.ServiceAccountCredentials; + import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -30,8 +30,8 @@ import java.io.UncheckedIOException; import java.util.Collections; import java.util.HashMap; -import java.util.Locale; import java.util.Map; +import java.util.function.Function; import static org.elasticsearch.common.settings.Setting.timeSetting; @@ -44,11 +44,24 @@ public class GoogleCloudStorageClientSettings { /** A json Service Account file loaded from secure settings. */ static final Setting.AffixSetting CREDENTIALS_FILE_SETTING = Setting.affixKeySetting(PREFIX, "credentials_file", - key -> SecureSetting.secureFile(key, null)); + key -> SecureSetting.secureFile(key, null)); - /** An override for the Storage endpoint to connect to. */ + /** + * An override for the Storage endpoint to connect to. Deprecated, use host + * setting. + */ static final Setting.AffixSetting ENDPOINT_SETTING = Setting.affixKeySetting(PREFIX, "endpoint", - key -> new Setting<>(key, "", s -> s, Setting.Property.NodeScope)); + key -> Setting.simpleString(key, Setting.Property.NodeScope, Setting.Property.Deprecated)); + + /** An override for the Storage host name to connect to. */ + static final Setting.AffixSetting HOST_SETTING = Setting.affixKeySetting(PREFIX, "host", + key -> Setting.simpleString(key, + ENDPOINT_SETTING.getConcreteSetting(key.substring(0, key.length() - "host".length()) + "endpoint"), + Setting.Property.NodeScope)); + + /** An override for the Google Project ID. */ + static final Setting.AffixSetting PROJECT_ID_SETTING = Setting.affixKeySetting(PREFIX, "project_id", + key -> Setting.simpleString(key, Setting.Property.NodeScope)); /** * The timeout to establish a connection. A value of {@code -1} corresponds to an infinite timeout. A value of {@code 0} @@ -66,13 +79,23 @@ public class GoogleCloudStorageClientSettings { /** Name used by the client when it uses the Google Cloud JSON API. 
**/ static final Setting.AffixSetting APPLICATION_NAME_SETTING = Setting.affixKeySetting(PREFIX, "application_name", - key -> new Setting<>(key, "repository-gcs", s -> s, Setting.Property.NodeScope)); + key -> new Setting<>(key, "elasticsearch-repository-gcs", Function.identity(), Setting.Property.NodeScope, + Setting.Property.Deprecated)); /** The credentials used by the client to connect to the Storage endpoint **/ - private final GoogleCredential credential; + private final ServiceAccountCredentials credential; - /** The Storage root URL the client should talk to, or empty string to use the default. **/ - private final String endpoint; + /** + * The Storage root URL (hostname) the client should talk to, or null string to + * use the default. + **/ + private final String host; + + /** + * The Google project ID overriding the default way to infer it. Null value sets + * the default. + **/ + private final String projectId; /** The timeout to establish a connection **/ private final TimeValue connectTimeout; @@ -83,24 +106,29 @@ public class GoogleCloudStorageClientSettings { /** The Storage client application name **/ private final String applicationName; - GoogleCloudStorageClientSettings(final GoogleCredential credential, - final String endpoint, + GoogleCloudStorageClientSettings(final ServiceAccountCredentials credential, + final String host, final String projectId, final TimeValue connectTimeout, final TimeValue readTimeout, final String applicationName) { this.credential = credential; - this.endpoint = endpoint; + this.host = host; + this.projectId = projectId; this.connectTimeout = connectTimeout; this.readTimeout = readTimeout; this.applicationName = applicationName; } - public GoogleCredential getCredential() { + public ServiceAccountCredentials getCredential() { return credential; } - public String getEndpoint() { - return endpoint; + public String getHost() { + return host; + } + + public String getProjectId() { + return projectId; } public TimeValue getConnectTimeout() { @@ -117,7 +145,7 @@ public String getApplicationName() { public static Map load(final Settings settings) { final Map clients = new HashMap<>(); - for (String clientName: settings.getGroups(PREFIX).keySet()) { + for (final String clientName: settings.getGroups(PREFIX).keySet()) { clients.put(clientName, getClientSettings(settings, clientName)); } if (clients.containsKey("default") == false) { @@ -131,7 +159,7 @@ public static Map load(final Settings static GoogleCloudStorageClientSettings getClientSettings(final Settings settings, final String clientName) { return new GoogleCloudStorageClientSettings( loadCredential(settings, clientName), - getConfigValue(settings, clientName, ENDPOINT_SETTING), + getConfigValue(settings, clientName, HOST_SETTING), getConfigValue(settings, clientName, PROJECT_ID_SETTING), getConfigValue(settings, clientName, CONNECT_TIMEOUT_SETTING), getConfigValue(settings, clientName, READ_TIMEOUT_SETTING), getConfigValue(settings, clientName, APPLICATION_NAME_SETTING) @@ -147,7 +175,7 @@ static GoogleCloudStorageClientSettings getClientSettings(final Settings setting * * @return the {@link GoogleCredential} to use for the given client, {@code null} if no service account is defined. 
*/ - static GoogleCredential loadCredential(final Settings settings, final String clientName) { + static ServiceAccountCredentials loadCredential(final Settings settings, final String clientName) { try { if (CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName).exists(settings) == false) { // explicitly returning null here so that the default credential @@ -155,19 +183,15 @@ static GoogleCredential loadCredential(final Settings settings, final String cli return null; } try (InputStream credStream = CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName).get(settings)) { - GoogleCredential credential = GoogleCredential.fromStream(credStream); - if (credential.createScopedRequired()) { - credential = credential.createScoped(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)); - } - return credential; + return ServiceAccountCredentials.fromStream(credStream); } - } catch (IOException e) { + } catch (final IOException e) { throw new UncheckedIOException(e); } } private static T getConfigValue(final Settings settings, final String clientName, final Setting.AffixSetting clientSetting) { - Setting concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName); + final Setting concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName); return concreteSetting.get(settings); } } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java index ef24cd959e55b..16d7f654012c2 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java @@ -19,21 +19,6 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.client.auth.oauth2.TokenRequest; -import com.google.api.client.auth.oauth2.TokenResponse; -import com.google.api.client.googleapis.json.GoogleJsonError; -import com.google.api.client.http.GenericUrl; -import com.google.api.client.http.HttpHeaders; -import com.google.api.client.json.GenericJson; -import com.google.api.client.json.webtoken.JsonWebSignature; -import com.google.api.client.json.webtoken.JsonWebToken; -import com.google.api.client.util.ClassInfo; -import com.google.api.client.util.Data; -import com.google.api.services.storage.Storage; -import com.google.api.services.storage.model.Bucket; -import com.google.api.services.storage.model.Objects; -import com.google.api.services.storage.model.StorageObject; -import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -42,8 +27,6 @@ import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.Repository; -import java.security.AccessController; -import java.security.PrivilegedAction; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -51,63 +34,6 @@ public class GoogleCloudStoragePlugin extends Plugin implements RepositoryPlugin { - static { - /* - * Google HTTP client changes access levels because its silly and we - * can't allow that on any old stack stack so we pull it here, up front, - * so we can cleanly check the permissions for it. 
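
For reference, the new host and project_id overrides added above resolve through the usual affix-setting machinery. A small sketch of that resolution follows, assuming it runs in the same package as GoogleCloudStorageClientSettings (the setting keys are the ones introduced above, the values are hypothetical, and the secure credentials_file setting is left unset so getCredential() stays null):

    import org.elasticsearch.common.settings.Settings;

    public class ClientSettingsSketch {
        public static void main(String[] args) {
            final Settings settings = Settings.builder()
                    .put("gcs.client.default.host", "http://localhost:9090")
                    .put("gcs.client.default.project_id", "my-project")
                    .build();
            // Package-private factory from the class above.
            final GoogleCloudStorageClientSettings client =
                    GoogleCloudStorageClientSettings.getClientSettings(settings, "default");
            assert "http://localhost:9090".equals(client.getHost());
            assert "my-project".equals(client.getProjectId());
        }
    }
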
Without this changing - * the permission can fail if any part of core is on the stack because - * our plugin permissions don't allow core to "reach through" plugins to - * change the permission. Because that'd be silly. - */ - SpecialPermission.check(); - AccessController.doPrivileged((PrivilegedAction) () -> { - // ClassInfo put in cache all the fields of a given class - // that are annoted with @Key; at the same time it changes - // the field access level using setAccessible(). Calling - // them here put the ClassInfo in cache (they are never evicted) - // before the SecurityManager is installed. - ClassInfo.of(HttpHeaders.class, true); - - ClassInfo.of(JsonWebSignature.Header.class, false); - ClassInfo.of(JsonWebToken.Payload.class, false); - - ClassInfo.of(TokenRequest.class, false); - ClassInfo.of(TokenResponse.class, false); - - ClassInfo.of(GenericJson.class, false); - ClassInfo.of(GenericUrl.class, false); - - Data.nullOf(GoogleJsonError.ErrorInfo.class); - ClassInfo.of(GoogleJsonError.class, false); - - Data.nullOf(Bucket.Cors.class); - ClassInfo.of(Bucket.class, false); - ClassInfo.of(Bucket.Cors.class, false); - ClassInfo.of(Bucket.Lifecycle.class, false); - ClassInfo.of(Bucket.Logging.class, false); - ClassInfo.of(Bucket.Owner.class, false); - ClassInfo.of(Bucket.Versioning.class, false); - ClassInfo.of(Bucket.Website.class, false); - - ClassInfo.of(StorageObject.class, false); - ClassInfo.of(StorageObject.Owner.class, false); - - ClassInfo.of(Objects.class, false); - - ClassInfo.of(Storage.Buckets.Get.class, false); - ClassInfo.of(Storage.Buckets.Insert.class, false); - - ClassInfo.of(Storage.Objects.Get.class, false); - ClassInfo.of(Storage.Objects.Insert.class, false); - ClassInfo.of(Storage.Objects.Delete.class, false); - ClassInfo.of(Storage.Objects.Copy.class, false); - ClassInfo.of(Storage.Objects.List.class, false); - - return null; - }); - } - private final Map clientsSettings; public GoogleCloudStoragePlugin(final Settings settings) { @@ -134,6 +60,8 @@ public List> getSettings() { return Arrays.asList( GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING, GoogleCloudStorageClientSettings.ENDPOINT_SETTING, + GoogleCloudStorageClientSettings.HOST_SETTING, + GoogleCloudStorageClientSettings.PROJECT_ID_SETTING, GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING, GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING, GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING); diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java index e193b8238b8d2..2cf9b939ae8f9 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java @@ -19,7 +19,7 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.services.storage.Storage; +import com.google.cloud.storage.Storage; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; @@ -27,7 +27,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import 
org.elasticsearch.env.Environment; import org.elasticsearch.repositories.RepositoryException; @@ -39,7 +38,6 @@ import static org.elasticsearch.common.settings.Setting.boolSetting; import static org.elasticsearch.common.settings.Setting.byteSizeSetting; import static org.elasticsearch.common.settings.Setting.simpleString; -import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; class GoogleCloudStorageRepository extends BlobStoreRepository { @@ -69,12 +67,12 @@ class GoogleCloudStorageRepository extends BlobStoreRepository { GoogleCloudStorageService storageService) throws Exception { super(metadata, environment.settings(), namedXContentRegistry); - String bucket = getSetting(BUCKET, metadata); - String clientName = CLIENT_NAME.get(metadata.settings()); - String basePath = BASE_PATH.get(metadata.settings()); + final String bucket = getSetting(BUCKET, metadata); + final String clientName = CLIENT_NAME.get(metadata.settings()); + final String basePath = BASE_PATH.get(metadata.settings()); if (Strings.hasLength(basePath)) { BlobPath path = new BlobPath(); - for (String elem : basePath.split("/")) { + for (final String elem : basePath.split("/")) { path = path.add(elem); } this.basePath = path; @@ -87,7 +85,7 @@ class GoogleCloudStorageRepository extends BlobStoreRepository { logger.debug("using bucket [{}], base_path [{}], chunk_size [{}], compress [{}]", bucket, basePath, chunkSize, compress); - Storage client = SocketAccess.doPrivilegedIOException(() -> storageService.createClient(clientName)); + final Storage client = SocketAccess.doPrivilegedIOException(() -> storageService.createClient(clientName)); this.blobStore = new GoogleCloudStorageBlobStore(settings, bucket, client); } @@ -116,11 +114,11 @@ protected ByteSizeValue chunkSize() { * Get a given setting from the repository settings, throwing a {@link RepositoryException} if the setting does not exist or is empty. 
*/ static T getSetting(Setting setting, RepositoryMetaData metadata) { - T value = setting.get(metadata.settings()); + final T value = setting.get(metadata.settings()); if (value == null) { throw new RepositoryException(metadata.name(), "Setting [" + setting.getKey() + "] is not defined for repository"); } - if ((value instanceof String) && (Strings.hasText((String) value)) == false) { + if ((value instanceof String) && ((Strings.hasText((String) value)) == false)) { throw new RepositoryException(metadata.name(), "Setting [" + setting.getKey() + "] is empty for repository"); } return value; diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index bccc5e0ffdc5c..f638c3d34a856 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -19,74 +19,85 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; -import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; -import com.google.api.client.http.HttpBackOffIOExceptionHandler; -import com.google.api.client.http.HttpBackOffUnsuccessfulResponseHandler; -import com.google.api.client.http.HttpRequest; -import com.google.api.client.http.HttpRequestInitializer; -import com.google.api.client.http.HttpTransport; -import com.google.api.client.http.HttpUnsuccessfulResponseHandler; -import com.google.api.client.json.jackson2.JacksonFactory; -import com.google.api.client.util.ExponentialBackOff; -import com.google.api.services.storage.Storage; +import com.google.api.gax.retrying.RetrySettings; +import com.google.cloud.http.HttpTransportOptions; +import com.google.cloud.storage.Storage; +import com.google.cloud.storage.StorageOptions; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.Environment; +import org.threeten.bp.Duration; -import java.io.IOException; import java.util.Map; public class GoogleCloudStorageService extends AbstractComponent { /** Clients settings identified by client name. */ private final Map clientsSettings; + private final RetrySettings retrySettings; - public GoogleCloudStorageService(final Environment environment, final Map clientsSettings) { + public GoogleCloudStorageService(Environment environment, Map clientsSettings) { super(environment.settings()); this.clientsSettings = clientsSettings; + this.retrySettings = RetrySettings.newBuilder() + .setInitialRetryDelay(Duration.ofMillis(100)) + .setMaxRetryDelay(Duration.ofMillis(6000)) + .setTotalTimeout(Duration.ofMillis(900000)) + .setRetryDelayMultiplier(1.5d) + .setJittered(true) + .build(); } /** * Creates a client that can be used to manage Google Cloud Storage objects. 
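
The RetrySettings built above appear to carry over the values of the ExponentialBackOff that the removed DefaultHttpRequestInitializer (further below) used to configure. A sketch restating that builder with the old calls noted as comments (jitter stands in for the old 0.5 randomization factor):

    final RetrySettings retrySettings = RetrySettings.newBuilder()
            .setInitialRetryDelay(Duration.ofMillis(100))   // was setInitialIntervalMillis(100)
            .setMaxRetryDelay(Duration.ofMillis(6000))      // was setMaxIntervalMillis(6000)
            .setTotalTimeout(Duration.ofMillis(900000))     // was setMaxElapsedTimeMillis(900000)
            .setRetryDelayMultiplier(1.5d)                  // was setMultiplier(1.5)
            .setJittered(true)                              // was setRandomizationFactor(0.5)
            .build();
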
* - * @param clientName name of client settings to use from secure settings + * @param clientName + * name of client settings to use from secure settings * @return a Client instance that can be used to manage Storage objects */ - public Storage createClient(final String clientName) throws Exception { + public Storage createClient(String clientName) { final GoogleCloudStorageClientSettings clientSettings = clientsSettings.get(clientName); if (clientSettings == null) { - throw new IllegalArgumentException("Unknown client name [" + clientName + "]. Existing client configs: " + - Strings.collectionToDelimitedString(clientsSettings.keySet(), ",")); + throw new IllegalArgumentException("Unknown client name [" + clientName + "]. Existing client configs: " + + Strings.collectionToDelimitedString(clientsSettings.keySet(), ",")); } - - HttpTransport transport = GoogleNetHttpTransport.newTrustedTransport(); - HttpRequestInitializer requestInitializer = createRequestInitializer(clientSettings); - - Storage.Builder storage = new Storage.Builder(transport, JacksonFactory.getDefaultInstance(), requestInitializer); - if (Strings.hasLength(clientSettings.getApplicationName())) { - storage.setApplicationName(clientSettings.getApplicationName()); + final HttpTransportOptions httpTransportOptions = HttpTransportOptions.newBuilder() + .setConnectTimeout(toTimeout(clientSettings.getConnectTimeout())) + .setReadTimeout(toTimeout(clientSettings.getReadTimeout())) + .build(); + final StorageOptions.Builder storageOptionsBuilder = StorageOptions.newBuilder() + .setRetrySettings(retrySettings) + .setTransportOptions(httpTransportOptions) + .setHeaderProvider(() -> { + final MapBuilder mapBuilder = MapBuilder.newMapBuilder(); + if (Strings.hasLength(clientSettings.getApplicationName())) { + mapBuilder.put("user-agent", clientSettings.getApplicationName()); + } + return mapBuilder.immutableMap(); + }); + if (Strings.hasLength(clientSettings.getProjectId())) { + storageOptionsBuilder.setProjectId(clientSettings.getProjectId()); } - if (Strings.hasLength(clientSettings.getEndpoint())) { - storage.setRootUrl(clientSettings.getEndpoint()); + if (Strings.hasLength(clientSettings.getHost())) { + storageOptionsBuilder.setHost(clientSettings.getHost()); } - return storage.build(); - } - - static HttpRequestInitializer createRequestInitializer(final GoogleCloudStorageClientSettings settings) throws IOException { - GoogleCredential credential = settings.getCredential(); - if (credential == null) { - credential = GoogleCredential.getApplicationDefault(); + if (clientSettings.getCredential() != null) { + storageOptionsBuilder.setCredentials(clientSettings.getCredential()); } - return new DefaultHttpRequestInitializer(credential, toTimeout(settings.getConnectTimeout()), toTimeout(settings.getReadTimeout())); + return storageOptionsBuilder.build().getService(); } - /** Converts timeout values from the settings to a timeout value for the Google Cloud SDK **/ - static Integer toTimeout(final TimeValue timeout) { + /** + * Converts timeout values from the settings to a timeout value for the Google + * Cloud SDK + **/ + private static Integer toTimeout(TimeValue timeout) { // Null or zero in settings means the default timeout - if (timeout == null || TimeValue.ZERO.equals(timeout)) { - return null; + if ((timeout == null) || TimeValue.ZERO.equals(timeout)) { + // negative value means using the default value + return -1; } // -1 means infinite timeout if (TimeValue.MINUS_ONE.equals(timeout)) { @@ -96,51 +107,4 @@ static Integer 
toTimeout(final TimeValue timeout) { return Math.toIntExact(timeout.getMillis()); } - /** - * HTTP request initializer that set timeouts and backoff handler while deferring authentication to GoogleCredential. - * See https://cloud.google.com/storage/transfer/create-client#retry - */ - static class DefaultHttpRequestInitializer implements HttpRequestInitializer { - - private final Integer connectTimeout; - private final Integer readTimeout; - private final GoogleCredential credential; - - DefaultHttpRequestInitializer(GoogleCredential credential, Integer connectTimeoutMillis, Integer readTimeoutMillis) { - this.credential = credential; - this.connectTimeout = connectTimeoutMillis; - this.readTimeout = readTimeoutMillis; - } - - @Override - public void initialize(HttpRequest request) { - if (connectTimeout != null) { - request.setConnectTimeout(connectTimeout); - } - if (readTimeout != null) { - request.setReadTimeout(readTimeout); - } - - request.setIOExceptionHandler(new HttpBackOffIOExceptionHandler(newBackOff())); - request.setInterceptor(credential); - - final HttpUnsuccessfulResponseHandler handler = new HttpBackOffUnsuccessfulResponseHandler(newBackOff()); - request.setUnsuccessfulResponseHandler((req, resp, supportsRetry) -> { - // Let the credential handle the response. If it failed, we rely on our backoff handler - return credential.handleResponse(req, resp, supportsRetry) || handler.handleResponse(req, resp, supportsRetry); - } - ); - } - - private ExponentialBackOff newBackOff() { - return new ExponentialBackOff.Builder() - .setInitialIntervalMillis(100) - .setMaxIntervalMillis(6000) - .setMaxElapsedTimeMillis(900000) - .setMultiplier(1.5) - .setRandomizationFactor(0.5) - .build(); - } - } - } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java index 325cea132beb6..73846ac4894ef 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java @@ -19,27 +19,13 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.json.GoogleJsonError; -import com.google.api.client.googleapis.json.GoogleJsonResponseException; -import com.google.api.client.http.AbstractInputStreamContent; -import com.google.api.client.http.HttpHeaders; -import com.google.api.client.http.HttpMethods; -import com.google.api.client.http.HttpRequest; -import com.google.api.client.http.HttpRequestInitializer; -import com.google.api.client.http.HttpResponseException; -import com.google.api.client.http.LowLevelHttpRequest; -import com.google.api.client.http.LowLevelHttpResponse; -import com.google.api.client.http.MultipartContent; -import com.google.api.client.json.JsonFactory; -import com.google.api.client.testing.http.MockHttpTransport; -import com.google.api.client.testing.http.MockLowLevelHttpRequest; -import com.google.api.client.testing.http.MockLowLevelHttpResponse; -import com.google.api.services.storage.Storage; -import com.google.api.services.storage.model.Bucket; -import com.google.api.services.storage.model.StorageObject; import org.elasticsearch.common.io.Streams; import org.elasticsearch.rest.RestStatus; +import com.google.cloud.storage.Storage; +import com.google.cloud.storage.Storage.BlobListOption; +import com.google.cloud.storage.Storage.CopyRequest; + import java.io.ByteArrayInputStream; import 
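
To make the timeout conversion above easier to follow: the settings-level convention (-1 = infinite, 0 or null = default) is translated into the HttpTransportOptions convention (0 = infinite, negative = library default). A few illustrative checks, written as they would appear in a same-class test since toTimeout is private:

    assert toTimeout(null) == -1;                               // null/zero -> library default
    assert toTimeout(TimeValue.ZERO) == -1;
    assert toTimeout(TimeValue.MINUS_ONE) == 0;                 // -1 in settings -> infinite
    assert toTimeout(TimeValue.timeValueSeconds(5)) == 5000;    // otherwise -> millis as int
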
java.io.ByteArrayOutputStream; import java.io.IOException; @@ -86,7 +72,7 @@ public Get get(String getBucket) { @Override public Bucket execute() { if (bucketName.equals(getBucket())) { - Bucket bucket = new Bucket(); + final Bucket bucket = new Bucket(); bucket.setId(bucketName); return bucket; } else { @@ -111,7 +97,7 @@ public StorageObject execute() throws IOException { throw newObjectNotFoundException(getObject()); } - StorageObject storageObject = new StorageObject(); + final StorageObject storageObject = new StorageObject(); storageObject.setId(getObject()); return storageObject; } @@ -138,7 +124,7 @@ public StorageObject execute() throws IOException { throw newBucketNotFoundException(getBucket()); } - ByteArrayOutputStream out = new ByteArrayOutputStream(); + final ByteArrayOutputStream out = new ByteArrayOutputStream(); Streams.copy(insertStream.getInputStream(), out); blobs.put(getName(), out.toByteArray()); return null; @@ -158,9 +144,9 @@ public com.google.api.services.storage.model.Objects execute() throws IOExceptio final com.google.api.services.storage.model.Objects objects = new com.google.api.services.storage.model.Objects(); final java.util.List storageObjects = new ArrayList<>(); - for (Entry blob : blobs.entrySet()) { - if (getPrefix() == null || blob.getKey().startsWith(getPrefix())) { - StorageObject storageObject = new StorageObject(); + for (final Entry blob : blobs.entrySet()) { + if ((getPrefix() == null) || blob.getKey().startsWith(getPrefix())) { + final StorageObject storageObject = new StorageObject(); storageObject.setId(blob.getKey()); storageObject.setName(blob.getKey()); storageObject.setSize(BigInteger.valueOf((long) blob.getValue().length)); @@ -193,7 +179,7 @@ public Void execute() throws IOException { @Override public HttpRequest buildHttpRequest() throws IOException { - HttpRequest httpRequest = super.buildHttpRequest(); + final HttpRequest httpRequest = super.buildHttpRequest(); httpRequest.getHeaders().put(DELETION_HEADER, getObject()); return httpRequest; } @@ -218,7 +204,7 @@ public StorageObject execute() throws IOException { } blobs.put(getDestinationObject(), bytes); - StorageObject storageObject = new StorageObject(); + final StorageObject storageObject = new StorageObject(); storageObject.setId(getDestinationObject()); return storageObject; } @@ -227,12 +213,12 @@ public StorageObject execute() throws IOException { } private static GoogleJsonResponseException newBucketNotFoundException(final String bucket) { - HttpResponseException.Builder builder = new HttpResponseException.Builder(404, "Bucket not found: " + bucket, new HttpHeaders()); + final HttpResponseException.Builder builder = new HttpResponseException.Builder(404, "Bucket not found: " + bucket, new HttpHeaders()); return new GoogleJsonResponseException(builder, new GoogleJsonError()); } private static GoogleJsonResponseException newObjectNotFoundException(final String object) { - HttpResponseException.Builder builder = new HttpResponseException.Builder(404, "Object not found: " + object, new HttpHeaders()); + final HttpResponseException.Builder builder = new HttpResponseException.Builder(404, "Object not found: " + object, new HttpHeaders()); return new GoogleJsonResponseException(builder, new GoogleJsonError()); } @@ -269,7 +255,7 @@ public LowLevelHttpResponse execute() throws IOException { getStreamingContent().writeTo(out); Streams.readAllLines(new ByteArrayInputStream(out.toByteArray()), line -> { - if (line != null && line.startsWith(DELETION_HEADER)) { + if ((line != null) && 
line.startsWith(DELETION_HEADER)) { builder.append("--__END_OF_PART__\r\n"); builder.append("Content-Type: application/http").append("\r\n"); builder.append("\r\n"); @@ -292,7 +278,7 @@ public LowLevelHttpResponse execute() throws IOException { builder.append("--__END_OF_PART__--"); } - MockLowLevelHttpResponse response = new MockLowLevelHttpResponse(); + final MockLowLevelHttpResponse response = new MockLowLevelHttpResponse(); response.setStatusCode(200); response.setContent(builder.toString()); response.setContentType(contentType); From dda51f48761269ccc1c6ac7df4bdbe4ada3b30d1 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 18 Apr 2018 12:25:47 +0300 Subject: [PATCH 02/45] Mock is completed --- .../gcs/GoogleCloudStorageBlobStore.java | 3 - .../gcs/GoogleCloudStorageClientSettings.java | 3 +- ...eCloudStorageBlobStoreRepositoryTests.java | 6 +- ...GoogleCloudStorageClientSettingsTests.java | 60 +- .../repositories/gcs/MockStorage.java | 715 ++++++++++++------ 5 files changed, 540 insertions(+), 247 deletions(-) diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index ea9f540a704a6..bdb70fc907ee8 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -280,9 +280,6 @@ void moveBlob(String sourceBlobName, String targetBlobName) throws IOException { SocketAccess.doPrivilegedVoidIOException(() -> { // There's no atomic "move" in GCS so we need to copy and delete final CopyWriter copyWriter = storage.copy(request); - while (!copyWriter.isDone()) { - copyWriter.copyChunk(); - } final Blob destBlob = copyWriter.getResult(); final boolean deleted = storage.delete(sourceBlobId); if ((deleted == false) || (destBlob.reload() == null)) { diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java index fe4ff20cb87aa..eaf1e995ee264 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -107,7 +107,8 @@ public class GoogleCloudStorageClientSettings { private final String applicationName; GoogleCloudStorageClientSettings(final ServiceAccountCredentials credential, - final String host, final String projectId, + final String host, + final String projectId, final TimeValue connectTimeout, final TimeValue readTimeout, final String applicationName) { diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 19551f3b082fa..260cc7a93103c 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -19,7 +19,9 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.services.storage.Storage; 
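
On the moveBlob simplification just above (second commit): CopyWriter.getResult() itself drives the copy to completion before returning the destination Blob, so the explicit isDone()/copyChunk() polling loop was redundant. A sketch of the resulting call sequence, using the same request and storage names as the method above:

    final CopyWriter copyWriter = storage.copy(request);
    final Blob copied = copyWriter.getResult(); // blocks until the server-side rewrite finishes
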
+import com.google.cloud.storage.Blob; +import com.google.cloud.storage.Storage; + import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -43,7 +45,7 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepos // Static list of blobs shared among all nodes in order to act like a remote repository service: // all nodes must see the same content - private static final ConcurrentMap blobs = new ConcurrentHashMap<>(); + private static final ConcurrentMap blobs = new ConcurrentHashMap<>(); @Override protected Collection> nodePlugins() { diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java index badd86cd8a2b3..2f07cb1ce2435 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; -import com.google.api.services.storage.StorageScopes; +import com.google.auth.oauth2.ServiceAccountCredentials; + import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; @@ -30,7 +30,6 @@ import java.security.KeyPair; import java.security.KeyPairGenerator; import java.util.Base64; -import java.util.Collections; import java.util.HashMap; import java.util.Locale; import java.util.Map; @@ -38,7 +37,8 @@ import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING; -import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.ENDPOINT_SETTING; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.HOST_SETTING; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.PROJECT_ID_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.getClientSettings; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.loadCredential; @@ -66,7 +66,8 @@ public void testLoad() throws Exception { assertNotNull(expectedClientSettings); assertGoogleCredential(expectedClientSettings.getCredential(), actualClientSettings.getCredential()); - assertEquals(expectedClientSettings.getEndpoint(), actualClientSettings.getEndpoint()); + assertEquals(expectedClientSettings.getHost(), actualClientSettings.getHost()); + assertEquals(expectedClientSettings.getProjectId(), actualClientSettings.getProjectId()); assertEquals(expectedClientSettings.getConnectTimeout(), actualClientSettings.getConnectTimeout()); assertEquals(expectedClientSettings.getReadTimeout(), actualClientSettings.getReadTimeout()); assertEquals(expectedClientSettings.getApplicationName(), actualClientSettings.getApplicationName()); @@ -109,16 +110,24 
@@ private static GoogleCloudStorageClientSettings randomClient(final String client final Settings.Builder settings, final MockSecureSettings secureSettings) throws Exception { - Tuple credentials = randomCredential(clientName); - GoogleCredential credential = credentials.v1(); + Tuple credentials = randomCredential(clientName); + ServiceAccountCredentials credential = credentials.v1(); secureSettings.setFile(CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName).getKey(), credentials.v2()); - String endpoint; + String host; + if (randomBoolean()) { + host = randomAlphaOfLength(5); + settings.put(HOST_SETTING.getConcreteSettingForNamespace(clientName).getKey(), host); + } else { + host = HOST_SETTING.getDefault(Settings.EMPTY); + } + + String projectId; if (randomBoolean()) { - endpoint = randomAlphaOfLength(5); - settings.put(ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), endpoint); + projectId = randomAlphaOfLength(5); + settings.put(PROJECT_ID_SETTING.getConcreteSettingForNamespace(clientName).getKey(), projectId); } else { - endpoint = ENDPOINT_SETTING.getDefault(Settings.EMPTY); + projectId = PROJECT_ID_SETTING.getDefault(Settings.EMPTY); } TimeValue connectTimeout; @@ -145,19 +154,18 @@ private static GoogleCloudStorageClientSettings randomClient(final String client applicationName = APPLICATION_NAME_SETTING.getDefault(Settings.EMPTY); } - return new GoogleCloudStorageClientSettings(credential, endpoint, connectTimeout, readTimeout, applicationName); + return new GoogleCloudStorageClientSettings(credential, host, projectId, connectTimeout, readTimeout, applicationName); } /** Generates a random GoogleCredential along with its corresponding Service Account file provided as a byte array **/ - private static Tuple randomCredential(final String clientName) throws Exception { + private static Tuple randomCredential(final String clientName) throws Exception { KeyPair keyPair = KeyPairGenerator.getInstance("RSA").generateKeyPair(); - GoogleCredential.Builder credentialBuilder = new GoogleCredential.Builder(); - credentialBuilder.setServiceAccountId(clientName); - credentialBuilder.setServiceAccountProjectId("project_id_" + clientName); - credentialBuilder.setServiceAccountScopes(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)); - credentialBuilder.setServiceAccountPrivateKey(keyPair.getPrivate()); - credentialBuilder.setServiceAccountPrivateKeyId("private_key_id_" + clientName); + ServiceAccountCredentials.Builder credentialBuilder = ServiceAccountCredentials.newBuilder(); + credentialBuilder.setClientId(clientName); + credentialBuilder.setProjectId("project_id_" + clientName); + credentialBuilder.setPrivateKey(keyPair.getPrivate()); + credentialBuilder.setPrivateKeyId("private_key_id_" + clientName); String encodedPrivateKey = Base64.getEncoder().encodeToString(keyPair.getPrivate().getEncoded()); String serviceAccount = "{\"type\":\"service_account\"," + @@ -182,14 +190,16 @@ private static TimeValue randomTimeout() { return randomFrom(TimeValue.MINUS_ONE, TimeValue.ZERO, TimeValue.parseTimeValue(randomPositiveTimeValue(), "test")); } - private static void assertGoogleCredential(final GoogleCredential expected, final GoogleCredential actual) { + private static void assertGoogleCredential(final ServiceAccountCredentials expected, final ServiceAccountCredentials actual) { if (expected != null) { assertEquals(expected.getServiceAccountUser(), actual.getServiceAccountUser()); - assertEquals(expected.getServiceAccountId(), 
actual.getServiceAccountId()); - assertEquals(expected.getServiceAccountProjectId(), actual.getServiceAccountProjectId()); - assertEquals(expected.getServiceAccountScopesAsString(), actual.getServiceAccountScopesAsString()); - assertEquals(expected.getServiceAccountPrivateKey(), actual.getServiceAccountPrivateKey()); - assertEquals(expected.getServiceAccountPrivateKeyId(), actual.getServiceAccountPrivateKeyId()); + assertEquals(expected.getClientId(), actual.getClientId()); + assertEquals(expected.getClientEmail(), actual.getClientEmail()); + assertEquals(expected.getAccount(), actual.getAccount()); + assertEquals(expected.getProjectId(), actual.getProjectId()); + assertEquals(expected.getScopes(), actual.getScopes()); + assertEquals(expected.getPrivateKey(), actual.getPrivateKey()); + assertEquals(expected.getPrivateKeyId(), actual.getPrivateKeyId()); } else { assertNull(actual); } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java index 73846ac4894ef..be141a2d9fbe6 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java @@ -19,275 +19,558 @@ package org.elasticsearch.repositories.gcs; -import org.elasticsearch.common.io.Streams; -import org.elasticsearch.rest.RestStatus; - +import org.mockito.Matchers; + +import com.google.api.gax.paging.Page; +import com.google.cloud.Policy; +import com.google.cloud.ReadChannel; +import com.google.cloud.RestorableState; +import com.google.cloud.WriteChannel; +import com.google.cloud.storage.Acl; +import com.google.cloud.storage.Acl.Entity; +import com.google.cloud.storage.Blob; +import com.google.cloud.storage.BlobId; +import com.google.cloud.storage.BlobInfo; +import com.google.cloud.storage.Bucket; +import com.google.cloud.storage.BucketInfo; +import com.google.cloud.storage.CopyWriter; +import com.google.cloud.storage.ServiceAccount; import com.google.cloud.storage.Storage; -import com.google.cloud.storage.Storage.BlobListOption; -import com.google.cloud.storage.Storage.CopyRequest; +import com.google.cloud.storage.spi.v1.StorageRpc; +import com.google.cloud.storage.StorageBatch; +import com.google.cloud.storage.StorageOptions; -import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; -import java.math.BigInteger; +import java.lang.reflect.Method; +import java.net.URL; +import java.nio.ByteBuffer; import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.TimeUnit; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; /** * {@link MockStorage} mocks a {@link Storage} client by storing all the blobs * in a given concurrent map. 
*/ -class MockStorage extends Storage { +class MockStorage implements Storage { + + private final Bucket theBucket; + private final ConcurrentMap blobsMap; + + MockStorage(final String bucketName, final ConcurrentMap blobs) { + this.blobsMap = blobs; + // mock bucket + this.theBucket = mock(Bucket.class); + when(this.theBucket.getName()).thenReturn(bucketName); + doAnswer(invocation -> { + assert invocation.getArguments().length == 1 : "Only a single filter is mocked"; + final BlobListOption prefixFilter = (BlobListOption) invocation.getArguments()[0]; + final Method optionMethod = BlobListOption.class.getDeclaredMethod("getRpcOption"); + optionMethod.setAccessible(true); + assert StorageRpc.Option.PREFIX.equals(optionMethod.invoke(prefixFilter)) : "Only the prefix filter is mocked"; + final Method valueMethod = BlobListOption.class.getDeclaredMethod("getValue"); + valueMethod.setAccessible(true); + final String prefixValue = (String) valueMethod.invoke(prefixFilter); + return new Page() { + @Override + public boolean hasNextPage() { + return false; + } - /* A custom HTTP header name used to propagate the name of the blobs to delete in batch requests */ - private static final String DELETION_HEADER = "x-blob-to-delete"; + @Override + public String getNextPageToken() { + return null; + } - private final String bucketName; - private final ConcurrentMap blobs; + @Override + public Page getNextPage() { + return null; + } + + @Override + public Iterable iterateAll() { + return getValues(); + } - MockStorage(final String bucket, final ConcurrentMap blobs) { - super(new MockedHttpTransport(blobs), mock(JsonFactory.class), mock(HttpRequestInitializer.class)); - this.bucketName = bucket; - this.blobs = blobs; + @Override + public Iterable getValues() { + return () -> blobs.entrySet() + .stream() + .filter(entry1 -> entry1.getKey().startsWith(prefixValue)) + .map(entry2 -> entry2.getValue()) + .iterator(); + } + }; + }).when(this.theBucket).list(Matchers.anyVararg()); } @Override - public Buckets buckets() { - return new MockBuckets(); + public StorageOptions getOptions() { + return StorageOptions.getDefaultInstance(); } @Override - public Objects objects() { - return new MockObjects(); + public Bucket create(BucketInfo bucketInfo, BucketTargetOption... options) { + throw new RuntimeException("Mock not implemented"); } - class MockBuckets extends Buckets { + @Override + public Blob create(BlobInfo blobInfo, BlobTargetOption... options) { + throw new RuntimeException("Mock not implemented"); + } - @Override - public Get get(String getBucket) { - return new Get(getBucket) { - @Override - public Bucket execute() { - if (bucketName.equals(getBucket())) { - final Bucket bucket = new Bucket(); - bucket.setId(bucketName); - return bucket; - } else { - return null; - } - } - }; - } + @Override + public Blob create(BlobInfo blobInfo, byte[] content, BlobTargetOption... options) { + throw new RuntimeException("Mock not implemented"); } - class MockObjects extends Objects { + @Override + public Blob create(BlobInfo blobInfo, InputStream content, BlobWriteOption... 
options) { + throw new RuntimeException("Mock not implemented"); + } - @Override - public Get get(String getBucket, String getObject) { - return new Get(getBucket, getObject) { - @Override - public StorageObject execute() throws IOException { - if (bucketName.equals(getBucket()) == false) { - throw newBucketNotFoundException(getBucket()); - } - if (blobs.containsKey(getObject()) == false) { - throw newObjectNotFoundException(getObject()); - } - - final StorageObject storageObject = new StorageObject(); - storageObject.setId(getObject()); - return storageObject; - } + @Override + public Bucket get(String bucketName, BucketGetOption... options) { + assert bucketName.equals(this.theBucket.getName()) : "Only a single bucket is mocked"; + return theBucket; + } - @Override - public InputStream executeMediaAsInputStream() throws IOException { - if (bucketName.equals(getBucket()) == false) { - throw newBucketNotFoundException(getBucket()); - } - if (blobs.containsKey(getObject()) == false) { - throw newObjectNotFoundException(getObject()); - } - return new ByteArrayInputStream(blobs.get(getObject())); - } - }; - } + @Override + public Blob get(String bucketName, String blobName, BlobGetOption... options) { + assert bucketName.equals(this.theBucket.getName()) : "Only a single bucket is mocked"; + return blobsMap.get(blobName); + } - @Override - public Insert insert(String insertBucket, StorageObject insertObject, AbstractInputStreamContent insertStream) { - return new Insert(insertBucket, insertObject) { - @Override - public StorageObject execute() throws IOException { - if (bucketName.equals(getBucket()) == false) { - throw newBucketNotFoundException(getBucket()); - } - - final ByteArrayOutputStream out = new ByteArrayOutputStream(); - Streams.copy(insertStream.getInputStream(), out); - blobs.put(getName(), out.toByteArray()); - return null; - } - }; - } + @Override + public Blob get(BlobId blob, BlobGetOption... options) { + return get(blob.getBucket(), blob.getName()); + } - @Override - public List list(String listBucket) { - return new List(listBucket) { - @Override - public com.google.api.services.storage.model.Objects execute() throws IOException { - if (bucketName.equals(getBucket()) == false) { - throw newBucketNotFoundException(getBucket()); - } - - final com.google.api.services.storage.model.Objects objects = new com.google.api.services.storage.model.Objects(); - - final java.util.List storageObjects = new ArrayList<>(); - for (final Entry blob : blobs.entrySet()) { - if ((getPrefix() == null) || blob.getKey().startsWith(getPrefix())) { - final StorageObject storageObject = new StorageObject(); - storageObject.setId(blob.getKey()); - storageObject.setName(blob.getKey()); - storageObject.setSize(BigInteger.valueOf((long) blob.getValue().length)); - storageObjects.add(storageObject); - } - } - - objects.setItems(storageObjects); - return objects; + @Override + public Blob get(BlobId blob) { + return get(blob.getBucket(), blob.getName()); + } + + @Override + public Page list(BucketListOption... options) { + return new Page() { + @Override + public boolean hasNextPage() { + return false; + } + @Override + public String getNextPageToken() { + return null; + } + @Override + public Page getNextPage() { + return null; + } + @Override + public Iterable iterateAll() { + return getValues(); + } + @Override + public Iterable getValues() { + return Arrays.asList(theBucket); + } + }; + } + + @Override + public Page list(String bucketName, BlobListOption... 
options) { + assert bucketName.equals(this.theBucket.getName()) : "Only a single bucket is mocked"; + return new Page() { + @Override + public boolean hasNextPage() { + return false; + } + + @Override + public String getNextPageToken() { + return null; + } + + @Override + public Page getNextPage() { + return null; + } + + @Override + public Iterable iterateAll() { + return getValues(); + } + + @Override + public Iterable getValues() { + return blobsMap.values(); + } + }; + } + + @Override + public Bucket update(BucketInfo bucketInfo, BucketTargetOption... options) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public Blob update(BlobInfo blobInfo, BlobTargetOption... options) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public Blob update(BlobInfo blobInfo) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public boolean delete(String bucket, BucketSourceOption... options) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public boolean delete(String bucketName, String blobName, BlobSourceOption... options) { + assert bucketName.equals(this.theBucket.getName()) : "Only a single bucket is mocked"; + return blobsMap.remove(blobName) != null; + } + + @Override + public boolean delete(BlobId blob, BlobSourceOption... options) { + return delete(blob.getBucket(), blob.getName()); + } + + @Override + public boolean delete(BlobId blob) { + return delete(blob.getBucket(), blob.getName()); + } + + @Override + public Blob compose(ComposeRequest composeRequest) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public CopyWriter copy(CopyRequest copyRequest) { + assert copyRequest.getSource().getBucket().equals(this.theBucket.getName()) : "Only a single bucket is mocked"; + assert copyRequest.getTarget().getBucket().equals(this.theBucket.getName()) : "Only a single bucket is mocked"; + final Blob sourceBlob = blobsMap.get(copyRequest.getSource().getName()); + return sourceBlob.copyTo(copyRequest.getTarget().getBucket(), copyRequest.getTarget().getName()); + } + + @Override + public byte[] readAllBytes(String bucketName, String blobName, BlobSourceOption... options) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public byte[] readAllBytes(BlobId blob, BlobSourceOption... options) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public StorageBatch batch() { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public ReadChannel reader(String bucket, String blob, BlobSourceOption... options) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public ReadChannel reader(BlobId blob, BlobSourceOption... options) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public WriteChannel writer(BlobInfo blobInfo, BlobWriteOption... 
options) { + assert blobInfo.getBucket().equals(this.theBucket.getName()) : "Only a single bucket is mocked"; + final ByteArrayOutputStream baos = new ByteArrayOutputStream(); + return new WriteChannel() { + private boolean isOpenFlag = true; + + @Override + public boolean isOpen() { + return isOpenFlag; + } + + @Override + public void close() throws IOException { + constructMockBlob(blobInfo.getName(), baos.toByteArray(), blobsMap); + isOpenFlag = false; + } + + @Override + public int write(ByteBuffer src) throws IOException { + final int size1 = baos.size(); + while (src.hasRemaining()) { + baos.write(src.get()); } - }; + final int size2 = baos.size(); + return size2 - size1; + } + + @Override + public void setChunkSize(int chunkSize) { + } + + @Override + public RestorableState capture() { + return null; + } + }; + } + + @Override + public URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOption... options) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public List get(BlobId... blobIds) { + final List ans = new ArrayList<>(); + for (final BlobId blobId : blobIds) { + ans.add(get(blobId)); } + return ans; + } - @Override - public Delete delete(String deleteBucket, String deleteObject) { - return new Delete(deleteBucket, deleteObject) { - @Override - public Void execute() throws IOException { - if (bucketName.equals(getBucket()) == false) { - throw newBucketNotFoundException(getBucket()); - } + @Override + public List get(Iterable blobIds) { + final List ans = new ArrayList<>(); + for (final BlobId blobId : blobIds) { + ans.add(get(blobId)); + } + return ans; + } - if (blobs.containsKey(getObject()) == false) { - throw newObjectNotFoundException(getObject()); - } + @Override + public List update(BlobInfo... blobInfos) { + throw new RuntimeException("Mock not implemented"); + } - blobs.remove(getObject()); - return null; - } + @Override + public List update(Iterable blobInfos) { + throw new RuntimeException("Mock not implemented"); + } - @Override - public HttpRequest buildHttpRequest() throws IOException { - final HttpRequest httpRequest = super.buildHttpRequest(); - httpRequest.getHeaders().put(DELETION_HEADER, getObject()); - return httpRequest; - } - }; + @Override + public List delete(BlobId... blobIds) { + final List ans = new ArrayList<>(); + for (final BlobId blobId : blobIds) { + ans.add(delete(blobId)); } + return ans; + } - @Override - public Copy copy(String srcBucket, String srcObject, String destBucket, String destObject, StorageObject content) { - return new Copy(srcBucket, srcObject, destBucket, destObject, content) { - @Override - public StorageObject execute() throws IOException { - if (bucketName.equals(getSourceBucket()) == false) { - throw newBucketNotFoundException(getSourceBucket()); - } - if (bucketName.equals(getDestinationBucket()) == false) { - throw newBucketNotFoundException(getDestinationBucket()); - } - - final byte[] bytes = blobs.get(getSourceObject()); - if (bytes == null) { - throw newObjectNotFoundException(getSourceObject()); - } - blobs.put(getDestinationObject(), bytes); - - final StorageObject storageObject = new StorageObject(); - storageObject.setId(getDestinationObject()); - return storageObject; - } - }; + @Override + public List delete(Iterable blobIds) { + final List ans = new ArrayList<>(); + for (final BlobId blobId : blobIds) { + ans.add(delete(blobId)); } + return ans; + } + + @Override + public Acl getAcl(String bucket, Entity entity, BucketSourceOption... 
options) { + throw new RuntimeException("Mock not implemented"); } - private static GoogleJsonResponseException newBucketNotFoundException(final String bucket) { - final HttpResponseException.Builder builder = new HttpResponseException.Builder(404, "Bucket not found: " + bucket, new HttpHeaders()); - return new GoogleJsonResponseException(builder, new GoogleJsonError()); + @Override + public Acl getAcl(String bucket, Entity entity) { + throw new RuntimeException("Mock not implemented"); } - private static GoogleJsonResponseException newObjectNotFoundException(final String object) { - final HttpResponseException.Builder builder = new HttpResponseException.Builder(404, "Object not found: " + object, new HttpHeaders()); - return new GoogleJsonResponseException(builder, new GoogleJsonError()); + @Override + public boolean deleteAcl(String bucket, Entity entity, BucketSourceOption... options) { + throw new RuntimeException("Mock not implemented"); } - /** - * {@link MockedHttpTransport} extends the existing testing transport to analyze the content - * of {@link com.google.api.client.googleapis.batch.BatchRequest} and delete the appropriates - * blobs. We use this because {@link Storage#batch()} is final and there is no other way to - * extend batch requests for testing purposes. - */ - static class MockedHttpTransport extends MockHttpTransport { + @Override + public boolean deleteAcl(String bucket, Entity entity) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public Acl createAcl(String bucket, Acl acl, BucketSourceOption... options) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public Acl createAcl(String bucket, Acl acl) { + throw new RuntimeException("Mock not implemented"); + } - private final ConcurrentMap blobs; + @Override + public Acl updateAcl(String bucket, Acl acl, BucketSourceOption... options) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public Acl updateAcl(String bucket, Acl acl) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public List listAcls(String bucket, BucketSourceOption... 
options) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public List listAcls(String bucket) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public Acl getDefaultAcl(String bucket, Entity entity) { + throw new RuntimeException("Mock not implemented"); + } - MockedHttpTransport(final ConcurrentMap blobs) { - this.blobs = blobs; + @Override + public boolean deleteDefaultAcl(String bucket, Entity entity) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public Acl createDefaultAcl(String bucket, Acl acl) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public Acl updateDefaultAcl(String bucket, Acl acl) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public List listDefaultAcls(String bucket) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public Acl getAcl(BlobId blob, Entity entity) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public boolean deleteAcl(BlobId blob, Entity entity) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public Acl createAcl(BlobId blob, Acl acl) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public Acl updateAcl(BlobId blob, Acl acl) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public List listAcls(BlobId blob) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public Policy getIamPolicy(String bucket, BucketSourceOption... options) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public Policy setIamPolicy(String bucket, Policy policy, BucketSourceOption... options) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public List testIamPermissions(String bucket, List permissions, BucketSourceOption... options) { + throw new RuntimeException("Mock not implemented"); + } + + @Override + public ServiceAccount getServiceAccount(String projectId) { + throw new RuntimeException("Mock not implemented"); + } + + private static class ReadChannelFromByteArray implements ReadChannel { + private boolean isOpenFlag; + private final ByteBuffer byteBuffer; + + ReadChannelFromByteArray(byte[] srcArray) { + final byte[] clonedArray = Arrays.copyOf(srcArray, srcArray.length); + byteBuffer = ByteBuffer.wrap(clonedArray); + isOpenFlag = byteBuffer.hasRemaining(); } @Override - public LowLevelHttpRequest buildRequest(final String method, final String url) throws IOException { - // We analyze the content of the Batch request to detect our custom HTTP header, - // and extract from it the name of the blob to delete. Then we reply a simple - // batch response so that the client parser is happy. - // - // See https://cloud.google.com/storage/docs/json_api/v1/how-tos/batch for the - // format of the batch request body. 
- if (HttpMethods.POST.equals(method) && url.endsWith("/batch")) { - return new MockLowLevelHttpRequest() { - @Override - public LowLevelHttpResponse execute() throws IOException { - final String contentType = new MultipartContent().getType(); - - final StringBuilder builder = new StringBuilder(); - try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { - getStreamingContent().writeTo(out); - - Streams.readAllLines(new ByteArrayInputStream(out.toByteArray()), line -> { - if ((line != null) && line.startsWith(DELETION_HEADER)) { - builder.append("--__END_OF_PART__\r\n"); - builder.append("Content-Type: application/http").append("\r\n"); - builder.append("\r\n"); - builder.append("HTTP/1.1 "); - - final String blobName = line.substring(line.indexOf(':') + 1).trim(); - if (blobs.containsKey(blobName)) { - builder.append(RestStatus.OK.getStatus()); - blobs.remove(blobName); - } else { - builder.append(RestStatus.NOT_FOUND.getStatus()); - } - builder.append("\r\n"); - builder.append("Content-Type: application/json; charset=UTF-8").append("\r\n"); - builder.append("Content-Length: 0").append("\r\n"); - builder.append("\r\n"); - } - }); - builder.append("\r\n"); - builder.append("--__END_OF_PART__--"); - } - - final MockLowLevelHttpResponse response = new MockLowLevelHttpResponse(); - response.setStatusCode(200); - response.setContent(builder.toString()); - response.setContentType(contentType); - return response; - } - }; - } else { - return super.buildRequest(method, url); + public boolean isOpen() { + return isOpenFlag; + } + + @Override + public int read(ByteBuffer dst) throws IOException { + final int size1 = dst.remaining(); + while (dst.hasRemaining() && byteBuffer.hasRemaining()) { + dst.put(byteBuffer.get()); } + final int size2 = dst.remaining(); + return size1 - size2; + } + + @Override + public void setChunkSize(int chunkSize) { + } + + @Override + public void seek(long position) throws IOException { + byteBuffer.position(Math.toIntExact(position)); + } + + @Override + public void close() { + isOpenFlag = false; + } + + @Override + public RestorableState capture() { + return null; } } + + private static Blob constructMockBlob(String blobName, byte[] data, ConcurrentMap blobsMap) { + final Blob blobMock = mock(Blob.class); + when(blobMock.getName()).thenReturn(blobName); + when(blobMock.getSize()).thenReturn((long) data.length); + when(blobMock.reload(Matchers.anyVararg())).thenReturn(blobMock); + when(blobMock.reader(Matchers.anyVararg())).thenReturn(new ReadChannelFromByteArray(data)); + when(blobMock.copyTo(Matchers.anyString(), Matchers.anyVararg())) + .thenThrow(new RuntimeException("Mock not implemented. 
Only a single bucket is mocked.")); + doAnswer(invocation -> { + final String copiedBlobName = (String) invocation.getArguments()[1]; + final Blob copiedMockBlob = constructMockBlob(copiedBlobName, data, blobsMap); + final CopyWriter ans = mock(CopyWriter.class); + when(ans.getResult()).thenReturn(copiedMockBlob); + when(ans.isDone()).thenReturn(true); + return ans; + }).when(blobMock.copyTo(Matchers.anyString(), Matchers.anyString(), Matchers.anyVararg())); + doAnswer(invocation -> { + final BlobId blobId = (BlobId) invocation.getArguments()[0]; + final Blob copiedMockBlob = constructMockBlob(blobId.getName(), data, blobsMap); + final CopyWriter ans = mock(CopyWriter.class); + when(ans.getResult()).thenReturn(copiedMockBlob); + when(ans.isDone()).thenReturn(true); + return ans; + }).when(blobMock.copyTo(Matchers.any(BlobId.class), Matchers.anyVararg())); + blobsMap.put(blobName, blobMock); + return blobMock; + } + } From 962f7c8b80177e0e048d60c4c7e069b3035bf412 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 18 Apr 2018 12:50:32 +0300 Subject: [PATCH 03/45] WIP --- .../repositories/gcs/GoogleCloudStorageService.java | 2 +- .../gcs/GoogleCloudStorageServiceTests.java | 13 ++----------- 2 files changed, 3 insertions(+), 12 deletions(-) diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index f638c3d34a856..44a9479af063d 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -93,7 +93,7 @@ public Storage createClient(String clientName) { * Converts timeout values from the settings to a timeout value for the Google * Cloud SDK **/ - private static Integer toTimeout(TimeValue timeout) { + static Integer toTimeout(TimeValue timeout) { // Null or zero in settings means the default timeout if ((timeout == null) || TimeValue.ZERO.equals(timeout)) { // negative value means using the default value diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java index 44897819fd9e3..2260417ac1066 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java @@ -19,15 +19,6 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; -import com.google.api.client.http.GenericUrl; -import com.google.api.client.http.HttpIOExceptionHandler; -import com.google.api.client.http.HttpRequest; -import com.google.api.client.http.HttpRequestFactory; -import com.google.api.client.http.HttpRequestInitializer; -import com.google.api.client.http.HttpResponse; -import com.google.api.client.http.HttpUnsuccessfulResponseHandler; -import com.google.api.client.testing.http.MockHttpTransport; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.Environment; @@ -90,8 +81,8 @@ public void testDefaultHttpRequestInitializer() throws IOException { } public void testToTimeout() { - 
assertNull(GoogleCloudStorageService.toTimeout(null)); - assertNull(GoogleCloudStorageService.toTimeout(TimeValue.ZERO)); + assertEquals(-1, GoogleCloudStorageService.toTimeout(null).intValue()); + assertEquals(-1, GoogleCloudStorageService.toTimeout(TimeValue.ZERO).intValue()); assertEquals(0, GoogleCloudStorageService.toTimeout(TimeValue.MINUS_ONE).intValue()); } } From ec285a67cadd70db3580727155e93bb1fe7fc597 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 18 Apr 2018 20:05:48 +0300 Subject: [PATCH 04/45] Patchy deps compile --- plugins/repository-gcs/build.gradle | 30 +++-- .../gcs/GoogleCloudStorageServiceTests.java | 104 +++++++++--------- 2 files changed, 72 insertions(+), 62 deletions(-) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 068314df76738..932183f654db8 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -32,13 +32,21 @@ esplugin { //] dependencies { - compile 'com.google.cloud:google-cloud-storage:1.25.0' - compile 'com.google.cloud:google-cloud-core:1.25.0' - compile 'com.google.cloud:google-cloud-core-http:1.25.0' - compile 'com.google.api:gax:1.24.0' + compile 'com.google.cloud:google-cloud-storage:1.26.0' + compile 'com.google.cloud:google-cloud-core:1.26.0' + compile 'com.google.cloud:google-cloud-core-http:1.26.0' + compile 'com.google.api:api-common:1.5.0' compile 'org.threeten:threetenbp:1.3.6' - compile 'com.google.auth:google-auth-library-oauth2-http:0.9.1' - compile 'com.google.auth:google-auth-library-credentials:0.9.1' + compile 'com.google.api:gax:1.23.0' + compile 'com.google.auth:google-auth-library-oauth2-http:0.9.0' + compile 'com.google.auth:google-auth-library-credentials:0.9.0' + + //compile 'com.google.api:gax-grpc:1.24.0' + //compile 'org.threeten:threetenbp:1.3.6' + //compile 'com.google.auth:google-auth-library-oauth2-http:0.9.1' + //compile 'com.google.auth:google-auth-library-credentials:0.9.1' + + // compile "com.google.apis:google-api-services-storage:v1-rev115-${versions.google}" // compile "com.google.api-client:google-api-client:${versions.google}" // compile "com.google.oauth-client:google-oauth-client:${versions.google}" @@ -56,13 +64,13 @@ dependencyLicenses { thirdPartyAudit.excludes = [ // classes are missing - 'com.google.common.base.Splitter', - 'com.google.common.collect.Lists', + //'com.google.common.base.Splitter', + //'com.google.common.collect.Lists', 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener', - 'org.apache.avalon.framework.logger.Logger', - 'org.apache.log.Hierarchy', - 'org.apache.log.Logger', + //'org.apache.avalon.framework.logger.Logger', + //'org.apache.log.Hierarchy', + //'org.apache.log.Logger', ] forbiddenApisTest { diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java index 2260417ac1066..3a8082623d656 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java @@ -19,66 +19,68 @@ package org.elasticsearch.repositories.gcs; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; - 
-import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyBoolean; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - public class GoogleCloudStorageServiceTests extends ESTestCase { /** * Test that the {@link GoogleCloudStorageService.DefaultHttpRequestInitializer} attaches new instances * of {@link HttpIOExceptionHandler} and {@link HttpUnsuccessfulResponseHandler} for every HTTP requests. */ - public void testDefaultHttpRequestInitializer() throws IOException { - final Environment environment = mock(Environment.class); - when(environment.settings()).thenReturn(Settings.EMPTY); - - final GoogleCredential credential = mock(GoogleCredential.class); - when(credential.handleResponse(any(HttpRequest.class), any(HttpResponse.class), anyBoolean())).thenReturn(false); - - final TimeValue readTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, 120)); - final TimeValue connectTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, 120)); - final String endpoint = randomBoolean() ? randomAlphaOfLength(10) : null; - final String applicationName = randomBoolean() ? randomAlphaOfLength(10) : null; - - final GoogleCloudStorageClientSettings clientSettings = - new GoogleCloudStorageClientSettings(credential, endpoint, connectTimeout, readTimeout, applicationName); - - final HttpRequestInitializer initializer = GoogleCloudStorageService.createRequestInitializer(clientSettings); - final HttpRequestFactory requestFactory = new MockHttpTransport().createRequestFactory(initializer); - - final HttpRequest request1 = requestFactory.buildGetRequest(new GenericUrl()); - assertEquals((int) connectTimeout.millis(), request1.getConnectTimeout()); - assertEquals((int) readTimeout.millis(), request1.getReadTimeout()); - assertSame(credential, request1.getInterceptor()); - assertNotNull(request1.getIOExceptionHandler()); - assertNotNull(request1.getUnsuccessfulResponseHandler()); - - final HttpRequest request2 = requestFactory.buildGetRequest(new GenericUrl()); - assertEquals((int) connectTimeout.millis(), request2.getConnectTimeout()); - assertEquals((int) readTimeout.millis(), request2.getReadTimeout()); - assertSame(request1.getInterceptor(), request2.getInterceptor()); - assertNotNull(request2.getIOExceptionHandler()); - assertNotSame(request1.getIOExceptionHandler(), request2.getIOExceptionHandler()); - assertNotNull(request2.getUnsuccessfulResponseHandler()); - assertNotSame(request1.getUnsuccessfulResponseHandler(), request2.getUnsuccessfulResponseHandler()); - - request1.getUnsuccessfulResponseHandler().handleResponse(null, null, false); - verify(credential, times(1)).handleResponse(any(HttpRequest.class), any(HttpResponse.class), anyBoolean()); - - request2.getUnsuccessfulResponseHandler().handleResponse(null, null, false); - verify(credential, times(2)).handleResponse(any(HttpRequest.class), any(HttpResponse.class), anyBoolean()); - } + // public void testDefaultHttpRequestInitializer() throws IOException { + // final Environment environment = mock(Environment.class); + // when(environment.settings()).thenReturn(Settings.EMPTY); + // + // final GoogleCredential credential = mock(GoogleCredential.class); + // when(credential.handleResponse(any(HttpRequest.class), + // any(HttpResponse.class), anyBoolean())).thenReturn(false); + // + // final TimeValue readTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, + // 120)); + // final TimeValue connectTimeout = + // 
TimeValue.timeValueSeconds(randomIntBetween(1, 120)); + // final String endpoint = randomBoolean() ? randomAlphaOfLength(10) : null; + // final String applicationName = randomBoolean() ? randomAlphaOfLength(10) : + // null; + // + // final GoogleCloudStorageClientSettings clientSettings = + // new GoogleCloudStorageClientSettings(credential, endpoint, connectTimeout, + // readTimeout, applicationName); + // + // final HttpRequestInitializer initializer = + // GoogleCloudStorageService.createRequestInitializer(clientSettings); + // final HttpRequestFactory requestFactory = new + // MockHttpTransport().createRequestFactory(initializer); + // + // final HttpRequest request1 = requestFactory.buildGetRequest(new + // GenericUrl()); + // assertEquals((int) connectTimeout.millis(), request1.getConnectTimeout()); + // assertEquals((int) readTimeout.millis(), request1.getReadTimeout()); + // assertSame(credential, request1.getInterceptor()); + // assertNotNull(request1.getIOExceptionHandler()); + // assertNotNull(request1.getUnsuccessfulResponseHandler()); + // + // final HttpRequest request2 = requestFactory.buildGetRequest(new + // GenericUrl()); + // assertEquals((int) connectTimeout.millis(), request2.getConnectTimeout()); + // assertEquals((int) readTimeout.millis(), request2.getReadTimeout()); + // assertSame(request1.getInterceptor(), request2.getInterceptor()); + // assertNotNull(request2.getIOExceptionHandler()); + // assertNotSame(request1.getIOExceptionHandler(), + // request2.getIOExceptionHandler()); + // assertNotNull(request2.getUnsuccessfulResponseHandler()); + // assertNotSame(request1.getUnsuccessfulResponseHandler(), + // request2.getUnsuccessfulResponseHandler()); + // + // request1.getUnsuccessfulResponseHandler().handleResponse(null, null, false); + // verify(credential, times(1)).handleResponse(any(HttpRequest.class), + // any(HttpResponse.class), anyBoolean()); + // + // request2.getUnsuccessfulResponseHandler().handleResponse(null, null, false); + // verify(credential, times(2)).handleResponse(any(HttpRequest.class), + // any(HttpResponse.class), anyBoolean()); + // } public void testToTimeout() { assertEquals(-1, GoogleCloudStorageService.toTimeout(null).intValue()); From a8bf229cb55049c8f513452e0155cde5b91e227c Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 19 Apr 2018 10:36:22 +0300 Subject: [PATCH 05/45] Dependency hell is hell --- plugins/repository-gcs/build.gradle | 23 +- .../licenses/api-common-1.5.0.jar.sha1 | 1 + .../licenses/api-common-LICENSE.txt | 27 +++ .../licenses/api-common-NOTICE.txt | 0 .../licenses/commons-codec-1.10.jar.sha1 | 1 - .../licenses/commons-logging-1.1.3.jar.sha1 | 1 - .../licenses/gax-1.23.0.jar.sha1 | 1 + .../repository-gcs/licenses/gax-LICENSE.txt | 27 +++ .../repository-gcs/licenses/gax-NOTICE.txt | 0 .../licenses/google-api-client-LICENSE.txt | 201 +++++++++++++++++ .../licenses/google-api-client-NOTICE.txt | 0 .../licenses/google-auth-LICENSE.txt | 28 +++ .../licenses/google-auth-NOTICE.txt | 0 ...le-auth-library-credentials-0.9.0.jar.sha1 | 1 + ...le-auth-library-oauth2-http-0.9.0.jar.sha1 | 1 + .../licenses/google-cloud-LICENSE.txt | 201 +++++++++++++++++ .../licenses/google-cloud-NOTICE.txt | 0 .../google-cloud-core-1.26.0.jar.sha1 | 1 + .../google-cloud-core-http-1.26.0.jar.sha1 | 1 + .../google-cloud-storage-1.26.0.jar.sha1 | 1 + ...le-LICENSE.txt => google-http-LICENSE.txt} | 0 .../licenses/google-http-NOTICE.txt | 0 ...ogle-http-client-appengine-1.23.0.jar.sha1 | 1 + .../licenses/google-oauth-client-LICENSE.txt | 28 
+++ .../licenses/google-oauth-client-NOTICE.txt | 0 .../licenses/guava-20.0.jar.sha1 | 1 + ...ns-codec-LICENSE.txt => guava-LICENSE.txt} | 0 .../repository-gcs/licenses/guava-NOTICE.txt | 0 .../licenses/httpclient-4.5.2.jar.sha1 | 1 - .../licenses/httpcore-4.4.5.jar.sha1 | 1 - .../licenses/old/commons-codec-LICENSE.txt | 202 ++++++++++++++++++ .../{ => old}/commons-codec-NOTICE.txt | 0 .../{ => old}/commons-logging-LICENSE.txt | 0 .../{ => old}/commons-logging-NOTICE.txt | 0 .../licenses/old/google-LICENSE.txt | 201 +++++++++++++++++ .../licenses/{ => old}/google-NOTICE.txt | 0 .../licenses/{ => old}/httpclient-LICENSE.txt | 0 .../licenses/{ => old}/httpclient-NOTICE.txt | 0 .../licenses/{ => old}/httpcore-LICENSE.txt | 0 .../licenses/{ => old}/httpcore-NOTICE.txt | 0 .../licenses/opencensus-api-0.11.1.jar.sha1 | 1 + .../licenses/opencensus-api-LICENSE.txt | 202 ++++++++++++++++++ .../licenses/opencensus-api-NOTICE.txt | 0 .../proto-google-common-protos-1.8.0.jar.sha1 | 1 + .../proto-google-common-protos-LICENSE.txt | 202 ++++++++++++++++++ .../proto-google-common-protos-NOTICE.txt | 0 .../licenses/threetenbp-1.3.6.jar.sha1 | 1 + .../licenses/threetenbp-LICENSE.txt | 31 +++ .../licenses/threetenbp-NOTICE.txt | 0 .../gcs/GoogleCloudStorageBlobStore.java | 7 +- .../repositories/gcs/MockStorage.java | 2 + 51 files changed, 1385 insertions(+), 13 deletions(-) create mode 100644 plugins/repository-gcs/licenses/api-common-1.5.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/api-common-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/api-common-NOTICE.txt delete mode 100644 plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 delete mode 100644 plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/gax-1.23.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/gax-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/gax-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/google-api-client-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/google-api-client-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/google-auth-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/google-auth-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-cloud-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/google-cloud-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/google-cloud-core-1.26.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-cloud-core-http-1.26.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-cloud-storage-1.26.0.jar.sha1 rename plugins/repository-gcs/licenses/{google-LICENSE.txt => google-http-LICENSE.txt} (100%) create mode 100644 plugins/repository-gcs/licenses/google-http-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/google-http-client-appengine-1.23.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-oauth-client-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/google-oauth-client-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/guava-20.0.jar.sha1 rename plugins/repository-gcs/licenses/{commons-codec-LICENSE.txt => guava-LICENSE.txt} (100%) create mode 100644 plugins/repository-gcs/licenses/guava-NOTICE.txt 
delete mode 100644 plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 delete mode 100644 plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/old/commons-codec-LICENSE.txt rename plugins/repository-gcs/licenses/{ => old}/commons-codec-NOTICE.txt (100%) rename plugins/repository-gcs/licenses/{ => old}/commons-logging-LICENSE.txt (100%) rename plugins/repository-gcs/licenses/{ => old}/commons-logging-NOTICE.txt (100%) create mode 100644 plugins/repository-gcs/licenses/old/google-LICENSE.txt rename plugins/repository-gcs/licenses/{ => old}/google-NOTICE.txt (100%) rename plugins/repository-gcs/licenses/{ => old}/httpclient-LICENSE.txt (100%) rename plugins/repository-gcs/licenses/{ => old}/httpclient-NOTICE.txt (100%) rename plugins/repository-gcs/licenses/{ => old}/httpcore-LICENSE.txt (100%) rename plugins/repository-gcs/licenses/{ => old}/httpcore-NOTICE.txt (100%) create mode 100644 plugins/repository-gcs/licenses/opencensus-api-0.11.1.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/opencensus-api-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/opencensus-api-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/proto-google-common-protos-1.8.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/proto-google-common-protos-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/proto-google-common-protos-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/threetenbp-1.3.6.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/threetenbp-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/threetenbp-NOTICE.txt diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 932183f654db8..8b37493d638f5 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -35,11 +35,21 @@ dependencies { compile 'com.google.cloud:google-cloud-storage:1.26.0' compile 'com.google.cloud:google-cloud-core:1.26.0' compile 'com.google.cloud:google-cloud-core-http:1.26.0' + compile 'com.google.api-client:google-api-client:1.23.0' + compile 'com.google.auth:google-auth-library-oauth2-http:0.9.0' + compile 'com.google.auth:google-auth-library-credentials:0.9.0' + compile 'com.google.oauth-client:google-oauth-client:1.23.0' compile 'com.google.api:api-common:1.5.0' compile 'org.threeten:threetenbp:1.3.6' compile 'com.google.api:gax:1.23.0' - compile 'com.google.auth:google-auth-library-oauth2-http:0.9.0' - compile 'com.google.auth:google-auth-library-credentials:0.9.0' + compile 'com.google.http-client:google-http-client:1.23.0' + compile 'com.google.http-client:google-http-client-jackson2:1.23.0' + compile 'com.google.http-client:google-http-client-appengine:1.23.0' + compile 'com.google.guava:guava:20.0' + compile 'com.google.apis:google-api-services-storage:v1-rev115-1.23.0' + + compile 'io.opencensus:opencensus-api:0.11.1' + compile 'com.google.api.grpc:proto-google-common-protos:1.8.0' //compile 'com.google.api:gax-grpc:1.24.0' //compile 'org.threeten:threetenbp:1.3.6' @@ -49,7 +59,6 @@ dependencies { // compile "com.google.apis:google-api-services-storage:v1-rev115-${versions.google}" // compile "com.google.api-client:google-api-client:${versions.google}" -// compile "com.google.oauth-client:google-oauth-client:${versions.google}" // compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" // compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" // compile 
"commons-logging:commons-logging:${versions.commonslogging}" @@ -59,15 +68,17 @@ dependencies { } dependencyLicenses { - mapping from: /google-.*/, to: 'google' + mapping from: /google-cloud-.*/, to: 'google-cloud' + mapping from: /google-auth-.*/, to: 'google-auth' + mapping from: /google-http-.*/, to: 'google-http' } thirdPartyAudit.excludes = [ // classes are missing //'com.google.common.base.Splitter', //'com.google.common.collect.Lists', - 'javax.servlet.ServletContextEvent', - 'javax.servlet.ServletContextListener', + //'javax.servlet.ServletContextEvent', + //'javax.servlet.ServletContextListener', //'org.apache.avalon.framework.logger.Logger', //'org.apache.log.Hierarchy', //'org.apache.log.Logger', diff --git a/plugins/repository-gcs/licenses/api-common-1.5.0.jar.sha1 b/plugins/repository-gcs/licenses/api-common-1.5.0.jar.sha1 new file mode 100644 index 0000000000000..64435356e5eaf --- /dev/null +++ b/plugins/repository-gcs/licenses/api-common-1.5.0.jar.sha1 @@ -0,0 +1 @@ +7e537338d40a57ad469239acb6d828fa544fb52b \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/api-common-LICENSE.txt b/plugins/repository-gcs/licenses/api-common-LICENSE.txt new file mode 100644 index 0000000000000..6d16b6578a2f0 --- /dev/null +++ b/plugins/repository-gcs/licenses/api-common-LICENSE.txt @@ -0,0 +1,27 @@ +Copyright 2016, Google Inc. +All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/plugins/repository-gcs/licenses/api-common-NOTICE.txt b/plugins/repository-gcs/licenses/api-common-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 b/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 deleted file mode 100644 index 3fe8682a1b0f9..0000000000000 --- a/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4b95f4897fa13f2cd904aee711aeafc0c5295cd8 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 deleted file mode 100644 index 5b8f029e58293..0000000000000 --- a/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gax-1.23.0.jar.sha1 b/plugins/repository-gcs/licenses/gax-1.23.0.jar.sha1 new file mode 100644 index 0000000000000..30dae6ac90110 --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-1.23.0.jar.sha1 @@ -0,0 +1 @@ +09cbdb558449d6fc16667043c31c541b8d02ace4 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gax-LICENSE.txt b/plugins/repository-gcs/licenses/gax-LICENSE.txt new file mode 100644 index 0000000000000..267561bb386de --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-LICENSE.txt @@ -0,0 +1,27 @@ +Copyright 2016, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
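
Note for reviewers: gax, licensed above, supplies the Page abstraction that the mocked listings in MockStorage implement. The reflection on BlobListOption in that mock exists because the option's prefix value is not exposed publicly; the caller-side pattern it is written against is roughly the following. This is a sketch of the assumed caller shape, not the plugin's exact listBlobsByPrefix code.

    // Sketch of the list-by-prefix pattern the MockStorage prefix filter targets;
    // the helper name and return shape are made up for illustration.
    import java.util.HashMap;
    import java.util.Map;

    import com.google.api.gax.paging.Page;
    import com.google.cloud.storage.Blob;
    import com.google.cloud.storage.Bucket;
    import com.google.cloud.storage.Storage;
    import com.google.cloud.storage.Storage.BlobListOption;

    class ListByPrefixSketch {

        static Map<String, Long> blobSizes(Storage storage, String bucketName, String prefix) {
            Bucket bucket = storage.get(bucketName);                      // MockStorage hands back its single mocked bucket
            Page<Blob> page = bucket.list(BlobListOption.prefix(prefix)); // the option the mock unwraps via reflection
            Map<String, Long> sizes = new HashMap<>();
            for (Blob blob : page.iterateAll()) {                         // the mocked Page never has a next page
                sizes.put(blob.getName(), blob.getSize());
            }
            return sizes;
        }
    }

Because the mocked Page reports no further pages, iterateAll() simply walks the entries of the shared blobs map that start with the requested prefix.
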
diff --git a/plugins/repository-gcs/licenses/gax-NOTICE.txt b/plugins/repository-gcs/licenses/gax-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-api-client-LICENSE.txt b/plugins/repository-gcs/licenses/google-api-client-LICENSE.txt new file mode 100644 index 0000000000000..4eedc0116add7 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-api-client-LICENSE.txt @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-gcs/licenses/google-api-client-NOTICE.txt b/plugins/repository-gcs/licenses/google-api-client-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-auth-LICENSE.txt b/plugins/repository-gcs/licenses/google-auth-LICENSE.txt new file mode 100644 index 0000000000000..12edf23c6711f --- /dev/null +++ b/plugins/repository-gcs/licenses/google-auth-LICENSE.txt @@ -0,0 +1,28 @@ +Copyright 2014, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/plugins/repository-gcs/licenses/google-auth-NOTICE.txt b/plugins/repository-gcs/licenses/google-auth-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.0.jar.sha1 b/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.0.jar.sha1 new file mode 100644 index 0000000000000..bd36d59b66624 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.0.jar.sha1 @@ -0,0 +1 @@ +8e2b181feff6005c9cbc6f5c1c1e2d3ec9138d46 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.0.jar.sha1 b/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.0.jar.sha1 new file mode 100644 index 0000000000000..143c00920204c --- /dev/null +++ b/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.0.jar.sha1 @@ -0,0 +1 @@ +04e6152c3aead24148627e84f5651e79698c00d9 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-LICENSE.txt b/plugins/repository-gcs/licenses/google-cloud-LICENSE.txt new file mode 100644 index 0000000000000..4eedc0116add7 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-LICENSE.txt @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/google-cloud-NOTICE.txt b/plugins/repository-gcs/licenses/google-cloud-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-cloud-core-1.26.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-1.26.0.jar.sha1 new file mode 100644 index 0000000000000..aa2095bb7f4aa --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-core-1.26.0.jar.sha1 @@ -0,0 +1 @@ +5a65c299210381c62043d284f6dec0ccaacac19e \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-http-1.26.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-http-1.26.0.jar.sha1 new file mode 100644 index 0000000000000..0ec125ff07bd2 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-core-http-1.26.0.jar.sha1 @@ -0,0 +1 @@ +0edc507afb9970900787d1204a1ee894b88abf06 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-storage-1.26.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-storage-1.26.0.jar.sha1 new file mode 100644 index 0000000000000..ea8aca863e065 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-storage-1.26.0.jar.sha1 @@ -0,0 +1 @@ +2752a91ffd8ca767942be823390a620791812e9c \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-LICENSE.txt b/plugins/repository-gcs/licenses/google-http-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/google-LICENSE.txt rename to plugins/repository-gcs/licenses/google-http-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/google-http-NOTICE.txt b/plugins/repository-gcs/licenses/google-http-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-http-client-appengine-1.23.0.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-appengine-1.23.0.jar.sha1 new file mode 100644 index 0000000000000..823c3a85089a5 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-appengine-1.23.0.jar.sha1 @@ -0,0 +1 @@ +0eda0d0f758c1cc525866e52e1226c4eb579d130 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-oauth-client-LICENSE.txt b/plugins/repository-gcs/licenses/google-oauth-client-LICENSE.txt new file mode 100644 index 0000000000000..12edf23c6711f --- /dev/null +++ b/plugins/repository-gcs/licenses/google-oauth-client-LICENSE.txt @@ -0,0 +1,28 @@ +Copyright 2014, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/plugins/repository-gcs/licenses/google-oauth-client-NOTICE.txt b/plugins/repository-gcs/licenses/google-oauth-client-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/guava-20.0.jar.sha1 b/plugins/repository-gcs/licenses/guava-20.0.jar.sha1 new file mode 100644 index 0000000000000..7b6ae09060b29 --- /dev/null +++ b/plugins/repository-gcs/licenses/guava-20.0.jar.sha1 @@ -0,0 +1 @@ +89507701249388e1ed5ddcf8c41f4ce1be7831ef \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/commons-codec-LICENSE.txt b/plugins/repository-gcs/licenses/guava-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/commons-codec-LICENSE.txt rename to plugins/repository-gcs/licenses/guava-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/guava-NOTICE.txt b/plugins/repository-gcs/licenses/guava-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 b/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 deleted file mode 100644 index 6937112a09fb6..0000000000000 --- a/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 b/plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 deleted file mode 100644 index 581726601745b..0000000000000 --- a/plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e7501a1b34325abb00d17dde96150604a0658b54 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/old/commons-codec-LICENSE.txt b/plugins/repository-gcs/licenses/old/commons-codec-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/repository-gcs/licenses/old/commons-codec-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/commons-codec-NOTICE.txt b/plugins/repository-gcs/licenses/old/commons-codec-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/commons-codec-NOTICE.txt rename to plugins/repository-gcs/licenses/old/commons-codec-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/commons-logging-LICENSE.txt b/plugins/repository-gcs/licenses/old/commons-logging-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/commons-logging-LICENSE.txt rename to plugins/repository-gcs/licenses/old/commons-logging-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/commons-logging-NOTICE.txt b/plugins/repository-gcs/licenses/old/commons-logging-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/commons-logging-NOTICE.txt rename to plugins/repository-gcs/licenses/old/commons-logging-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/old/google-LICENSE.txt b/plugins/repository-gcs/licenses/old/google-LICENSE.txt new file mode 100644 index 0000000000000..980a15ac24eeb --- /dev/null +++ b/plugins/repository-gcs/licenses/old/google-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/google-NOTICE.txt b/plugins/repository-gcs/licenses/old/google-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/google-NOTICE.txt rename to plugins/repository-gcs/licenses/old/google-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/httpclient-LICENSE.txt b/plugins/repository-gcs/licenses/old/httpclient-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/httpclient-LICENSE.txt rename to plugins/repository-gcs/licenses/old/httpclient-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/httpclient-NOTICE.txt b/plugins/repository-gcs/licenses/old/httpclient-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/httpclient-NOTICE.txt rename to plugins/repository-gcs/licenses/old/httpclient-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/httpcore-LICENSE.txt b/plugins/repository-gcs/licenses/old/httpcore-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/httpcore-LICENSE.txt rename to plugins/repository-gcs/licenses/old/httpcore-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/httpcore-NOTICE.txt b/plugins/repository-gcs/licenses/old/httpcore-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/httpcore-NOTICE.txt rename to plugins/repository-gcs/licenses/old/httpcore-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/opencensus-api-0.11.1.jar.sha1 b/plugins/repository-gcs/licenses/opencensus-api-0.11.1.jar.sha1 new file mode 100644 index 0000000000000..61d8e3b148144 --- /dev/null +++ b/plugins/repository-gcs/licenses/opencensus-api-0.11.1.jar.sha1 @@ -0,0 +1 @@ +54689fbf750a7f26e34fa1f1f96b883c53f51486 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/opencensus-api-LICENSE.txt b/plugins/repository-gcs/licenses/opencensus-api-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/repository-gcs/licenses/opencensus-api-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/opencensus-api-NOTICE.txt b/plugins/repository-gcs/licenses/opencensus-api-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/proto-google-common-protos-1.8.0.jar.sha1 b/plugins/repository-gcs/licenses/proto-google-common-protos-1.8.0.jar.sha1 new file mode 100644 index 0000000000000..0a2dee4447e92 --- /dev/null +++ b/plugins/repository-gcs/licenses/proto-google-common-protos-1.8.0.jar.sha1 @@ -0,0 +1 @@ +b3282312ba82536fc9a7778cabfde149a875e877 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/proto-google-common-protos-LICENSE.txt b/plugins/repository-gcs/licenses/proto-google-common-protos-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/repository-gcs/licenses/proto-google-common-protos-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-gcs/licenses/proto-google-common-protos-NOTICE.txt b/plugins/repository-gcs/licenses/proto-google-common-protos-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/threetenbp-1.3.6.jar.sha1 b/plugins/repository-gcs/licenses/threetenbp-1.3.6.jar.sha1 new file mode 100644 index 0000000000000..65c16fed4a07b --- /dev/null +++ b/plugins/repository-gcs/licenses/threetenbp-1.3.6.jar.sha1 @@ -0,0 +1 @@ +89dcc04a7e028c3c963413a71f950703cf51f057 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/threetenbp-LICENSE.txt b/plugins/repository-gcs/licenses/threetenbp-LICENSE.txt new file mode 100644 index 0000000000000..fcdfc8f0d0774 --- /dev/null +++ b/plugins/repository-gcs/licenses/threetenbp-LICENSE.txt @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2007-present, Stephen Colebourne & Michael Nascimento Santos + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * * Neither the name of JSR-310 nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ diff --git a/plugins/repository-gcs/licenses/threetenbp-NOTICE.txt b/plugins/repository-gcs/licenses/threetenbp-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index bdb70fc907ee8..3bc0f503c265d 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -30,6 +30,7 @@ import com.google.cloud.storage.Storage.BlobListOption; import com.google.cloud.storage.Storage.CopyRequest; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; @@ -38,6 +39,7 @@ import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.io.Channels; import org.elasticsearch.common.settings.Settings; import java.io.IOException; import java.io.InputStream; @@ -168,7 +170,7 @@ InputStream readBlob(String blobName) throws IOException { // first read pull data buffer.flip(); return new InputStream() { - + @SuppressForbidden(reason = "this reader is backed by a socket not a file") @Override public int read() throws IOException { try { @@ -206,8 +208,7 @@ void writeBlob(String blobName, InputStream inputStream, long blobSize) throws I int limit; while ((limit = inputStream.read(buffer)) >= 0) { try { - final int bs = writer.write(ByteBuffer.wrap(buffer, 0, limit)); - assert bs == limit : "Write should return only when all bytes have been written"; + Channels.writeToChannel(buffer, 0, limit, writer); bytesWritten += limit; } catch (final Exception e) { throw new IOException("Failed to write blob [" + blobName + "] into bucket [" + bucket + "].", e); diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java index be141a2d9fbe6..caa59070ba219 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java @@ -19,6 +19,7 @@ package org.elasticsearch.repositories.gcs; +import org.elasticsearch.common.SuppressForbidden; import org.mockito.Matchers; import com.google.api.gax.paging.Page; @@ -65,6 +66,7 @@ class MockStorage implements Storage { private final Bucket theBucket; private final ConcurrentMap blobsMap; + @SuppressForbidden(reason = "mocking here requires reflection 
that trespasses the access system") MockStorage(final String bucketName, final ConcurrentMap blobs) { this.blobsMap = blobs; // mock bucket From c5de4d64d058f3c25a2146d757c7e6b209bbbd0b Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 19 Apr 2018 16:50:17 +0300 Subject: [PATCH 06/45] Dumb byte to int bug --- plugins/repository-gcs/build.gradle | 2 ++ .../gcs/GoogleCloudStorageBlobStore.java | 35 +++++++++++-------- .../repositories/gcs/MockStorage.java | 11 +++--- 3 files changed, 30 insertions(+), 18 deletions(-) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 8b37493d638f5..c59d6fd1fd6f1 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -47,6 +47,8 @@ dependencies { compile 'com.google.http-client:google-http-client-appengine:1.23.0' compile 'com.google.guava:guava:20.0' compile 'com.google.apis:google-api-services-storage:v1-rev115-1.23.0' + //compile 'com.fasterxml.jackson.core:jackson-core:2.1.3' + compile 'com.google.protobuf:protobuf-java:3.5.1' compile 'io.opencensus:opencensus-api:0.11.1' compile 'com.google.api.grpc:proto-google-common-protos:1.8.0' diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 3bc0f503c265d..bfac50097fb53 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.blobstore.BlobStoreException; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.Channels; import org.elasticsearch.common.settings.Settings; @@ -155,40 +156,46 @@ boolean blobExists(String blobName) throws IOException { */ InputStream readBlob(String blobName) throws IOException { final BlobId blobId = BlobId.of(bucket, blobName); - final ReadChannel reader = SocketAccess.doPrivilegedIOException(() -> + final Tuple readerAndSize = SocketAccess.doPrivilegedIOException(() -> { final Blob blob = storage.get(blobId); if (blob == null) { return null; } - return blob.reader(); + return new Tuple<>(blob.reader(), blob.getSize()); }); - if (reader == null) { + if (readerAndSize == null) { throw new IOException("Blob [" + blobName + "] does not exit."); } final ByteBuffer buffer = ByteBuffer.allocate(64 * 1024); // first read pull data buffer.flip(); return new InputStream() { - @SuppressForbidden(reason = "this reader is backed by a socket not a file") + long bytesRemaining = readerAndSize.v2(); + @SuppressForbidden(reason = "this reader is backed by a socket instead of a file") @Override public int read() throws IOException { - try { - return buffer.get(); - } catch (final BufferUnderflowException e) { - // pull another chunck - buffer.clear(); - if (SocketAccess.doPrivilegedIOException(() -> reader.read(buffer)) < 0) { - return -1; + while (true) { + try { + return (0xFF & buffer.get()); + } catch (final BufferUnderflowException e) { + // pull another chunck + buffer.clear(); + final long bytesRead = SocketAccess.doPrivilegedIOException(() -> readerAndSize.v1().read(buffer)); + if (bytesRead < 0) { + 
return -1; + } else if ((bytesRead == 0) && (bytesRemaining == 0)) { + return -1; + } + bytesRemaining -= bytesRead; + buffer.flip(); } - buffer.flip(); - return read(); } } @Override public void close() throws IOException { - reader.close(); + readerAndSize.v1().close(); } }; } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java index caa59070ba219..0d1a68e9ee0ce 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java @@ -75,10 +75,10 @@ class MockStorage implements Storage { doAnswer(invocation -> { assert invocation.getArguments().length == 1 : "Only a single filter is mocked"; final BlobListOption prefixFilter = (BlobListOption) invocation.getArguments()[0]; - final Method optionMethod = BlobListOption.class.getDeclaredMethod("getRpcOption"); + final Method optionMethod = BlobListOption.class.getSuperclass().getDeclaredMethod("getRpcOption"); optionMethod.setAccessible(true); assert StorageRpc.Option.PREFIX.equals(optionMethod.invoke(prefixFilter)) : "Only the prefix filter is mocked"; - final Method valueMethod = BlobListOption.class.getDeclaredMethod("getValue"); + final Method valueMethod = BlobListOption.class.getSuperclass().getDeclaredMethod("getValue"); valueMethod.setAccessible(true); final String prefixValue = (String) valueMethod.invoke(prefixFilter); return new Page() { @@ -519,6 +519,9 @@ public boolean isOpen() { @Override public int read(ByteBuffer dst) throws IOException { + if (byteBuffer.hasRemaining() == false) { + return -1; + } final int size1 = dst.remaining(); while (dst.hasRemaining() && byteBuffer.hasRemaining()) { dst.put(byteBuffer.get()); @@ -562,7 +565,7 @@ private static Blob constructMockBlob(String blobName, byte[] data, ConcurrentMa when(ans.getResult()).thenReturn(copiedMockBlob); when(ans.isDone()).thenReturn(true); return ans; - }).when(blobMock.copyTo(Matchers.anyString(), Matchers.anyString(), Matchers.anyVararg())); + }).when(blobMock).copyTo(Matchers.anyString(), Matchers.anyString(), Matchers.anyVararg()); doAnswer(invocation -> { final BlobId blobId = (BlobId) invocation.getArguments()[0]; final Blob copiedMockBlob = constructMockBlob(blobId.getName(), data, blobsMap); @@ -570,7 +573,7 @@ private static Blob constructMockBlob(String blobName, byte[] data, ConcurrentMa when(ans.getResult()).thenReturn(copiedMockBlob); when(ans.isDone()).thenReturn(true); return ans; - }).when(blobMock.copyTo(Matchers.any(BlobId.class), Matchers.anyVararg())); + }).when(blobMock).copyTo(Matchers.any(BlobId.class), Matchers.anyVararg()); blobsMap.put(blobName, blobMock); return blobMock; } From a825d7147037887d0890ba3d615d9686ddfd43c1 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 20 Apr 2018 17:01:40 +0300 Subject: [PATCH 07/45] More WIP --- ...api-services-storage-v1-rev115-LICENSE.txt | 201 ++++++++++++++++++ ...-api-services-storage-v1-rev115-NOTICE.txt | 0 .../licenses/protobuf-java-3.5.1.jar.sha1 | 1 + .../gcs/GoogleCloudStorageClientSettings.java | 15 +- 4 files changed, 213 insertions(+), 4 deletions(-) create mode 100644 plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-NOTICE.txt create mode 100644 
plugins/repository-gcs/licenses/protobuf-java-3.5.1.jar.sha1 diff --git a/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-LICENSE.txt b/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-LICENSE.txt new file mode 100644 index 0000000000000..4eedc0116add7 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-LICENSE.txt @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-NOTICE.txt b/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/protobuf-java-3.5.1.jar.sha1 b/plugins/repository-gcs/licenses/protobuf-java-3.5.1.jar.sha1 new file mode 100644 index 0000000000000..946cc395de28d --- /dev/null +++ b/plugins/repository-gcs/licenses/protobuf-java-3.5.1.jar.sha1 @@ -0,0 +1 @@ +8c3492f7662fa1cbf8ca76a0f5eb1146f7725acd \ No newline at end of file diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java index eaf1e995ee264..a2962f643b96c 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -55,9 +55,15 @@ public class GoogleCloudStorageClientSettings { /** An override for the Storage host name to connect to. */ static final Setting.AffixSetting HOST_SETTING = Setting.affixKeySetting(PREFIX, "host", - key -> Setting.simpleString(key, - ENDPOINT_SETTING.getConcreteSetting(key.substring(0, key.length() - "host".length()) + "endpoint"), - Setting.Property.NodeScope)); + key -> { + if (key.endsWith("host")) { + return Setting.simpleString(key, + ENDPOINT_SETTING.getConcreteSetting(key.substring(0, key.length() - "host".length()) + "endpoint"), + Setting.Property.NodeScope); + } else { + return Setting.simpleString(key, Setting.Property.NodeScope); + } + }); /** An override for the Google Project ID. 
*/ static final Setting.AffixSetting PROJECT_ID_SETTING = Setting.affixKeySetting(PREFIX, "project_id", @@ -160,7 +166,8 @@ public static Map load(final Settings static GoogleCloudStorageClientSettings getClientSettings(final Settings settings, final String clientName) { return new GoogleCloudStorageClientSettings( loadCredential(settings, clientName), - getConfigValue(settings, clientName, HOST_SETTING), getConfigValue(settings, clientName, PROJECT_ID_SETTING), + getConfigValue(settings, clientName, HOST_SETTING), + getConfigValue(settings, clientName, PROJECT_ID_SETTING), getConfigValue(settings, clientName, CONNECT_TIMEOUT_SETTING), getConfigValue(settings, clientName, READ_TIMEOUT_SETTING), getConfigValue(settings, clientName, APPLICATION_NAME_SETTING) From 13f039a86fbc5426902db7493fef2613eb5bddc3 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sat, 21 Apr 2018 11:23:03 +0300 Subject: [PATCH 08/45] X content error --- .../repositories/gcs/GoogleCloudStorageBlobStore.java | 6 ++++-- .../org/elasticsearch/repositories/gcs/MockStorage.java | 6 +++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index bfac50097fb53..c317b59df5d6a 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -165,14 +165,16 @@ InputStream readBlob(String blobName) throws IOException { return new Tuple<>(blob.reader(), blob.getSize()); }); if (readerAndSize == null) { - throw new IOException("Blob [" + blobName + "] does not exit."); + throw new NoSuchFileException("Blob [" + blobName + "] does not exit."); } final ByteBuffer buffer = ByteBuffer.allocate(64 * 1024); // first read pull data buffer.flip(); return new InputStream() { long bytesRemaining = readerAndSize.v2(); - @SuppressForbidden(reason = "this reader is backed by a socket instead of a file") + + @SuppressForbidden(reason = "the reader channel is backed by a socket instead of a file," + + "ie. 
it is not seekable and reading should advance the readers's internal position") @Override public int read() throws IOException { while (true) { diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java index 0d1a68e9ee0ce..9c7d8581b36db 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java @@ -67,8 +67,8 @@ class MockStorage implements Storage { private final ConcurrentMap blobsMap; @SuppressForbidden(reason = "mocking here requires reflection that trespasses the access system") - MockStorage(final String bucketName, final ConcurrentMap blobs) { - this.blobsMap = blobs; + MockStorage(final String bucketName, final ConcurrentMap blobsMap) { + this.blobsMap = blobsMap; // mock bucket this.theBucket = mock(Bucket.class); when(this.theBucket.getName()).thenReturn(bucketName); @@ -104,7 +104,7 @@ public Iterable iterateAll() { @Override public Iterable getValues() { - return () -> blobs.entrySet() + return () -> MockStorage.this.blobsMap.entrySet() .stream() .filter(entry1 -> entry1.getKey().startsWith(prefixValue)) .map(entry2 -> entry2.getValue()) From c2be3592ad4f3fff28acae80bebdfde56b871824 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sat, 21 Apr 2018 14:41:35 +0300 Subject: [PATCH 09/45] Dreadfull mock reader bug --- .../repositories/gcs/GoogleCloudStorageBlobStore.java | 5 +++-- .../java/org/elasticsearch/repositories/gcs/MockStorage.java | 4 +++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index c317b59df5d6a..d436caa1deb83 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -167,7 +167,7 @@ InputStream readBlob(String blobName) throws IOException { if (readerAndSize == null) { throw new NoSuchFileException("Blob [" + blobName + "] does not exit."); } - final ByteBuffer buffer = ByteBuffer.allocate(64 * 1024); + final ByteBuffer buffer = ByteBuffer.allocate(8 * 1024); // first read pull data buffer.flip(); return new InputStream() { @@ -184,13 +184,14 @@ public int read() throws IOException { // pull another chunck buffer.clear(); final long bytesRead = SocketAccess.doPrivilegedIOException(() -> readerAndSize.v1().read(buffer)); + buffer.flip(); if (bytesRead < 0) { return -1; } else if ((bytesRead == 0) && (bytesRemaining == 0)) { return -1; } bytesRemaining -= bytesRead; - buffer.flip(); + // retry in case of non-blocking socket } } } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java index 9c7d8581b36db..97957ea43e19c 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java @@ -555,7 +555,9 @@ private static Blob constructMockBlob(String blobName, byte[] data, ConcurrentMa when(blobMock.getName()).thenReturn(blobName); 
when(blobMock.getSize()).thenReturn((long) data.length); when(blobMock.reload(Matchers.anyVararg())).thenReturn(blobMock); - when(blobMock.reader(Matchers.anyVararg())).thenReturn(new ReadChannelFromByteArray(data)); + doAnswer(invocation -> { + return new ReadChannelFromByteArray(data); + }).when(blobMock).reader(Matchers.anyVararg()); when(blobMock.copyTo(Matchers.anyString(), Matchers.anyVararg())) .thenThrow(new RuntimeException("Mock not implemented. Only a single bucket is mocked.")); doAnswer(invocation -> { From ac1dd1c4005b61af8a231026b70f89fcab09dad7 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sat, 21 Apr 2018 18:27:09 +0300 Subject: [PATCH 10/45] Unit and Integ tests finally budge --- ...GoogleCloudStorageClientSettingsTests.java | 71 ++++++++++++------- 1 file changed, 45 insertions(+), 26 deletions(-) diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java index 2f07cb1ce2435..845cd4066fa5f 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; @@ -29,8 +30,10 @@ import java.nio.charset.StandardCharsets; import java.security.KeyPair; import java.security.KeyPairGenerator; +import java.util.ArrayList; import java.util.Base64; import java.util.HashMap; +import java.util.List; import java.util.Locale; import java.util.Map; @@ -38,6 +41,7 @@ import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.HOST_SETTING; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.ENDPOINT_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.PROJECT_ID_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.getClientSettings; @@ -46,25 +50,26 @@ public class GoogleCloudStorageClientSettingsTests extends ESTestCase { public void testLoadWithEmptySettings() { - Map clientsSettings = GoogleCloudStorageClientSettings.load(Settings.EMPTY); + final Map clientsSettings = GoogleCloudStorageClientSettings.load(Settings.EMPTY); assertEquals(1, clientsSettings.size()); assertNotNull(clientsSettings.get("default")); } public void testLoad() throws Exception { final int nbClients = randomIntBetween(1, 5); - final Tuple, Settings> randomClients = randomClients(nbClients); + final List> deprecationWarnings = new ArrayList<>(); + final Tuple, Settings> randomClients = randomClients(nbClients, deprecationWarnings); final Map expectedClientsSettings = randomClients.v1(); - Map actualClientsSettings = GoogleCloudStorageClientSettings.load(randomClients.v2()); + final Map 
actualClientsSettings = GoogleCloudStorageClientSettings + .load(randomClients.v2()); assertEquals(expectedClientsSettings.size(), actualClientsSettings.size()); - for (String clientName : expectedClientsSettings.keySet()) { - GoogleCloudStorageClientSettings actualClientSettings = actualClientsSettings.get(clientName); + for (final String clientName : expectedClientsSettings.keySet()) { + final GoogleCloudStorageClientSettings actualClientSettings = actualClientsSettings.get(clientName); assertNotNull(actualClientSettings); - GoogleCloudStorageClientSettings expectedClientSettings = expectedClientsSettings.get(clientName); + final GoogleCloudStorageClientSettings expectedClientSettings = expectedClientsSettings.get(clientName); assertNotNull(expectedClientSettings); - assertGoogleCredential(expectedClientSettings.getCredential(), actualClientSettings.getCredential()); assertEquals(expectedClientSettings.getHost(), actualClientSettings.getHost()); assertEquals(expectedClientSettings.getProjectId(), actualClientSettings.getProjectId()); @@ -72,34 +77,40 @@ public void testLoad() throws Exception { assertEquals(expectedClientSettings.getReadTimeout(), actualClientSettings.getReadTimeout()); assertEquals(expectedClientSettings.getApplicationName(), actualClientSettings.getApplicationName()); } + + if (deprecationWarnings.isEmpty() == false) { + assertSettingDeprecationsAndWarnings(deprecationWarnings.toArray(new Setting[0])); + } } public void testLoadCredential() throws Exception { - Tuple, Settings> randomClient = randomClients(1); - GoogleCloudStorageClientSettings expectedClientSettings = randomClient.v1().values().iterator().next(); - String clientName = randomClient.v1().keySet().iterator().next(); - + final List> deprecationWarnings = new ArrayList<>(); + final Tuple, Settings> randomClient = randomClients(1, deprecationWarnings); + final GoogleCloudStorageClientSettings expectedClientSettings = randomClient.v1().values().iterator().next(); + final String clientName = randomClient.v1().keySet().iterator().next(); assertGoogleCredential(expectedClientSettings.getCredential(), loadCredential(randomClient.v2(), clientName)); } /** Generates a given number of GoogleCloudStorageClientSettings along with the Settings to build them from **/ - private Tuple, Settings> randomClients(final int nbClients) throws Exception { + private Tuple, Settings> randomClients(final int nbClients, + final List> deprecationWarnings) + throws Exception { final Map expectedClients = new HashMap<>(); - expectedClients.put("default", getClientSettings(Settings.EMPTY, "default")); final Settings.Builder settings = Settings.builder(); final MockSecureSettings secureSettings = new MockSecureSettings(); for (int i = 0; i < nbClients; i++) { - String clientName = randomAlphaOfLength(5).toLowerCase(Locale.ROOT); - - GoogleCloudStorageClientSettings clientSettings = randomClient(clientName, settings, secureSettings); + final String clientName = randomAlphaOfLength(5).toLowerCase(Locale.ROOT); + final GoogleCloudStorageClientSettings clientSettings = randomClient(clientName, settings, secureSettings, deprecationWarnings); expectedClients.put(clientName, clientSettings); } if (randomBoolean()) { - GoogleCloudStorageClientSettings clientSettings = randomClient("default", settings, secureSettings); + final GoogleCloudStorageClientSettings clientSettings = randomClient("default", settings, secureSettings, deprecationWarnings); expectedClients.put("default", clientSettings); + } else { + expectedClients.put("default", 
getClientSettings(Settings.EMPTY, "default")); } return Tuple.tuple(expectedClients, settings.setSecureSettings(secureSettings).build()); @@ -108,16 +119,22 @@ private Tuple, Settings> randomCli /** Generates a random GoogleCloudStorageClientSettings along with the Settings to build it **/ private static GoogleCloudStorageClientSettings randomClient(final String clientName, final Settings.Builder settings, - final MockSecureSettings secureSettings) throws Exception { + final MockSecureSettings secureSettings, + final List> deprecationWarnings) throws Exception { - Tuple credentials = randomCredential(clientName); - ServiceAccountCredentials credential = credentials.v1(); + final Tuple credentials = randomCredential(clientName); + final ServiceAccountCredentials credential = credentials.v1(); secureSettings.setFile(CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName).getKey(), credentials.v2()); String host; if (randomBoolean()) { host = randomAlphaOfLength(5); - settings.put(HOST_SETTING.getConcreteSettingForNamespace(clientName).getKey(), host); + if (randomBoolean()) { + settings.put(HOST_SETTING.getConcreteSettingForNamespace(clientName).getKey(), host); + } else { + settings.put(ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), host); + deprecationWarnings.add(ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName)); + } } else { host = HOST_SETTING.getDefault(Settings.EMPTY); } @@ -150,6 +167,7 @@ private static GoogleCloudStorageClientSettings randomClient(final String client if (randomBoolean()) { applicationName = randomAlphaOfLength(5); settings.put(APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName).getKey(), applicationName); + deprecationWarnings.add(APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName)); } else { applicationName = APPLICATION_NAME_SETTING.getDefault(Settings.EMPTY); } @@ -159,16 +177,17 @@ private static GoogleCloudStorageClientSettings randomClient(final String client /** Generates a random GoogleCredential along with its corresponding Service Account file provided as a byte array **/ private static Tuple randomCredential(final String clientName) throws Exception { - KeyPair keyPair = KeyPairGenerator.getInstance("RSA").generateKeyPair(); + final KeyPair keyPair = KeyPairGenerator.getInstance("RSA").generateKeyPair(); - ServiceAccountCredentials.Builder credentialBuilder = ServiceAccountCredentials.newBuilder(); - credentialBuilder.setClientId(clientName); + final ServiceAccountCredentials.Builder credentialBuilder = ServiceAccountCredentials.newBuilder(); + credentialBuilder.setClientId("id_" + clientName); + credentialBuilder.setClientEmail(clientName); credentialBuilder.setProjectId("project_id_" + clientName); credentialBuilder.setPrivateKey(keyPair.getPrivate()); credentialBuilder.setPrivateKeyId("private_key_id_" + clientName); - String encodedPrivateKey = Base64.getEncoder().encodeToString(keyPair.getPrivate().getEncoded()); - String serviceAccount = "{\"type\":\"service_account\"," + + final String encodedPrivateKey = Base64.getEncoder().encodeToString(keyPair.getPrivate().getEncoded()); + final String serviceAccount = "{\"type\":\"service_account\"," + "\"project_id\":\"project_id_" + clientName + "\"," + "\"private_key_id\":\"private_key_id_" + clientName + "\"," + "\"private_key\":\"-----BEGIN PRIVATE KEY-----\\n" + From 893368197bf8afc12fff7b0a4ab5411280d67404 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 23 Apr 2018 14:03:35 +0300 Subject: [PATCH 11/45] 
Authentication failed 401 --- plugins/repository-gcs/build.gradle | 11 +- ...google-http-client-jackson-1.23.0.jar.sha1 | 1 + .../licenses/grpc-context-1.9.0.jar.sha1 | 1 + ...i-LICENSE.txt => grpc-context-LICENSE.txt} | 0 ...api-NOTICE.txt => grpc-context-NOTICE.txt} | 0 .../licenses/jackson-core-asl-1.9.13.jar.sha1 | 1 + .../licenses/jackson-core-asl-LICENSE.txt | 202 ++++++++++++++++++ .../licenses/jackson-core-asl-NOTICE.txt | 0 .../licenses/opencensus-LICENSE.txt | 202 ++++++++++++++++++ .../licenses/opencensus-NOTICE.txt | 0 ...encensus-contrib-http-util-0.11.1.jar.sha1 | 1 + .../licenses/protobuf-java-3.5.1.jar.sha1 | 1 - .../gcs/GoogleCloudStorageClientSettings.java | 2 +- .../gcs/GoogleCloudStoragePlugin.java | 71 ++++++ .../gcs/GoogleCloudStorageService.java | 6 +- 15 files changed, 492 insertions(+), 7 deletions(-) create mode 100644 plugins/repository-gcs/licenses/google-http-client-jackson-1.23.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/grpc-context-1.9.0.jar.sha1 rename plugins/repository-gcs/licenses/{opencensus-api-LICENSE.txt => grpc-context-LICENSE.txt} (100%) rename plugins/repository-gcs/licenses/{opencensus-api-NOTICE.txt => grpc-context-NOTICE.txt} (100%) create mode 100644 plugins/repository-gcs/licenses/jackson-core-asl-1.9.13.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/jackson-core-asl-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/jackson-core-asl-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/opencensus-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/opencensus-NOTICE.txt create mode 100644 plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.11.1.jar.sha1 delete mode 100644 plugins/repository-gcs/licenses/protobuf-java-3.5.1.jar.sha1 diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index c59d6fd1fd6f1..d948c8dfb95dd 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -43,16 +43,21 @@ dependencies { compile 'org.threeten:threetenbp:1.3.6' compile 'com.google.api:gax:1.23.0' compile 'com.google.http-client:google-http-client:1.23.0' + compile 'com.google.http-client:google-http-client-jackson:1.23.0' compile 'com.google.http-client:google-http-client-jackson2:1.23.0' compile 'com.google.http-client:google-http-client-appengine:1.23.0' compile 'com.google.guava:guava:20.0' compile 'com.google.apis:google-api-services-storage:v1-rev115-1.23.0' - //compile 'com.fasterxml.jackson.core:jackson-core:2.1.3' - compile 'com.google.protobuf:protobuf-java:3.5.1' + compile 'io.opencensus:opencensus-api:0.11.1' + compile 'io.opencensus:opencensus-contrib-http-util:0.11.1' + compile 'org.codehaus.jackson:jackson-core-asl:1.9.13' + compile 'io.grpc:grpc-context:1.9.0' compile 'io.opencensus:opencensus-api:0.11.1' compile 'com.google.api.grpc:proto-google-common-protos:1.8.0' + //compile 'com.google.protobuf:protobuf-java:3.5.1' + //compile 'com.fasterxml.jackson.core:jackson-core:2.1.3' //compile 'com.google.api:gax-grpc:1.24.0' //compile 'org.threeten:threetenbp:1.3.6' //compile 'com.google.auth:google-auth-library-oauth2-http:0.9.1' @@ -73,6 +78,7 @@ dependencyLicenses { mapping from: /google-cloud-.*/, to: 'google-cloud' mapping from: /google-auth-.*/, to: 'google-auth' mapping from: /google-http-.*/, to: 'google-http' + mapping from: /opencensus.*/, to: 'opencensus' } thirdPartyAudit.excludes = [ @@ -132,4 +138,5 @@ integTestCluster { /* Use a closure on the string to delay evaluation until tests are 
executed */ setting 'gcs.client.integration_test.host', "http://${ -> googleCloudStorageFixture.addressAndPort }" + setting 'gcs.client.integration_test.project_id', "integration_test" } diff --git a/plugins/repository-gcs/licenses/google-http-client-jackson-1.23.0.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-jackson-1.23.0.jar.sha1 new file mode 100644 index 0000000000000..85ba0ab798d05 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-jackson-1.23.0.jar.sha1 @@ -0,0 +1 @@ +a72ea3a197937ef63a893e73df312dac0d813663 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/grpc-context-1.9.0.jar.sha1 b/plugins/repository-gcs/licenses/grpc-context-1.9.0.jar.sha1 new file mode 100644 index 0000000000000..02bac0e492074 --- /dev/null +++ b/plugins/repository-gcs/licenses/grpc-context-1.9.0.jar.sha1 @@ -0,0 +1 @@ +28b0836f48c9705abf73829bbc536dba29a1329a \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/opencensus-api-LICENSE.txt b/plugins/repository-gcs/licenses/grpc-context-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/opencensus-api-LICENSE.txt rename to plugins/repository-gcs/licenses/grpc-context-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/opencensus-api-NOTICE.txt b/plugins/repository-gcs/licenses/grpc-context-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/opencensus-api-NOTICE.txt rename to plugins/repository-gcs/licenses/grpc-context-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/jackson-core-asl-1.9.13.jar.sha1 b/plugins/repository-gcs/licenses/jackson-core-asl-1.9.13.jar.sha1 new file mode 100644 index 0000000000000..c5016bf828d60 --- /dev/null +++ b/plugins/repository-gcs/licenses/jackson-core-asl-1.9.13.jar.sha1 @@ -0,0 +1 @@ +3c304d70f42f832e0a86d45bd437f692129299a4 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/jackson-core-asl-LICENSE.txt b/plugins/repository-gcs/licenses/jackson-core-asl-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/repository-gcs/licenses/jackson-core-asl-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-gcs/licenses/jackson-core-asl-NOTICE.txt b/plugins/repository-gcs/licenses/jackson-core-asl-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/opencensus-LICENSE.txt b/plugins/repository-gcs/licenses/opencensus-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/repository-gcs/licenses/opencensus-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/opencensus-NOTICE.txt b/plugins/repository-gcs/licenses/opencensus-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.11.1.jar.sha1 b/plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.11.1.jar.sha1 new file mode 100644 index 0000000000000..c0b04f0f8ccce --- /dev/null +++ b/plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.11.1.jar.sha1 @@ -0,0 +1 @@ +82e572b41e81ecf58d0d1e9a3953a05aa8f9c84b \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/protobuf-java-3.5.1.jar.sha1 b/plugins/repository-gcs/licenses/protobuf-java-3.5.1.jar.sha1 deleted file mode 100644 index 946cc395de28d..0000000000000 --- a/plugins/repository-gcs/licenses/protobuf-java-3.5.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8c3492f7662fa1cbf8ca76a0f5eb1146f7725acd \ No newline at end of file diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java index a2962f643b96c..5aeb576502f3f 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -135,7 +135,7 @@ public String getHost() { } public String getProjectId() { - return projectId; + return projectId != null ? projectId : (credential != null ? credential.getProjectId() : null); } public TimeValue getConnectTimeout() { diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java index 16d7f654012c2..ec0df08f2a6ac 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java @@ -19,6 +19,18 @@ package org.elasticsearch.repositories.gcs; +import com.google.api.client.auth.oauth2.TokenRequest; +import com.google.api.client.auth.oauth2.TokenResponse; +import com.google.api.client.googleapis.json.GoogleJsonError; +import com.google.api.client.http.GenericUrl; +import com.google.api.client.http.HttpHeaders; +import com.google.api.client.json.GenericJson; +import com.google.api.client.json.webtoken.JsonWebSignature; +import com.google.api.client.json.webtoken.JsonWebToken; +import com.google.api.client.util.ClassInfo; +import com.google.api.client.util.Data; + +import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -27,6 +39,8 @@ import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.Repository; +import java.security.AccessController; +import java.security.PrivilegedAction; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -34,6 +48,63 @@ public class GoogleCloudStoragePlugin extends Plugin implements RepositoryPlugin { + static { + /* + * Google HTTP client changes access levels because its silly and we can't allow + * that on any old stack stack so we pull it here, up front, so we can cleanly + * check the permissions 
for it. Without this changing the permission can fail + * if any part of core is on the stack because our plugin permissions don't + * allow core to "reach through" plugins to change the permission. Because + * that'd be silly. + */ + SpecialPermission.check(); + AccessController.doPrivileged((PrivilegedAction) () -> { + // ClassInfo put in cache all the fields of a given class + // that are annoted with @Key; at the same time it changes + // the field access level using setAccessible(). Calling + // them here put the ClassInfo in cache (they are never evicted) + // before the SecurityManager is installed. + ClassInfo.of(HttpHeaders.class, true); + + ClassInfo.of(JsonWebSignature.Header.class, false); + ClassInfo.of(JsonWebToken.Payload.class, false); + + ClassInfo.of(TokenRequest.class, false); + ClassInfo.of(TokenResponse.class, false); + + ClassInfo.of(GenericJson.class, false); + ClassInfo.of(GenericUrl.class, false); + + Data.nullOf(GoogleJsonError.ErrorInfo.class); + ClassInfo.of(GoogleJsonError.class, false); + + // Data.nullOf(Bucket.Cors.class); + // ClassInfo.of(Bucket.class, false); + // ClassInfo.of(Bucket.Cors.class, false); + // ClassInfo.of(Bucket.Lifecycle.class, false); + // ClassInfo.of(Bucket.Logging.class, false); + // ClassInfo.of(Bucket.Owner.class, false); + // ClassInfo.of(Bucket.Versioning.class, false); + // ClassInfo.of(Bucket.Website.class, false); + // + // ClassInfo.of(StorageObject.class, false); + // ClassInfo.of(StorageObject.Owner.class, false); + // + // ClassInfo.of(Objects.class, false); + // + // ClassInfo.of(Storage.Buckets.Get.class, false); + // ClassInfo.of(Storage.Buckets.Insert.class, false); + // + // ClassInfo.of(Storage.Objects.Get.class, false); + // ClassInfo.of(Storage.Objects.Insert.class, false); + // ClassInfo.of(Storage.Objects.Delete.class, false); + // ClassInfo.of(Storage.Objects.Copy.class, false); + // ClassInfo.of(Storage.Objects.List.class, false); + + return null; + }); + } + private final Map clientsSettings; public GoogleCloudStoragePlugin(final Settings settings) { diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index 44a9479af063d..c82fa565463b0 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -77,15 +77,15 @@ public Storage createClient(String clientName) { } return mapBuilder.immutableMap(); }); - if (Strings.hasLength(clientSettings.getProjectId())) { - storageOptionsBuilder.setProjectId(clientSettings.getProjectId()); - } if (Strings.hasLength(clientSettings.getHost())) { storageOptionsBuilder.setHost(clientSettings.getHost()); } if (clientSettings.getCredential() != null) { storageOptionsBuilder.setCredentials(clientSettings.getCredential()); } + if (Strings.hasLength(clientSettings.getProjectId())) { + storageOptionsBuilder.setProjectId(clientSettings.getProjectId()); + } return storageOptionsBuilder.build().getService(); } From 87d782a91c5b78aec9db64050cb88f6bacd63d86 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 23 Apr 2018 17:32:27 +0300 Subject: [PATCH 12/45] Storage scopes --- .../gcs/GoogleCloudStorageClientSettings.java | 22 +++++++++++++------ ...GoogleCloudStorageClientSettingsTests.java | 2 +- 2 files changed, 16 insertions(+), 8 deletions(-) diff 
--git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java index 5aeb576502f3f..5ab627ee73648 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.repositories.gcs; +import com.google.api.services.storage.StorageScopes; import com.google.auth.oauth2.ServiceAccountCredentials; - import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -175,13 +175,16 @@ static GoogleCloudStorageClientSettings getClientSettings(final Settings setting } /** - * Loads the service account file corresponding to a given client name. If no file is defined for the client, - * a {@code null} credential is returned. + * Loads the service account file corresponding to a given client name. If no + * file is defined for the client, a {@code null} credential is returned. * - * @param settings the {@link Settings} - * @param clientName the client name + * @param settings + * the {@link Settings} + * @param clientName + * the client name * - * @return the {@link GoogleCredential} to use for the given client, {@code null} if no service account is defined. + * @return the {@link ServiceAccountCredentials} to use for the given client, + * {@code null} if no service account is defined. */ static ServiceAccountCredentials loadCredential(final Settings settings, final String clientName) { try { @@ -191,7 +194,12 @@ static ServiceAccountCredentials loadCredential(final Settings settings, final S return null; } try (InputStream credStream = CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName).get(settings)) { - return ServiceAccountCredentials.fromStream(credStream); + final ServiceAccountCredentials credentials = ServiceAccountCredentials.fromStream(credStream); + if (credentials.createScopedRequired()) { + return (ServiceAccountCredentials) credentials + .createScoped(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)); + } + return credentials; } } catch (final IOException e) { throw new UncheckedIOException(e); diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java index 845cd4066fa5f..be8a2749a4700 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java @@ -209,7 +209,7 @@ private static TimeValue randomTimeout() { return randomFrom(TimeValue.MINUS_ONE, TimeValue.ZERO, TimeValue.parseTimeValue(randomPositiveTimeValue(), "test")); } - private static void assertGoogleCredential(final ServiceAccountCredentials expected, final ServiceAccountCredentials actual) { + private static void assertGoogleCredential(ServiceAccountCredentials expected, ServiceAccountCredentials actual) { if (expected != null) { assertEquals(expected.getServiceAccountUser(), actual.getServiceAccountUser()); assertEquals(expected.getClientId(), 
actual.getClientId()); From b1cbf706cd0909ed8e9c8cbcbb9f83bbcc109c13 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 24 Apr 2018 02:38:14 +0300 Subject: [PATCH 13/45] Closer --- plugins/repository-gcs/build.gradle | 1 + .../repositories/gcs/GoogleCloudStorageClientSettings.java | 4 +++- .../src/main/plugin-metadata/plugin-security.policy | 2 ++ 3 files changed, 6 insertions(+), 1 deletion(-) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index d948c8dfb95dd..2c8cc4c73903e 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -45,6 +45,7 @@ dependencies { compile 'com.google.http-client:google-http-client:1.23.0' compile 'com.google.http-client:google-http-client-jackson:1.23.0' compile 'com.google.http-client:google-http-client-jackson2:1.23.0' + compile 'com.google.api:gax-httpjson:0.40.0' compile 'com.google.http-client:google-http-client-appengine:1.23.0' compile 'com.google.guava:guava:20.0' compile 'com.google.apis:google-api-services-storage:v1-rev115-1.23.0' diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java index 5ab627ee73648..f2e2832c11d8f 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -20,6 +20,8 @@ import com.google.api.services.storage.StorageScopes; import com.google.auth.oauth2.ServiceAccountCredentials; + +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -135,7 +137,7 @@ public String getHost() { } public String getProjectId() { - return projectId != null ? projectId : (credential != null ? credential.getProjectId() : null); + return Strings.hasLength(projectId) ? projectId : (credential != null ? 
credential.getProjectId() : null); } public TimeValue getConnectTimeout() { diff --git a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy index ce9b0334638a0..b7d8bacba4ec9 100644 --- a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy @@ -26,4 +26,6 @@ grant { // gcs client opens socket connections for to access repository permission java.net.SocketPermission "*", "connect"; + + //permission java.io.FilePermission "<>", "read"; }; From ee8755ae309688d7b8f446c48c23a2d43e361f87 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 24 Apr 2018 17:55:04 +0300 Subject: [PATCH 14/45] Permissions WIP --- plugins/repository-gcs/build.gradle | 1 - .../gcs/GoogleCloudStorageBlobStore.java | 14 ++++++-------- .../gcs/GoogleCloudStorageService.java | 12 ++++++++++-- .../main/plugin-metadata/plugin-security.policy | 5 +++-- 4 files changed, 19 insertions(+), 13 deletions(-) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 2c8cc4c73903e..39668cd57b151 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -139,5 +139,4 @@ integTestCluster { /* Use a closure on the string to delay evaluation until tests are executed */ setting 'gcs.client.integration_test.host', "http://${ -> googleCloudStorageFixture.addressAndPort }" - setting 'gcs.client.integration_test.project_id', "integration_test" } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index d436caa1deb83..7cc45e318ab2c 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -88,13 +88,11 @@ public void close() { */ boolean doesBucketExist(String bucketName) { try { - return SocketAccess.doPrivilegedIOException(() -> { - final Bucket bucket = storage.get(bucketName); - if (bucket != null) { - return Strings.hasText(bucket.getName()); - } - return false; - }); + final Bucket bucket = SocketAccess.doPrivilegedIOException(() -> storage.get(bucketName)); + if (bucket != null) { + return Strings.hasText(bucket.getName()); + } + return false; } catch (final Exception e) { throw new BlobStoreException("Unable to check if bucket [" + bucketName + "] exists", e); } @@ -261,7 +259,7 @@ void deleteBlobs(Collection blobNames) throws IOException { return; } final List blobIdsToDelete = blobNames.stream().map(blobName -> BlobId.of(bucket, blobName)).collect(Collectors.toList()); - final List deletedStatuses = storage.delete(blobIdsToDelete); + final List deletedStatuses = SocketAccess.doPrivilegedIOException(() -> storage.delete(blobIdsToDelete)); assert blobIdsToDelete.size() == deletedStatuses.size(); boolean failed = false; for (int i = 0; i < blobIdsToDelete.size(); i++) { diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index c82fa565463b0..9d2a13a9e79db 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java 
+++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -19,10 +19,13 @@ package org.elasticsearch.repositories.gcs; +import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; +import com.google.api.client.http.javanet.NetHttpTransport; import com.google.api.gax.retrying.RetrySettings; import com.google.cloud.http.HttpTransportOptions; import com.google.cloud.storage.Storage; import com.google.cloud.storage.StorageOptions; + import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractComponent; @@ -30,6 +33,8 @@ import org.elasticsearch.env.Environment; import org.threeten.bp.Duration; +import java.io.IOException; +import java.security.GeneralSecurityException; import java.util.Map; public class GoogleCloudStorageService extends AbstractComponent { @@ -57,15 +62,17 @@ public GoogleCloudStorageService(Environment environment, Map netHttpTransport) .build(); final StorageOptions.Builder storageOptionsBuilder = StorageOptions.newBuilder() .setRetrySettings(retrySettings) @@ -86,7 +93,8 @@ public Storage createClient(String clientName) { if (Strings.hasLength(clientSettings.getProjectId())) { storageOptionsBuilder.setProjectId(clientSettings.getProjectId()); } - return storageOptionsBuilder.build().getService(); + final StorageOptions storageOptions = storageOptionsBuilder.build(); + return storageOptions.getService(); } /** diff --git a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy index b7d8bacba4ec9..b662c7886d4ee 100644 --- a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy @@ -20,9 +20,10 @@ grant { permission java.lang.RuntimePermission "accessDeclaredMembers"; permission java.lang.RuntimePermission "setFactory"; + permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; - permission java.net.URLPermission "http://www.googleapis.com/*", "*"; - permission java.net.URLPermission "https://www.googleapis.com/*", "*"; + permission java.net.URLPermission "https://www.googleapis.com/-", "*:*"; + permission java.net.URLPermission "https://accounts.google.com/-", "*:*"; // gcs client opens socket connections for to access repository permission java.net.SocketPermission "*", "connect"; From 5a12d761c237417c27e8646b8f1ea852f6a4e9ec Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 25 Apr 2018 14:54:12 +0300 Subject: [PATCH 15/45] Licenses & some deps curated --- plugins/repository-gcs/build.gradle | 50 ++++++------------- .../licenses/gax-httpjson-0.40.0.jar.sha1 | 1 + .../licenses/gax-httpjson-LICENSE.txt | 27 ++++++++++ .../licenses/gax-httpjson-NOTICE.txt | 0 ...GoogleCloudStorageClientSettingsTests.java | 6 +-- 5 files changed, 46 insertions(+), 38 deletions(-) create mode 100644 plugins/repository-gcs/licenses/gax-httpjson-0.40.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/gax-httpjson-LICENSE.txt create mode 100644 plugins/repository-gcs/licenses/gax-httpjson-NOTICE.txt diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 39668cd57b151..30c18a4150d78 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -1,8 +1,3 @@ -import 
org.elasticsearch.gradle.test.AntFixture - -import java.security.KeyPair -import java.security.KeyPairGenerator - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -22,57 +17,40 @@ import java.security.KeyPairGenerator * under the License. */ +import org.elasticsearch.gradle.test.AntFixture +import java.security.KeyPair +import java.security.KeyPairGenerator + + esplugin { description 'The GCS repository plugin adds Google Cloud Storage support for repositories.' classname 'org.elasticsearch.repositories.gcs.GoogleCloudStoragePlugin' } -//versions << [ -// 'google': '1.23.0', -//] - dependencies { compile 'com.google.cloud:google-cloud-storage:1.26.0' compile 'com.google.cloud:google-cloud-core:1.26.0' compile 'com.google.cloud:google-cloud-core-http:1.26.0' - compile 'com.google.api-client:google-api-client:1.23.0' compile 'com.google.auth:google-auth-library-oauth2-http:0.9.0' compile 'com.google.auth:google-auth-library-credentials:0.9.0' compile 'com.google.oauth-client:google-oauth-client:1.23.0' - compile 'com.google.api:api-common:1.5.0' - compile 'org.threeten:threetenbp:1.3.6' - compile 'com.google.api:gax:1.23.0' compile 'com.google.http-client:google-http-client:1.23.0' compile 'com.google.http-client:google-http-client-jackson:1.23.0' compile 'com.google.http-client:google-http-client-jackson2:1.23.0' - compile 'com.google.api:gax-httpjson:0.40.0' compile 'com.google.http-client:google-http-client-appengine:1.23.0' + compile 'com.google.api-client:google-api-client:1.23.0' + compile 'com.google.api:gax:1.23.0' + compile 'com.google.api:gax-httpjson:0.40.0' + compile 'com.google.api:api-common:1.5.0' + compile 'com.google.api.grpc:proto-google-common-protos:1.8.0' compile 'com.google.guava:guava:20.0' compile 'com.google.apis:google-api-services-storage:v1-rev115-1.23.0' - compile 'io.opencensus:opencensus-api:0.11.1' - compile 'io.opencensus:opencensus-contrib-http-util:0.11.1' compile 'org.codehaus.jackson:jackson-core-asl:1.9.13' compile 'io.grpc:grpc-context:1.9.0' - compile 'io.opencensus:opencensus-api:0.11.1' - compile 'com.google.api.grpc:proto-google-common-protos:1.8.0' - - //compile 'com.google.protobuf:protobuf-java:3.5.1' - //compile 'com.fasterxml.jackson.core:jackson-core:2.1.3' - //compile 'com.google.api:gax-grpc:1.24.0' - //compile 'org.threeten:threetenbp:1.3.6' - //compile 'com.google.auth:google-auth-library-oauth2-http:0.9.1' - //compile 'com.google.auth:google-auth-library-credentials:0.9.1' - - -// compile "com.google.apis:google-api-services-storage:v1-rev115-${versions.google}" -// compile "com.google.api-client:google-api-client:${versions.google}" -// compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" -// compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" -// compile "commons-logging:commons-logging:${versions.commonslogging}" -// compile "commons-codec:commons-codec:${versions.commonscodec}" -// compile "com.google.http-client:google-http-client:${versions.google}" -// compile "com.google.http-client:google-http-client-jackson2:${versions.google}" + compile 'io.opencensus:opencensus-api:0.11.1' + compile 'io.opencensus:opencensus-contrib-http-util:0.11.1' + compile 'org.threeten:threetenbp:1.3.6' } dependencyLicenses { @@ -84,6 +62,7 @@ dependencyLicenses { thirdPartyAudit.excludes = [ // classes are missing + //'com.google.appengine.api' //'com.google.common.base.Splitter', //'com.google.common.collect.Lists', 
//'javax.servlet.ServletContextEvent', @@ -103,6 +82,7 @@ forbiddenApisTest { task googleCloudStorageFixture(type: AntFixture) { dependsOn compileTestJava env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" + env 'ES_JAVA_OPTS', "-Djava.security.debug=access,failure" executable = new File(project.runtimeJavaHome, 'bin/java') args 'org.elasticsearch.repositories.gcs.GoogleCloudStorageFixture', baseDir, 'bucket_test' } diff --git a/plugins/repository-gcs/licenses/gax-httpjson-0.40.0.jar.sha1 b/plugins/repository-gcs/licenses/gax-httpjson-0.40.0.jar.sha1 new file mode 100644 index 0000000000000..c251ea1dd956c --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-httpjson-0.40.0.jar.sha1 @@ -0,0 +1 @@ +cb4bafbfd45b9d24efbb6138a31e37918fac015f \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gax-httpjson-LICENSE.txt b/plugins/repository-gcs/licenses/gax-httpjson-LICENSE.txt new file mode 100644 index 0000000000000..267561bb386de --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-httpjson-LICENSE.txt @@ -0,0 +1,27 @@ +Copyright 2016, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/plugins/repository-gcs/licenses/gax-httpjson-NOTICE.txt b/plugins/repository-gcs/licenses/gax-httpjson-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java index be8a2749a4700..d2ef577982f11 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.repositories.gcs; +import com.google.api.services.storage.StorageScopes; import com.google.auth.oauth2.ServiceAccountCredentials; import org.elasticsearch.common.collect.Tuple; @@ -32,6 +33,7 @@ import java.security.KeyPairGenerator; import java.util.ArrayList; import java.util.Base64; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; @@ -178,14 +180,13 @@ private static GoogleCloudStorageClientSettings randomClient(final String client /** Generates a random GoogleCredential along with its corresponding Service Account file provided as a byte array **/ private static Tuple randomCredential(final String clientName) throws Exception { final KeyPair keyPair = KeyPairGenerator.getInstance("RSA").generateKeyPair(); - final ServiceAccountCredentials.Builder credentialBuilder = ServiceAccountCredentials.newBuilder(); credentialBuilder.setClientId("id_" + clientName); credentialBuilder.setClientEmail(clientName); credentialBuilder.setProjectId("project_id_" + clientName); credentialBuilder.setPrivateKey(keyPair.getPrivate()); credentialBuilder.setPrivateKeyId("private_key_id_" + clientName); - + credentialBuilder.setScopes(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)); final String encodedPrivateKey = Base64.getEncoder().encodeToString(keyPair.getPrivate().getEncoded()); final String serviceAccount = "{\"type\":\"service_account\"," + "\"project_id\":\"project_id_" + clientName + "\"," + @@ -201,7 +202,6 @@ private static Tuple randomCredential(final S "\"client_x509_cert_url\":\"https://www.googleapis.com/robot/v1/metadata/x509/" + clientName + "%40appspot.gserviceaccount.com\"}"; - return Tuple.tuple(credentialBuilder.build(), serviceAccount.getBytes(StandardCharsets.UTF_8)); } From a5eb4c8b6eac96e610c51ca8cd03d7e0de9709a1 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 25 Apr 2018 17:58:11 +0300 Subject: [PATCH 16/45] Client initializer unit tests --- plugins/repository-gcs/build.gradle | 1 - .../gcs/GoogleCloudStorageBlobStore.java | 5 +- .../gcs/GoogleCloudStorageClientSettings.java | 1 + .../gcs/GoogleCloudStoragePlugin.java | 23 ---- .../gcs/GoogleCloudStorageService.java | 3 +- .../gcs/GoogleCloudStorageServiceTests.java | 106 ++++++++---------- 6 files changed, 54 insertions(+), 85 deletions(-) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 30c18a4150d78..5265aa1188678 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -82,7 +82,6 @@ forbiddenApisTest { task googleCloudStorageFixture(type: AntFixture) { dependsOn compileTestJava env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" - env 'ES_JAVA_OPTS', "-Djava.security.debug=access,failure" executable = 
new File(project.runtimeJavaHome, 'bin/java') args 'org.elasticsearch.repositories.gcs.GoogleCloudStorageFixture', baseDir, 'bucket_test' } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 7cc45e318ab2c..89ee953830b6b 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -172,10 +172,10 @@ InputStream readBlob(String blobName) throws IOException { long bytesRemaining = readerAndSize.v2(); @SuppressForbidden(reason = "the reader channel is backed by a socket instead of a file," - + "ie. it is not seekable and reading should advance the readers's internal position") + + "ie. it is not seekable and reading should advance the channel's internal position") @Override public int read() throws IOException { - while (true) { + while (bytesRemaining > 0) { try { return (0xFF & buffer.get()); } catch (final BufferUnderflowException e) { @@ -192,6 +192,7 @@ public int read() throws IOException { // retry in case of non-blocking socket } } + return -1; } @Override diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java index f2e2832c11d8f..a1b9544a7d675 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -58,6 +58,7 @@ public class GoogleCloudStorageClientSettings { /** An override for the Storage host name to connect to. 
*/ static final Setting.AffixSetting HOST_SETTING = Setting.affixKeySetting(PREFIX, "host", key -> { + // falback to the deprecated setting if (key.endsWith("host")) { return Setting.simpleString(key, ENDPOINT_SETTING.getConcreteSetting(key.substring(0, key.length() - "host".length()) + "endpoint"), diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java index ec0df08f2a6ac..4a770f477852e 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java @@ -78,29 +78,6 @@ public class GoogleCloudStoragePlugin extends Plugin implements RepositoryPlugin Data.nullOf(GoogleJsonError.ErrorInfo.class); ClassInfo.of(GoogleJsonError.class, false); - // Data.nullOf(Bucket.Cors.class); - // ClassInfo.of(Bucket.class, false); - // ClassInfo.of(Bucket.Cors.class, false); - // ClassInfo.of(Bucket.Lifecycle.class, false); - // ClassInfo.of(Bucket.Logging.class, false); - // ClassInfo.of(Bucket.Owner.class, false); - // ClassInfo.of(Bucket.Versioning.class, false); - // ClassInfo.of(Bucket.Website.class, false); - // - // ClassInfo.of(StorageObject.class, false); - // ClassInfo.of(StorageObject.Owner.class, false); - // - // ClassInfo.of(Objects.class, false); - // - // ClassInfo.of(Storage.Buckets.Get.class, false); - // ClassInfo.of(Storage.Buckets.Insert.class, false); - // - // ClassInfo.of(Storage.Objects.Get.class, false); - // ClassInfo.of(Storage.Objects.Insert.class, false); - // ClassInfo.of(Storage.Objects.Delete.class, false); - // ClassInfo.of(Storage.Objects.Copy.class, false); - // ClassInfo.of(Storage.Objects.List.class, false); - return null; }); } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index 9d2a13a9e79db..4418c1d0b9c1d 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -93,8 +93,7 @@ public Storage createClient(String clientName) throws GeneralSecurityException, if (Strings.hasLength(clientSettings.getProjectId())) { storageOptionsBuilder.setProjectId(clientSettings.getProjectId()); } - final StorageOptions storageOptions = storageOptionsBuilder.build(); - return storageOptions.getService(); + return storageOptionsBuilder.build().getService(); } /** diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java index 3a8082623d656..b5891e1159f96 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java @@ -19,68 +19,60 @@ package org.elasticsearch.repositories.gcs; +import com.google.auth.Credentials; +import com.google.cloud.http.HttpTransportOptions; +import com.google.cloud.storage.Storage; + +import org.elasticsearch.common.settings.Setting; +import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; +import java.io.IOException; +import java.security.GeneralSecurityException; +import java.util.Collections; +import java.util.Locale; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class GoogleCloudStorageServiceTests extends ESTestCase { - /** - * Test that the {@link GoogleCloudStorageService.DefaultHttpRequestInitializer} attaches new instances - * of {@link HttpIOExceptionHandler} and {@link HttpUnsuccessfulResponseHandler} for every HTTP requests. - */ - // public void testDefaultHttpRequestInitializer() throws IOException { - // final Environment environment = mock(Environment.class); - // when(environment.settings()).thenReturn(Settings.EMPTY); - // - // final GoogleCredential credential = mock(GoogleCredential.class); - // when(credential.handleResponse(any(HttpRequest.class), - // any(HttpResponse.class), anyBoolean())).thenReturn(false); - // - // final TimeValue readTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, - // 120)); - // final TimeValue connectTimeout = - // TimeValue.timeValueSeconds(randomIntBetween(1, 120)); - // final String endpoint = randomBoolean() ? randomAlphaOfLength(10) : null; - // final String applicationName = randomBoolean() ? randomAlphaOfLength(10) : - // null; - // - // final GoogleCloudStorageClientSettings clientSettings = - // new GoogleCloudStorageClientSettings(credential, endpoint, connectTimeout, - // readTimeout, applicationName); - // - // final HttpRequestInitializer initializer = - // GoogleCloudStorageService.createRequestInitializer(clientSettings); - // final HttpRequestFactory requestFactory = new - // MockHttpTransport().createRequestFactory(initializer); - // - // final HttpRequest request1 = requestFactory.buildGetRequest(new - // GenericUrl()); - // assertEquals((int) connectTimeout.millis(), request1.getConnectTimeout()); - // assertEquals((int) readTimeout.millis(), request1.getReadTimeout()); - // assertSame(credential, request1.getInterceptor()); - // assertNotNull(request1.getIOExceptionHandler()); - // assertNotNull(request1.getUnsuccessfulResponseHandler()); - // - // final HttpRequest request2 = requestFactory.buildGetRequest(new - // GenericUrl()); - // assertEquals((int) connectTimeout.millis(), request2.getConnectTimeout()); - // assertEquals((int) readTimeout.millis(), request2.getReadTimeout()); - // assertSame(request1.getInterceptor(), request2.getInterceptor()); - // assertNotNull(request2.getIOExceptionHandler()); - // assertNotSame(request1.getIOExceptionHandler(), - // request2.getIOExceptionHandler()); - // assertNotNull(request2.getUnsuccessfulResponseHandler()); - // assertNotSame(request1.getUnsuccessfulResponseHandler(), - // request2.getUnsuccessfulResponseHandler()); - // - // request1.getUnsuccessfulResponseHandler().handleResponse(null, null, false); - // verify(credential, times(1)).handleResponse(any(HttpRequest.class), - // any(HttpResponse.class), anyBoolean()); - // - // request2.getUnsuccessfulResponseHandler().handleResponse(null, null, false); - // verify(credential, times(2)).handleResponse(any(HttpRequest.class), - // any(HttpResponse.class), anyBoolean()); - // } + public void testClientInitializer() throws GeneralSecurityException, IOException { + final String clientName = randomAlphaOfLength(4).toLowerCase(Locale.ROOT); + final 
Environment environment = mock(Environment.class); + final TimeValue connectTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); + final TimeValue readTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); + final String applicationName = randomAlphaOfLength(4); + final String hostName = randomAlphaOfLength(4); + final String projectIdName = randomAlphaOfLength(4); + final Settings settings = Settings.builder() + .put(GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), connectTimeValue.getStringRep()) + .put(GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), readTimeValue.getStringRep()) + .put(GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName).getKey(), applicationName) + .put(GoogleCloudStorageClientSettings.HOST_SETTING.getConcreteSettingForNamespace(clientName).getKey(), hostName) + .put(GoogleCloudStorageClientSettings.PROJECT_ID_SETTING.getConcreteSettingForNamespace(clientName).getKey(), projectIdName) + .build(); + when(environment.settings()).thenReturn(settings); + final GoogleCloudStorageClientSettings clientSettings = GoogleCloudStorageClientSettings.getClientSettings(settings, clientName); + final GoogleCloudStorageService service = new GoogleCloudStorageService(environment, + Collections.singletonMap(clientName, clientSettings)); + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> service.createClient("another_client")); + assertThat(e.getMessage(), Matchers.startsWith("Unknown client name")); + assertSettingDeprecationsAndWarnings( + new Setting[] { GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName) }); + final Storage storage = service.createClient(clientName); + assertThat(storage.getOptions().getApplicationName(), Matchers.containsString(applicationName)); + assertThat(storage.getOptions().getHost(), Matchers.is(hostName)); + assertThat(storage.getOptions().getProjectId(), Matchers.is(projectIdName)); + assertThat(storage.getOptions().getTransportOptions(), Matchers.instanceOf(HttpTransportOptions.class)); + assertThat(((HttpTransportOptions) storage.getOptions().getTransportOptions()).getConnectTimeout(), + Matchers.is((int) connectTimeValue.millis())); + assertThat(((HttpTransportOptions) storage.getOptions().getTransportOptions()).getReadTimeout(), + Matchers.is((int) readTimeValue.millis())); + assertThat(storage.getOptions().getCredentials(), Matchers.nullValue(Credentials.class)); + } public void testToTimeout() { assertEquals(-1, GoogleCloudStorageService.toTimeout(null).intValue()); From 63dee215bf3492adb0d814f96366c92678881e4e Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 26 Apr 2018 00:24:58 +0300 Subject: [PATCH 17/45] Remove cached field prefetching --- .../gcs/GoogleCloudStorageBlobStore.java | 7 ++- .../gcs/GoogleCloudStorageClientSettings.java | 15 +++--- .../gcs/GoogleCloudStoragePlugin.java | 48 ------------------- .../plugin-metadata/plugin-security.policy | 2 + 4 files changed, 16 insertions(+), 56 deletions(-) diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 89ee953830b6b..547b68276ce78 100644 --- 
a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -175,10 +175,13 @@ InputStream readBlob(String blobName) throws IOException { + "ie. it is not seekable and reading should advance the channel's internal position") @Override public int read() throws IOException { - while (bytesRemaining > 0) { + while (true) { try { return (0xFF & buffer.get()); } catch (final BufferUnderflowException e) { + if (bytesRemaining == 0) { + return -1; + } // pull another chunck buffer.clear(); final long bytesRead = SocketAccess.doPrivilegedIOException(() -> readerAndSize.v1().read(buffer)); @@ -189,10 +192,10 @@ public int read() throws IOException { return -1; } bytesRemaining -= bytesRead; + assert bytesRemaining >= 0; // retry in case of non-blocking socket } } - return -1; } @Override diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java index a1b9544a7d675..9f98dff6b3835 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -30,6 +30,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -197,12 +198,14 @@ static ServiceAccountCredentials loadCredential(final Settings settings, final S return null; } try (InputStream credStream = CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName).get(settings)) { - final ServiceAccountCredentials credentials = ServiceAccountCredentials.fromStream(credStream); - if (credentials.createScopedRequired()) { - return (ServiceAccountCredentials) credentials - .createScoped(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)); - } - return credentials; + final Collection scopes = Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL); + return SocketAccess.doPrivilegedIOException(() -> { + final ServiceAccountCredentials credentials = ServiceAccountCredentials.fromStream(credStream); + if (credentials.createScopedRequired()) { + return (ServiceAccountCredentials) credentials.createScoped(scopes); + } + return credentials; + }); } } catch (final IOException e) { throw new UncheckedIOException(e); diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java index 4a770f477852e..16d7f654012c2 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java @@ -19,18 +19,6 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.client.auth.oauth2.TokenRequest; -import com.google.api.client.auth.oauth2.TokenResponse; -import com.google.api.client.googleapis.json.GoogleJsonError; -import com.google.api.client.http.GenericUrl; -import com.google.api.client.http.HttpHeaders; -import com.google.api.client.json.GenericJson; -import 
com.google.api.client.json.webtoken.JsonWebSignature; -import com.google.api.client.json.webtoken.JsonWebToken; -import com.google.api.client.util.ClassInfo; -import com.google.api.client.util.Data; - -import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -39,8 +27,6 @@ import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.Repository; -import java.security.AccessController; -import java.security.PrivilegedAction; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -48,40 +34,6 @@ public class GoogleCloudStoragePlugin extends Plugin implements RepositoryPlugin { - static { - /* - * Google HTTP client changes access levels because its silly and we can't allow - * that on any old stack stack so we pull it here, up front, so we can cleanly - * check the permissions for it. Without this changing the permission can fail - * if any part of core is on the stack because our plugin permissions don't - * allow core to "reach through" plugins to change the permission. Because - * that'd be silly. - */ - SpecialPermission.check(); - AccessController.doPrivileged((PrivilegedAction) () -> { - // ClassInfo put in cache all the fields of a given class - // that are annoted with @Key; at the same time it changes - // the field access level using setAccessible(). Calling - // them here put the ClassInfo in cache (they are never evicted) - // before the SecurityManager is installed. - ClassInfo.of(HttpHeaders.class, true); - - ClassInfo.of(JsonWebSignature.Header.class, false); - ClassInfo.of(JsonWebToken.Payload.class, false); - - ClassInfo.of(TokenRequest.class, false); - ClassInfo.of(TokenResponse.class, false); - - ClassInfo.of(GenericJson.class, false); - ClassInfo.of(GenericUrl.class, false); - - Data.nullOf(GoogleJsonError.ErrorInfo.class); - ClassInfo.of(GoogleJsonError.class, false); - - return null; - }); - } - private final Map clientsSettings; public GoogleCloudStoragePlugin(final Settings settings) { diff --git a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy index b662c7886d4ee..bb00616b1af02 100644 --- a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy @@ -28,5 +28,7 @@ grant { // gcs client opens socket connections for to access repository permission java.net.SocketPermission "*", "connect"; + //add read file permissions for the gcloud credentials + //eg. 
new File(System.getProperty("user.home"), ".config/gcloud") //permission java.io.FilePermission "<>", "read"; }; From 64bc96759243b919ec5e18255bd5473c70afbb4b Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Wed, 2 May 2018 16:25:09 +0300 Subject: [PATCH 18/45] Messy clean-up --- plugins/repository-gcs/build.gradle | 14 ++- .../gcs/GoogleCloudStorageBlobStore.java | 97 +++++++------------ .../gcs/GoogleCloudStorageClientSettings.java | 19 +++- .../gcs/GoogleCloudStoragePlugin.java | 3 +- .../gcs/GoogleCloudStorageService.java | 30 +++--- .../plugin-metadata/plugin-security.policy | 4 - ...GoogleCloudStorageClientSettingsTests.java | 3 +- 7 files changed, 79 insertions(+), 91 deletions(-) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 5265aa1188678..12a411a40f803 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -81,6 +81,7 @@ forbiddenApisTest { /** A task to start the GoogleCloudStorageFixture which emulates a Google Cloud Storage service **/ task googleCloudStorageFixture(type: AntFixture) { dependsOn compileTestJava + args '-Djavax.net.debug=all' env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" executable = new File(project.runtimeJavaHome, 'bin/java') args 'org.elasticsearch.repositories.gcs.GoogleCloudStorageFixture', baseDir, 'bucket_test' @@ -103,19 +104,22 @@ task createServiceAccountFile() { ' "private_key_id": "' + UUID.randomUUID().toString() + '",\n' + ' "private_key": "-----BEGIN PRIVATE KEY-----\\n' + encodedKey + '\\n-----END PRIVATE KEY-----\\n",\n' + ' "client_email": "integration_test@appspot.gserviceaccount.com",\n' + - ' "client_id": "123456789101112130594",\n' + - " \"auth_uri\": \"http://${googleCloudStorageFixture.addressAndPort}/o/oauth2/auth\",\n" + - " \"token_uri\": \"http://${googleCloudStorageFixture.addressAndPort}/o/oauth2/token\",\n" + - ' "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",\n' + - ' "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/integration_test%40appspot.gserviceaccount.com"\n' + + ' "client_id": "123456789101112130594"\n' + '}', 'UTF-8') } } +//" \"auth_uri\": \"http://${googleCloudStorageFixture.addressAndPort}/o/oauth2/auth\",\n" + +//" \"token_uri\": \"http://${googleCloudStorageFixture.addressAndPort}/o/oauth2/token\",\n" + +//' "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",\n' + +//' "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/integration_test%40appspot.gserviceaccount.com"\n' + + integTestCluster { dependsOn createServiceAccountFile, googleCloudStorageFixture + systemProperty 'javax.net.debug', 'all' keystoreFile 'gcs.client.integration_test.credentials_file', "${serviceAccountFile.absolutePath}" /* Use a closure on the string to delay evaluation until tests are executed */ setting 'gcs.client.integration_test.host', "http://${ -> googleCloudStorageFixture.addressAndPort }" + setting 'gcs.client.integration_test.token_uri', "http://${ -> googleCloudStorageFixture.addressAndPort }/o/oauth2/token" } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 547b68276ce78..a4b38460ddf99 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ 
b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -30,7 +30,6 @@ import com.google.cloud.storage.Storage.BlobListOption; import com.google.cloud.storage.Storage.CopyRequest; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; @@ -38,14 +37,15 @@ import org.elasticsearch.common.blobstore.BlobStoreException; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.io.Channels; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.internal.io.Streams; + import java.io.IOException; import java.io.InputStream; -import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; import java.nio.file.NoSuchFileException; import java.util.Collection; import java.util.List; @@ -154,55 +154,27 @@ boolean blobExists(String blobName) throws IOException { */ InputStream readBlob(String blobName) throws IOException { final BlobId blobId = BlobId.of(bucket, blobName); - final Tuple readerAndSize = SocketAccess.doPrivilegedIOException(() -> - { - final Blob blob = storage.get(blobId); - if (blob == null) { - return null; - } - return new Tuple<>(blob.reader(), blob.getSize()); - }); - if (readerAndSize == null) { + final Blob blob = SocketAccess.doPrivilegedIOException(() -> storage.get(blobId)); + if (blob == null) { throw new NoSuchFileException("Blob [" + blobName + "] does not exit."); } - final ByteBuffer buffer = ByteBuffer.allocate(8 * 1024); - // first read pull data - buffer.flip(); - return new InputStream() { - long bytesRemaining = readerAndSize.v2(); + final ReadChannel readChannel = SocketAccess.doPrivilegedIOException(blob::reader); + return java.nio.channels.Channels.newInputStream(new ReadableByteChannel() { + @Override + public int read(ByteBuffer dst) throws IOException { + return SocketAccess.doPrivilegedIOException(() -> readChannel.read(dst)); + } - @SuppressForbidden(reason = "the reader channel is backed by a socket instead of a file," - + "ie. 
it is not seekable and reading should advance the channel's internal position") @Override - public int read() throws IOException { - while (true) { - try { - return (0xFF & buffer.get()); - } catch (final BufferUnderflowException e) { - if (bytesRemaining == 0) { - return -1; - } - // pull another chunck - buffer.clear(); - final long bytesRead = SocketAccess.doPrivilegedIOException(() -> readerAndSize.v1().read(buffer)); - buffer.flip(); - if (bytesRead < 0) { - return -1; - } else if ((bytesRead == 0) && (bytesRemaining == 0)) { - return -1; - } - bytesRemaining -= bytesRead; - assert bytesRemaining >= 0; - // retry in case of non-blocking socket - } - } + public boolean isOpen() { + return readChannel.isOpen(); } @Override public void close() throws IOException { - readerAndSize.v1().close(); + SocketAccess.doPrivilegedVoidIOException(readChannel::close); } - }; + }); } /** @@ -213,22 +185,23 @@ public void close() throws IOException { */ void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { final BlobInfo blobInfo = BlobInfo.newBuilder(bucket, blobName).build(); - final byte[] buffer = new byte[64 * 1024]; - SocketAccess.doPrivilegedVoidIOException(() -> { - long bytesWritten = 0; - try (WriteChannel writer = storage.writer(blobInfo)) { - int limit; - while ((limit = inputStream.read(buffer)) >= 0) { - try { - Channels.writeToChannel(buffer, 0, limit, writer); - bytesWritten += limit; - } catch (final Exception e) { - throw new IOException("Failed to write blob [" + blobName + "] into bucket [" + bucket + "].", e); - } - } + final WriteChannel writeChannel = SocketAccess.doPrivilegedIOException(() -> storage.writer(blobInfo)); + Streams.copy(inputStream, java.nio.channels.Channels.newOutputStream(new WritableByteChannel() { + @Override + public boolean isOpen() { + return writeChannel.isOpen(); } - assert blobSize == bytesWritten : "InputStream unexpected size, expected [" + blobSize + "] got [" + bytesWritten + "]"; - }); + + @Override + public void close() throws IOException { + SocketAccess.doPrivilegedVoidIOException(writeChannel::close); + } + + @Override + public int write(ByteBuffer src) throws IOException { + return SocketAccess.doPrivilegedIOException(() -> writeChannel.write(src)); + } + })); } /** @@ -281,7 +254,7 @@ void deleteBlobs(Collection blobNames) throws IOException { * Moves a blob within the same bucket * * @param sourceBlob name of the blob to move - * @param targetBlob new name of the blob in the target bucket + * @param targetBlob new name of the blob in the same bucket */ void moveBlob(String sourceBlobName, String targetBlobName) throws IOException { final BlobId sourceBlobId = BlobId.of(bucket, sourceBlobName); @@ -293,9 +266,9 @@ void moveBlob(String sourceBlobName, String targetBlobName) throws IOException { SocketAccess.doPrivilegedVoidIOException(() -> { // There's no atomic "move" in GCS so we need to copy and delete final CopyWriter copyWriter = storage.copy(request); - final Blob destBlob = copyWriter.getResult(); + copyWriter.getResult(); final boolean deleted = storage.delete(sourceBlobId); - if ((deleted == false) || (destBlob.reload() == null)) { + if (deleted == false) { throw new IOException("Failed to move source [" + sourceBlobName + "] to target [" + targetBlobName + "]."); } }); diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java 
b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java index 9f98dff6b3835..01b3dcd06d846 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -30,6 +30,7 @@ import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; +import java.net.URI; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -73,6 +74,10 @@ public class GoogleCloudStorageClientSettings { static final Setting.AffixSetting PROJECT_ID_SETTING = Setting.affixKeySetting(PREFIX, "project_id", key -> Setting.simpleString(key, Setting.Property.NodeScope)); + /** An override for the Token Server URI in the oauth flow. */ + static final Setting.AffixSetting TOKEN_URI_SETTING = Setting.affixKeySetting(PREFIX, "token_uri", + key -> new Setting<>(key, "", URI::create, Setting.Property.NodeScope)); + /** * The timeout to establish a connection. A value of {@code -1} corresponds to an infinite timeout. A value of {@code 0} * corresponds to the default timeout of the Google Cloud Storage Java Library. @@ -116,18 +121,23 @@ public class GoogleCloudStorageClientSettings { /** The Storage client application name **/ private final String applicationName; + /** The token server URI. This leases access tokens in the oauth flow. **/ + private final URI tokenUri; + GoogleCloudStorageClientSettings(final ServiceAccountCredentials credential, final String host, final String projectId, final TimeValue connectTimeout, final TimeValue readTimeout, - final String applicationName) { + final String applicationName, + final URI tokenUri) { this.credential = credential; this.host = host; this.projectId = projectId; this.connectTimeout = connectTimeout; this.readTimeout = readTimeout; this.applicationName = applicationName; + this.tokenUri = tokenUri; } public ServiceAccountCredentials getCredential() { @@ -154,6 +164,10 @@ public String getApplicationName() { return applicationName; } + public URI getTokenUri() { + return tokenUri; + } + public static Map load(final Settings settings) { final Map clients = new HashMap<>(); for (final String clientName: settings.getGroups(PREFIX).keySet()) { @@ -174,7 +188,8 @@ static GoogleCloudStorageClientSettings getClientSettings(final Settings setting getConfigValue(settings, clientName, PROJECT_ID_SETTING), getConfigValue(settings, clientName, CONNECT_TIMEOUT_SETTING), getConfigValue(settings, clientName, READ_TIMEOUT_SETTING), - getConfigValue(settings, clientName, APPLICATION_NAME_SETTING) + getConfigValue(settings, clientName, APPLICATION_NAME_SETTING), + getConfigValue(settings, clientName, TOKEN_URI_SETTING) ); } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java index 16d7f654012c2..7d232506e2e57 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java @@ -64,6 +64,7 @@ public List> getSettings() { GoogleCloudStorageClientSettings.PROJECT_ID_SETTING, GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING, GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING, - 
GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING); + GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING, + GoogleCloudStorageClientSettings.TOKEN_URI_SETTING); } } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index 4418c1d0b9c1d..2908007cc432b 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -21,7 +21,7 @@ import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; import com.google.api.client.http.javanet.NetHttpTransport; -import com.google.api.gax.retrying.RetrySettings; +import com.google.auth.oauth2.ServiceAccountCredentials; import com.google.cloud.http.HttpTransportOptions; import com.google.cloud.storage.Storage; import com.google.cloud.storage.StorageOptions; @@ -31,8 +31,6 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.Environment; -import org.threeten.bp.Duration; - import java.io.IOException; import java.security.GeneralSecurityException; import java.util.Map; @@ -41,18 +39,10 @@ public class GoogleCloudStorageService extends AbstractComponent { /** Clients settings identified by client name. */ private final Map clientsSettings; - private final RetrySettings retrySettings; public GoogleCloudStorageService(Environment environment, Map clientsSettings) { super(environment.settings()); this.clientsSettings = clientsSettings; - this.retrySettings = RetrySettings.newBuilder() - .setInitialRetryDelay(Duration.ofMillis(100)) - .setMaxRetryDelay(Duration.ofMillis(6000)) - .setTotalTimeout(Duration.ofMillis(900000)) - .setRetryDelayMultiplier(1.5d) - .setJittered(true) - .build(); } /** @@ -62,7 +52,7 @@ public GoogleCloudStorageService(Environment environment, Map netHttpTransport) .build(); final StorageOptions.Builder storageOptionsBuilder = StorageOptions.newBuilder() - .setRetrySettings(retrySettings) .setTransportOptions(httpTransportOptions) .setHeaderProvider(() -> { final MapBuilder mapBuilder = MapBuilder.newMapBuilder(); @@ -87,12 +76,21 @@ public Storage createClient(String clientName) throws GeneralSecurityException, if (Strings.hasLength(clientSettings.getHost())) { storageOptionsBuilder.setHost(clientSettings.getHost()); } - if (clientSettings.getCredential() != null) { - storageOptionsBuilder.setCredentials(clientSettings.getCredential()); - } if (Strings.hasLength(clientSettings.getProjectId())) { storageOptionsBuilder.setProjectId(clientSettings.getProjectId()); } + if (clientSettings.getCredential() == null) { + logger.warn("Application Default Credentials are not supported out of the box." 
+ + " Additional file system permissions have to be granted to the plugin."); + } else { + final ServiceAccountCredentials serviceAccountCredentials = clientSettings.getCredential(); + if (Strings.hasLength(clientSettings.getTokenUri().toString())) { + storageOptionsBuilder + .setCredentials(serviceAccountCredentials.toBuilder().setTokenServerUri(clientSettings.getTokenUri()).build()); + } else { + storageOptionsBuilder.setCredentials(serviceAccountCredentials); + } + } return storageOptionsBuilder.build().getService(); } diff --git a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy index bb00616b1af02..b46eef0744779 100644 --- a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy @@ -27,8 +27,4 @@ grant { // gcs client opens socket connections for to access repository permission java.net.SocketPermission "*", "connect"; - - //add read file permissions for the gcloud credentials - //eg. new File(System.getProperty("user.home"), ".config/gcloud") - //permission java.io.FilePermission "<>", "read"; }; diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java index d2ef577982f11..b731d5dd507c0 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; +import java.net.URI; import java.nio.charset.StandardCharsets; import java.security.KeyPair; import java.security.KeyPairGenerator; @@ -174,7 +175,7 @@ private static GoogleCloudStorageClientSettings randomClient(final String client applicationName = APPLICATION_NAME_SETTING.getDefault(Settings.EMPTY); } - return new GoogleCloudStorageClientSettings(credential, host, projectId, connectTimeout, readTimeout, applicationName); + return new GoogleCloudStorageClientSettings(credential, host, projectId, connectTimeout, readTimeout, applicationName, new URI("")); } /** Generates a random GoogleCredential along with its corresponding Service Account file provided as a byte array **/ From d136602c248519726b8641ab7d1c521d898a3782 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 3 May 2018 09:27:18 +0300 Subject: [PATCH 19/45] fixture check bucket --- .../repositories/gcs/GoogleCloudStorageTestServer.java | 1 + 1 file changed, 1 insertion(+) diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java index 2330e230f4505..06158ff75ff8d 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java @@ -565,6 +565,7 @@ private static Response newError(final RestStatus status, final String message) private static XContentBuilder buildBucketResource(final String name) throws IOException { return jsonBuilder().startObject() .field("kind", "storage#bucket") + 
.field("name", name) .field("id", name) .endObject(); } From 46e9ed6dc16b734fb9feaae2c1fc16e4dedb7521 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 3 May 2018 09:32:43 +0300 Subject: [PATCH 20/45] clean-up build.gradle --- plugins/repository-gcs/build.gradle | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 12a411a40f803..97cfa35fcae31 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -62,14 +62,6 @@ dependencyLicenses { thirdPartyAudit.excludes = [ // classes are missing - //'com.google.appengine.api' - //'com.google.common.base.Splitter', - //'com.google.common.collect.Lists', - //'javax.servlet.ServletContextEvent', - //'javax.servlet.ServletContextListener', - //'org.apache.avalon.framework.logger.Logger', - //'org.apache.log.Hierarchy', - //'org.apache.log.Logger', ] forbiddenApisTest { @@ -81,7 +73,6 @@ forbiddenApisTest { /** A task to start the GoogleCloudStorageFixture which emulates a Google Cloud Storage service **/ task googleCloudStorageFixture(type: AntFixture) { dependsOn compileTestJava - args '-Djavax.net.debug=all' env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" executable = new File(project.runtimeJavaHome, 'bin/java') args 'org.elasticsearch.repositories.gcs.GoogleCloudStorageFixture', baseDir, 'bucket_test' @@ -109,14 +100,8 @@ task createServiceAccountFile() { } } -//" \"auth_uri\": \"http://${googleCloudStorageFixture.addressAndPort}/o/oauth2/auth\",\n" + -//" \"token_uri\": \"http://${googleCloudStorageFixture.addressAndPort}/o/oauth2/token\",\n" + -//' "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",\n' + -//' "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/integration_test%40appspot.gserviceaccount.com"\n' + - integTestCluster { dependsOn createServiceAccountFile, googleCloudStorageFixture - systemProperty 'javax.net.debug', 'all' keystoreFile 'gcs.client.integration_test.credentials_file', "${serviceAccountFile.absolutePath}" /* Use a closure on the string to delay evaluation until tests are executed */ From 59e1a83e88d403f19829905806fc93e221b81aa3 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 3 May 2018 10:08:13 +0300 Subject: [PATCH 21/45] post merge fixes --- plugins/repository-gcs/build.gradle | 47 ------------------- .../qa/google-cloud-storage/build.gradle | 9 ++-- 2 files changed, 3 insertions(+), 53 deletions(-) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 8876914043ccd..b0f089f82fc2e 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -64,54 +64,7 @@ thirdPartyAudit.excludes = [ // classes are missing ] -<<<<<<< HEAD -forbiddenApisTest { - // we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage - bundledSignatures -= 'jdk-non-portable' - bundledSignatures += 'jdk-internal' -} - -/** A task to start the GoogleCloudStorageFixture which emulates a Google Cloud Storage service **/ -task googleCloudStorageFixture(type: AntFixture) { - dependsOn compileTestJava - env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" - executable = new File(project.runtimeJavaHome, 'bin/java') - args 'org.elasticsearch.repositories.gcs.GoogleCloudStorageFixture', baseDir, 'bucket_test' -} - -/** A service account file that points to the Google Cloud Storage service emulated by 
the fixture **/ -File serviceAccountFile = new File(project.buildDir, "generated-resources/service_account_test.json") -task createServiceAccountFile() { - dependsOn googleCloudStorageFixture - doLast { - KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA") - keyPairGenerator.initialize(1024) - KeyPair keyPair = keyPairGenerator.generateKeyPair() - String encodedKey = Base64.getEncoder().encodeToString(keyPair.private.getEncoded()) - - serviceAccountFile.parentFile.mkdirs() - serviceAccountFile.setText("{\n" + - ' "type": "service_account",\n' + - ' "project_id": "integration_test",\n' + - ' "private_key_id": "' + UUID.randomUUID().toString() + '",\n' + - ' "private_key": "-----BEGIN PRIVATE KEY-----\\n' + encodedKey + '\\n-----END PRIVATE KEY-----\\n",\n' + - ' "client_email": "integration_test@appspot.gserviceaccount.com",\n' + - ' "client_id": "123456789101112130594"\n' + - '}', 'UTF-8') - } -} - -integTestCluster { - dependsOn createServiceAccountFile, googleCloudStorageFixture - keystoreFile 'gcs.client.integration_test.credentials_file', "${serviceAccountFile.absolutePath}" - - /* Use a closure on the string to delay evaluation until tests are executed */ - setting 'gcs.client.integration_test.host', "http://${ -> googleCloudStorageFixture.addressAndPort }" - setting 'gcs.client.integration_test.token_uri', "http://${ -> googleCloudStorageFixture.addressAndPort }/o/oauth2/token" -} -======= check { // also execute the QA tests when testing the plugin dependsOn 'qa:google-cloud-storage:check' } ->>>>>>> master diff --git a/plugins/repository-gcs/qa/google-cloud-storage/build.gradle b/plugins/repository-gcs/qa/google-cloud-storage/build.gradle index afd49b9f4dc73..942a4fb31e80b 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/build.gradle +++ b/plugins/repository-gcs/qa/google-cloud-storage/build.gradle @@ -83,11 +83,7 @@ task createServiceAccountFile() { ' "private_key_id": "' + UUID.randomUUID().toString() + '",\n' + ' "private_key": "-----BEGIN PRIVATE KEY-----\\n' + encodedKey + '\\n-----END PRIVATE KEY-----\\n",\n' + ' "client_email": "integration_test@appspot.gserviceaccount.com",\n' + - ' "client_id": "123456789101112130594",\n' + - " \"auth_uri\": \"http://${googleCloudStorageFixture.addressAndPort}/o/oauth2/auth\",\n" + - " \"token_uri\": \"http://${googleCloudStorageFixture.addressAndPort}/o/oauth2/token\",\n" + - ' "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",\n' + - ' "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/integration_test%40appspot.gserviceaccount.com"\n' + + ' "client_id": "123456789101112130594"\n' + '}', 'UTF-8') } } @@ -108,7 +104,8 @@ integTestCluster { if (useFixture) { dependsOn createServiceAccountFile, googleCloudStorageFixture /* Use a closure on the string to delay evaluation until tests are executed */ - setting 'gcs.client.integration_test.endpoint', "http://${ -> googleCloudStorageFixture.addressAndPort }" + setting 'gcs.client.integration_test.host', "http://${ -> googleCloudStorageFixture.addressAndPort }" + setting 'gcs.client.integration_test.token_uri', "http://${ -> googleCloudStorageFixture.addressAndPort }/o/oauth2/token" } else { println "Using an external service to test the repository-gcs plugin" } From 3f3db5ef826d91041cb24b794a9d4c87233ea122 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 3 May 2018 11:43:22 +0300 Subject: [PATCH 22/45] Wtf? integ tests work? 
--- plugins/repository-gcs/build.gradle | 25 ++++++++++++++----- .../licenses/gax-1.23.0.jar.sha1 | 1 - .../licenses/gax-1.25.0.jar.sha1 | 1 + ...le-auth-library-credentials-0.9.0.jar.sha1 | 1 - ...le-auth-library-credentials-0.9.1.jar.sha1 | 1 + ...le-auth-library-oauth2-http-0.9.0.jar.sha1 | 1 - ...le-auth-library-oauth2-http-0.9.1.jar.sha1 | 1 + .../google-cloud-core-1.26.0.jar.sha1 | 1 - .../google-cloud-core-1.28.0.jar.sha1 | 1 + .../google-cloud-core-http-1.26.0.jar.sha1 | 1 - .../google-cloud-core-http-1.28.0.jar.sha1 | 1 + .../google-cloud-storage-1.26.0.jar.sha1 | 1 - .../google-cloud-storage-1.28.0.jar.sha1 | 1 + .../gcs/GoogleCloudStorageBlobStore.java | 3 +++ .../gcs/GoogleCloudStorageServiceTests.java | 9 ++++--- 15 files changed, 34 insertions(+), 15 deletions(-) delete mode 100644 plugins/repository-gcs/licenses/gax-1.23.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/gax-1.25.0.jar.sha1 delete mode 100644 plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.1.jar.sha1 delete mode 100644 plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.1.jar.sha1 delete mode 100644 plugins/repository-gcs/licenses/google-cloud-core-1.26.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-cloud-core-1.28.0.jar.sha1 delete mode 100644 plugins/repository-gcs/licenses/google-cloud-core-http-1.26.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-cloud-core-http-1.28.0.jar.sha1 delete mode 100644 plugins/repository-gcs/licenses/google-cloud-storage-1.26.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-cloud-storage-1.28.0.jar.sha1 diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index b0f089f82fc2e..4ac488f78f557 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -28,18 +28,18 @@ esplugin { } dependencies { - compile 'com.google.cloud:google-cloud-storage:1.26.0' - compile 'com.google.cloud:google-cloud-core:1.26.0' - compile 'com.google.cloud:google-cloud-core-http:1.26.0' - compile 'com.google.auth:google-auth-library-oauth2-http:0.9.0' - compile 'com.google.auth:google-auth-library-credentials:0.9.0' + compile 'com.google.cloud:google-cloud-storage:1.28.0' + compile 'com.google.cloud:google-cloud-core:1.28.0' + compile 'com.google.cloud:google-cloud-core-http:1.28.0' + compile 'com.google.auth:google-auth-library-oauth2-http:0.9.1' + compile 'com.google.auth:google-auth-library-credentials:0.9.1' compile 'com.google.oauth-client:google-oauth-client:1.23.0' compile 'com.google.http-client:google-http-client:1.23.0' compile 'com.google.http-client:google-http-client-jackson:1.23.0' compile 'com.google.http-client:google-http-client-jackson2:1.23.0' compile 'com.google.http-client:google-http-client-appengine:1.23.0' compile 'com.google.api-client:google-api-client:1.23.0' - compile 'com.google.api:gax:1.23.0' + compile 'com.google.api:gax:1.25.0' compile 'com.google.api:gax-httpjson:0.40.0' compile 'com.google.api:api-common:1.5.0' compile 'com.google.api.grpc:proto-google-common-protos:1.8.0' @@ -62,6 +62,19 @@ dependencyLicenses { thirdPartyAudit.excludes = [ // classes are missing + 'com.google.appengine.api.datastore.Blob', + 'com.google.appengine.api.datastore.DatastoreService', + 
'com.google.appengine.api.datastore.DatastoreServiceFactory', + 'com.google.appengine.api.datastore.Entity', + 'com.google.appengine.api.datastore.Key', + 'com.google.appengine.api.datastore.KeyFactory', + 'com.google.appengine.api.datastore.PreparedQuery', + 'com.google.appengine.api.datastore.Query', + 'com.google.appengine.api.memcache.Expiration', + 'com.google.appengine.api.memcache.MemcacheService', + 'com.google.appengine.api.memcache.MemcacheServiceFactory', + 'com.google.appengine.api.urlfetch.FetchOptions$Builder', + 'com.google.appengine.api.urlfetch.FetchOptions' ] check { diff --git a/plugins/repository-gcs/licenses/gax-1.23.0.jar.sha1 b/plugins/repository-gcs/licenses/gax-1.23.0.jar.sha1 deleted file mode 100644 index 30dae6ac90110..0000000000000 --- a/plugins/repository-gcs/licenses/gax-1.23.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -09cbdb558449d6fc16667043c31c541b8d02ace4 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gax-1.25.0.jar.sha1 b/plugins/repository-gcs/licenses/gax-1.25.0.jar.sha1 new file mode 100644 index 0000000000000..594177047c140 --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-1.25.0.jar.sha1 @@ -0,0 +1 @@ +36ab73c0b5d4a67447eb89a3174cc76ced150bd1 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.0.jar.sha1 b/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.0.jar.sha1 deleted file mode 100644 index bd36d59b66624..0000000000000 --- a/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8e2b181feff6005c9cbc6f5c1c1e2d3ec9138d46 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.1.jar.sha1 b/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.1.jar.sha1 new file mode 100644 index 0000000000000..0922a53d2e356 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.1.jar.sha1 @@ -0,0 +1 @@ +25e0f45f3b3d1b4fccc8944845e51a7a4f359652 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.0.jar.sha1 b/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.0.jar.sha1 deleted file mode 100644 index 143c00920204c..0000000000000 --- a/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -04e6152c3aead24148627e84f5651e79698c00d9 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.1.jar.sha1 b/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.1.jar.sha1 new file mode 100644 index 0000000000000..100a44c187218 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.1.jar.sha1 @@ -0,0 +1 @@ +c0fe3a39b0f28d59de1986b3c50f018cd7cb9ec2 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-1.26.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-1.26.0.jar.sha1 deleted file mode 100644 index aa2095bb7f4aa..0000000000000 --- a/plugins/repository-gcs/licenses/google-cloud-core-1.26.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5a65c299210381c62043d284f6dec0ccaacac19e \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-1.28.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-1.28.0.jar.sha1 new file mode 100644 index 0000000000000..071533f227839 --- /dev/null +++ 
b/plugins/repository-gcs/licenses/google-cloud-core-1.28.0.jar.sha1 @@ -0,0 +1 @@ +c0e88c78ce17c92d76bf46345faf3fa68833b216 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-http-1.26.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-http-1.26.0.jar.sha1 deleted file mode 100644 index 0ec125ff07bd2..0000000000000 --- a/plugins/repository-gcs/licenses/google-cloud-core-http-1.26.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0edc507afb9970900787d1204a1ee894b88abf06 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-http-1.28.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-http-1.28.0.jar.sha1 new file mode 100644 index 0000000000000..fed3fc257c32c --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-core-http-1.28.0.jar.sha1 @@ -0,0 +1 @@ +7b4559a9513abd98da50958c56a10f8ae00cb0f7 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-storage-1.26.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-storage-1.26.0.jar.sha1 deleted file mode 100644 index ea8aca863e065..0000000000000 --- a/plugins/repository-gcs/licenses/google-cloud-storage-1.26.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2752a91ffd8ca767942be823390a620791812e9c \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-storage-1.28.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-storage-1.28.0.jar.sha1 new file mode 100644 index 0000000000000..f49152ea05646 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-storage-1.28.0.jar.sha1 @@ -0,0 +1 @@ +226019ae816b42c59f1b06999aeeb73722b87200 \ No newline at end of file diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index a4b38460ddf99..080e531b9f38e 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -30,6 +30,7 @@ import com.google.cloud.storage.Storage.BlobListOption; import com.google.cloud.storage.Storage.CopyRequest; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; @@ -160,6 +161,7 @@ InputStream readBlob(String blobName) throws IOException { } final ReadChannel readChannel = SocketAccess.doPrivilegedIOException(blob::reader); return java.nio.channels.Channels.newInputStream(new ReadableByteChannel() { + @SuppressForbidden(reason = "Channel is backed by a socket instead of a file.") @Override public int read(ByteBuffer dst) throws IOException { return SocketAccess.doPrivilegedIOException(() -> readChannel.read(dst)); @@ -197,6 +199,7 @@ public void close() throws IOException { SocketAccess.doPrivilegedVoidIOException(writeChannel::close); } + @SuppressForbidden(reason = "Channel is backed by a socket instead of a file.") @Override public int write(ByteBuffer src) throws IOException { return SocketAccess.doPrivilegedIOException(() -> writeChannel.write(src)); diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java 
b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java index b5891e1159f96..00bd1b8c1ca44 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java @@ -48,9 +48,12 @@ public void testClientInitializer() throws GeneralSecurityException, IOException final String hostName = randomAlphaOfLength(4); final String projectIdName = randomAlphaOfLength(4); final Settings settings = Settings.builder() - .put(GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), connectTimeValue.getStringRep()) - .put(GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), readTimeValue.getStringRep()) - .put(GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName).getKey(), applicationName) + .put(GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), + connectTimeValue.getStringRep()) + .put(GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), + readTimeValue.getStringRep()) + .put(GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName).getKey(), + applicationName) .put(GoogleCloudStorageClientSettings.HOST_SETTING.getConcreteSettingForNamespace(clientName).getKey(), hostName) .put(GoogleCloudStorageClientSettings.PROJECT_ID_SETTING.getConcreteSettingForNamespace(clientName).getKey(), projectIdName) .build(); From d1a69791afc5c1732d9f8100821721577ec2e112 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 3 May 2018 15:58:33 +0300 Subject: [PATCH 23/45] Check passes --- plugins/repository-gcs/build.gradle | 153 +++++++++++++++++- .../gcs/GoogleCloudStorageService.java | 2 +- 2 files changed, 153 insertions(+), 2 deletions(-) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 4ac488f78f557..d38e5496fa977 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -61,6 +61,18 @@ dependencyLicenses { } thirdPartyAudit.excludes = [ + // uses internal java api: sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', // classes are missing 'com.google.appengine.api.datastore.Blob', 'com.google.appengine.api.datastore.DatastoreService', @@ -74,7 +86,146 @@ thirdPartyAudit.excludes = [ 'com.google.appengine.api.memcache.MemcacheService', 'com.google.appengine.api.memcache.MemcacheServiceFactory', 'com.google.appengine.api.urlfetch.FetchOptions$Builder', - 'com.google.appengine.api.urlfetch.FetchOptions' + 
'com.google.appengine.api.urlfetch.FetchOptions', + 'com.google.appengine.api.urlfetch.HTTPHeader', + 'com.google.appengine.api.urlfetch.HTTPMethod', + 'com.google.appengine.api.urlfetch.HTTPRequest', + 'com.google.appengine.api.urlfetch.HTTPResponse', + 'com.google.appengine.api.urlfetch.URLFetchService', + 'com.google.appengine.api.urlfetch.URLFetchServiceFactory', + 'com.google.gson.Gson', + 'com.google.gson.GsonBuilder', + 'com.google.gson.TypeAdapter', + 'com.google.gson.stream.JsonReader', + 'com.google.gson.stream.JsonWriter', + 'com.google.iam.v1.Binding$Builder', + 'com.google.iam.v1.Binding', + 'com.google.iam.v1.Policy$Builder', + 'com.google.iam.v1.Policy', + 'com.google.protobuf.AbstractMessageLite$Builder', + 'com.google.protobuf.AbstractParser', + 'com.google.protobuf.Any$Builder', + 'com.google.protobuf.Any', + 'com.google.protobuf.AnyOrBuilder', + 'com.google.protobuf.AnyProto', + 'com.google.protobuf.Api$Builder', + 'com.google.protobuf.Api', + 'com.google.protobuf.ApiOrBuilder', + 'com.google.protobuf.ApiProto', + 'com.google.protobuf.ByteString', + 'com.google.protobuf.CodedInputStream', + 'com.google.protobuf.CodedOutputStream', + 'com.google.protobuf.DescriptorProtos', + 'com.google.protobuf.Descriptors$Descriptor', + 'com.google.protobuf.Descriptors$EnumDescriptor', + 'com.google.protobuf.Descriptors$EnumValueDescriptor', + 'com.google.protobuf.Descriptors$FieldDescriptor', + 'com.google.protobuf.Descriptors$FileDescriptor$InternalDescriptorAssigner', + 'com.google.protobuf.Descriptors$FileDescriptor', + 'com.google.protobuf.Descriptors$OneofDescriptor', + 'com.google.protobuf.Duration$Builder', + 'com.google.protobuf.Duration', + 'com.google.protobuf.DurationOrBuilder', + 'com.google.protobuf.DurationProto', + 'com.google.protobuf.EmptyProto', + 'com.google.protobuf.Enum$Builder', + 'com.google.protobuf.Enum', + 'com.google.protobuf.EnumOrBuilder', + 'com.google.protobuf.ExtensionRegistry', + 'com.google.protobuf.ExtensionRegistryLite', + 'com.google.protobuf.FloatValue$Builder', + 'com.google.protobuf.FloatValue', + 'com.google.protobuf.FloatValueOrBuilder', + 'com.google.protobuf.GeneratedMessage$GeneratedExtension', + 'com.google.protobuf.GeneratedMessage', + 'com.google.protobuf.GeneratedMessageV3$Builder', + 'com.google.protobuf.GeneratedMessageV3$BuilderParent', + 'com.google.protobuf.GeneratedMessageV3$FieldAccessorTable', + 'com.google.protobuf.GeneratedMessageV3', + 'com.google.protobuf.Internal$EnumLite', + 'com.google.protobuf.Internal$EnumLiteMap', + 'com.google.protobuf.Internal', + 'com.google.protobuf.InvalidProtocolBufferException', + 'com.google.protobuf.LazyStringArrayList', + 'com.google.protobuf.LazyStringList', + 'com.google.protobuf.MapEntry$Builder', + 'com.google.protobuf.MapEntry', + 'com.google.protobuf.MapField', + 'com.google.protobuf.Message', + 'com.google.protobuf.MessageOrBuilder', + 'com.google.protobuf.Parser', + 'com.google.protobuf.ProtocolMessageEnum', + 'com.google.protobuf.ProtocolStringList', + 'com.google.protobuf.RepeatedFieldBuilderV3', + 'com.google.protobuf.SingleFieldBuilderV3', + 'com.google.protobuf.Struct$Builder', + 'com.google.protobuf.Struct', + 'com.google.protobuf.StructOrBuilder', + 'com.google.protobuf.StructProto', + 'com.google.protobuf.Timestamp$Builder', + 'com.google.protobuf.Timestamp', + 'com.google.protobuf.TimestampProto', + 'com.google.protobuf.Type$Builder', + 'com.google.protobuf.Type', + 'com.google.protobuf.TypeOrBuilder', + 'com.google.protobuf.TypeProto', + 
'com.google.protobuf.UInt32Value$Builder', + 'com.google.protobuf.UInt32Value', + 'com.google.protobuf.UInt32ValueOrBuilder', + 'com.google.protobuf.UnknownFieldSet$Builder', + 'com.google.protobuf.UnknownFieldSet', + 'com.google.protobuf.WireFormat$FieldType', + 'com.google.protobuf.WrappersProto', + 'com.google.protobuf.util.Timestamps', + 'org.apache.http.ConnectionReuseStrategy', + 'org.apache.http.Header', + 'org.apache.http.HttpEntity', + 'org.apache.http.HttpEntityEnclosingRequest', + 'org.apache.http.HttpHost', + 'org.apache.http.HttpRequest', + 'org.apache.http.HttpResponse', + 'org.apache.http.HttpVersion', + 'org.apache.http.RequestLine', + 'org.apache.http.StatusLine', + 'org.apache.http.client.AuthenticationHandler', + 'org.apache.http.client.HttpClient', + 'org.apache.http.client.HttpRequestRetryHandler', + 'org.apache.http.client.RedirectHandler', + 'org.apache.http.client.RequestDirector', + 'org.apache.http.client.UserTokenHandler', + 'org.apache.http.client.methods.HttpDelete', + 'org.apache.http.client.methods.HttpEntityEnclosingRequestBase', + 'org.apache.http.client.methods.HttpGet', + 'org.apache.http.client.methods.HttpHead', + 'org.apache.http.client.methods.HttpOptions', + 'org.apache.http.client.methods.HttpPost', + 'org.apache.http.client.methods.HttpPut', + 'org.apache.http.client.methods.HttpRequestBase', + 'org.apache.http.client.methods.HttpTrace', + 'org.apache.http.conn.ClientConnectionManager', + 'org.apache.http.conn.ConnectionKeepAliveStrategy', + 'org.apache.http.conn.params.ConnManagerParams', + 'org.apache.http.conn.params.ConnPerRouteBean', + 'org.apache.http.conn.params.ConnRouteParams', + 'org.apache.http.conn.routing.HttpRoutePlanner', + 'org.apache.http.conn.scheme.PlainSocketFactory', + 'org.apache.http.conn.scheme.Scheme', + 'org.apache.http.conn.scheme.SchemeRegistry', + 'org.apache.http.conn.ssl.SSLSocketFactory', + 'org.apache.http.conn.ssl.X509HostnameVerifier', + 'org.apache.http.entity.AbstractHttpEntity', + 'org.apache.http.impl.client.DefaultHttpClient', + 'org.apache.http.impl.client.DefaultHttpRequestRetryHandler', + 'org.apache.http.impl.conn.ProxySelectorRoutePlanner', + 'org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager', + 'org.apache.http.message.BasicHttpResponse', + 'org.apache.http.params.BasicHttpParams', + 'org.apache.http.params.HttpConnectionParams', + 'org.apache.http.params.HttpParams', + 'org.apache.http.params.HttpProtocolParams', + 'org.apache.http.protocol.HttpContext', + 'org.apache.http.protocol.HttpProcessor', + 'org.apache.http.protocol.HttpRequestExecutor' ] check { diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index 2908007cc432b..3b8e19f7c791c 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -80,7 +80,7 @@ public Storage createClient(final String clientName) throws GeneralSecurityExcep storageOptionsBuilder.setProjectId(clientSettings.getProjectId()); } if (clientSettings.getCredential() == null) { - logger.warn("Application Default Credentials are not supported out of the box." + logger.warn("\"Application Default Credentials\" are not supported out of the box." 
+ " Additional file system permissions have to be granted to the plugin."); } else { final ServiceAccountCredentials serviceAccountCredentials = clientSettings.getCredential(); From a351520561cba8953cd4a2eff2069950cd234cf3 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 3 May 2018 16:45:15 +0300 Subject: [PATCH 24/45] Create small blobs in one request --- .../gcs/GoogleCloudStorageBlobStore.java | 39 +++++++++++-------- .../repositories/gcs/MockStorage.java | 4 +- 2 files changed, 25 insertions(+), 18 deletions(-) diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 080e531b9f38e..a6535851eb07a 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -42,6 +42,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.internal.io.Streams; +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; @@ -187,24 +188,30 @@ public void close() throws IOException { */ void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { final BlobInfo blobInfo = BlobInfo.newBuilder(bucket, blobName).build(); - final WriteChannel writeChannel = SocketAccess.doPrivilegedIOException(() -> storage.writer(blobInfo)); - Streams.copy(inputStream, java.nio.channels.Channels.newOutputStream(new WritableByteChannel() { - @Override - public boolean isOpen() { - return writeChannel.isOpen(); - } + if (blobSize > 1_000_000) { + final WriteChannel writeChannel = SocketAccess.doPrivilegedIOException(() -> storage.writer(blobInfo)); + Streams.copy(inputStream, java.nio.channels.Channels.newOutputStream(new WritableByteChannel() { + @Override + public boolean isOpen() { + return writeChannel.isOpen(); + } - @Override - public void close() throws IOException { - SocketAccess.doPrivilegedVoidIOException(writeChannel::close); - } + @Override + public void close() throws IOException { + SocketAccess.doPrivilegedVoidIOException(writeChannel::close); + } - @SuppressForbidden(reason = "Channel is based of a socket not a file.") - @Override - public int write(ByteBuffer src) throws IOException { - return SocketAccess.doPrivilegedIOException(() -> writeChannel.write(src)); - } - })); + @SuppressForbidden(reason = "Channel is based of a socket not a file.") + @Override + public int write(ByteBuffer src) throws IOException { + return SocketAccess.doPrivilegedIOException(() -> writeChannel.write(src)); + } + })); + } else { + final ByteArrayOutputStream baos = new ByteArrayOutputStream(Math.toIntExact(blobSize)); + Streams.copy(inputStream, baos); + SocketAccess.doPrivilegedVoidIOException(() -> storage.create(blobInfo, baos.toByteArray())); + } } /** diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java index 97957ea43e19c..00f5fd0ef6e8e 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java @@ -126,12 +126,12 @@ public Bucket create(BucketInfo bucketInfo, BucketTargetOption... 
options) { @Override public Blob create(BlobInfo blobInfo, BlobTargetOption... options) { - throw new RuntimeException("Mock not implemented"); + return constructMockBlob(blobInfo.getName(), new byte[0], blobsMap); } @Override public Blob create(BlobInfo blobInfo, byte[] content, BlobTargetOption... options) { - throw new RuntimeException("Mock not implemented"); + return constructMockBlob(blobInfo.getName(), content, blobsMap); } @Override From a9a240cee4daa18a1cb35802751131e84e93f02e Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 3 May 2018 16:56:18 +0300 Subject: [PATCH 25/45] Removed useless test --- .../gcs/GoogleCloudStorageClientSettingsTests.java | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java index b731d5dd507c0..efa2e283c59db 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java @@ -196,13 +196,8 @@ private static Tuple randomCredential(final S encodedPrivateKey + "\\n-----END PRIVATE KEY-----\\n\"," + "\"client_email\":\"" + clientName + "\"," + - "\"client_id\":\"id_" + clientName + "\"," + - "\"auth_uri\":\"https://accounts.google.com/o/oauth2/auth\"," + - "\"token_uri\":\"https://accounts.google.com/o/oauth2/token\"," + - "\"auth_provider_x509_cert_url\":\"https://www.googleapis.com/oauth2/v1/certs\"," + - "\"client_x509_cert_url\":\"https://www.googleapis.com/robot/v1/metadata/x509/" + - clientName + - "%40appspot.gserviceaccount.com\"}"; + "\"client_id\":\"id_" + clientName + "\"" + + "}"; return Tuple.tuple(credentialBuilder.build(), serviceAccount.getBytes(StandardCharsets.UTF_8)); } From d0029dffbe73249dc747d2eee068e61e023cbfd4 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sat, 5 May 2018 13:35:57 +0300 Subject: [PATCH 26/45] So close... 
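
The earlier change "Create small blobs in one request" switched small blobs to a single-request ("multipart") upload, which the test server below now has to understand. For reference, a minimal standalone sketch of that small-blob path, assuming only the google-cloud-storage client on the classpath; Storage#create(BlobInfo, byte[]) and BlobInfo.newBuilder are real client API, but the class, method and argument names here are illustrative and not the plugin's own code:

    import com.google.cloud.storage.BlobInfo;
    import com.google.cloud.storage.Storage;

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;

    class SmallBlobUploadSketch {
        // Upload a blob whose size is known up front and small enough to buffer in memory.
        static void uploadSmallBlob(Storage storage, String bucket, String name,
                                    InputStream data, long size) throws IOException {
            final BlobInfo blobInfo = BlobInfo.newBuilder(bucket, name).build();
            // buffer the whole stream; acceptable only because the blob is small
            final ByteArrayOutputStream buffer = new ByteArrayOutputStream(Math.toIntExact(size));
            final byte[] chunk = new byte[8192];
            int read;
            while ((read = data.read(chunk)) != -1) {
                buffer.write(chunk, 0, read);
            }
            // a single HTTP request carries both the object metadata and its content
            storage.create(blobInfo, buffer.toByteArray());
        }
    }

Larger blobs keep going through the WriteChannel so the upload can span multiple, individually retriable requests.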
--- .../gcs/GoogleCloudStorageTestServer.java | 165 ++++++++++++++++-- .../gcs/GoogleCloudStorageBlobStore.java | 6 + 2 files changed, 152 insertions(+), 19 deletions(-) diff --git a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java index 06158ff75ff8d..6c156a7e8e605 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java +++ b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java @@ -31,6 +31,7 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.ArrayList; @@ -38,6 +39,9 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.zip.GZIPInputStream; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; @@ -224,25 +228,105 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/json_api/v1/objects/insert handlers.insert("POST " + endpoint + "/upload/storage/v1/b/{bucket}/o", (params, headers, body) -> { - if ("resumable".equals(params.get("uploadType")) == false) { - return newError(RestStatus.INTERNAL_SERVER_ERROR, "upload type must be resumable"); - } - - final String objectName = params.get("name"); - if (Strings.hasText(objectName) == false) { - return newError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); - } - - final Bucket bucket = buckets.get(params.get("bucket")); - if (bucket == null) { - return newError(RestStatus.NOT_FOUND, "bucket not found"); - } - - if (bucket.objects.put(objectName, EMPTY_BYTE) == null) { - String location = endpoint + "/upload/storage/v1/b/" + bucket.name + "/o?uploadType=resumable&upload_id=" + objectName; - return new Response(RestStatus.CREATED, singletonMap("Location", location), XContentType.JSON.mediaType(), EMPTY_BYTE); + final String uploadType = params.get("uploadType"); + if ("resumable".equals(uploadType)) { + final String objectName = params.get("name"); + if (Strings.hasText(objectName) == false) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); + } + final Bucket bucket = buckets.get(params.get("bucket")); + if (bucket == null) { + return newError(RestStatus.NOT_FOUND, "bucket not found"); + } + if (bucket.objects.put(objectName, EMPTY_BYTE) == null) { + String location = endpoint + "/upload/storage/v1/b/" + bucket.name + "/o?uploadType=resumable&upload_id=" + objectName; + return new Response(RestStatus.CREATED, singletonMap("Location", location), XContentType.JSON.mediaType(), EMPTY_BYTE); + } else { + return newError(RestStatus.CONFLICT, "object already exist"); + } + } else if ("multipart".equals(uploadType)) { + +// A multipart request body looks like this: +// --__END_OF_PART__ +// Content-Length: 135 +// Content-Type: application/json; charset=UTF-8 +// content-transfer-encoding: binary +// +// {"bucket":"bucket_test","crc32c":"7XacHQ==","md5Hash":"fVztGkklMlUamsSmJK7W+w==", +// "name":"tests-KEwE3bU4TuyetBgQIghmUw/master.dat-temp"} +// --__END_OF_PART__ 
+// content-transfer-encoding: binary +// +// KEwE3bU4TuyetBgQIghmUw +// --__END_OF_PART__-- + + String boundary = "__END_OF_PART__"; + // Determine the multipart boundary + final List contentTypes = headers.getOrDefault("Content-Type", headers.get("Content-type")); + if (contentTypes != null) { + final String contentType = contentTypes.get(0); + if (contentType != null && contentType.contains("multipart/related; boundary=")) { + boundary = contentType.replace("multipart/related; boundary=", ""); + } + } + InputStream inputStreamBody = new ByteArrayInputStream(body); + final List contentEncodings = headers.getOrDefault("Content-Encoding", headers.get("Content-encoding")); + if (contentEncodings != null) { + if (contentEncodings.stream().anyMatch(x -> "gzip".equalsIgnoreCase(x))) { + inputStreamBody = new GZIPInputStream(inputStreamBody); + } + } + String objectName = null; + String bucketName = null; + byte[] objectData = null; + byte[] metadata = null; + // Read line by line ?both? multiparts + try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStreamBody, StandardCharsets.UTF_8))) { + String line; + while ((line = reader.readLine()) != null) { + // Start of a batched request + if (line.equals("--" + boundary)) { + final Map> partHeaders = new HashMap<>(); + // Reads the headers, if any + while ((line = reader.readLine()) != null) { + if (line.equals("\r\n") || line.length() == 0) { + // end of headers + break; + } else { + String[] header = line.split(":", 2); + partHeaders.put(header[0], singletonList(header[1])); + } + } + line = reader.readLine(); + final List partContentTypes = partHeaders.getOrDefault("Content-Type", partHeaders.get("Content-type")); + if (partContentTypes != null && partContentTypes.stream().anyMatch(x -> x.contains("application/json"))) { + // metadata part + final Matcher objectNameMatcher = Pattern.compile("\"name\":\"([^\"]*)\"").matcher(line); + objectNameMatcher.find(); + objectName = objectNameMatcher.group(1); + final Matcher bucketNameMatcher = Pattern.compile("\"bucket\":\"([^\"]*)\"").matcher(line); + bucketNameMatcher.find(); + bucketName = bucketNameMatcher.group(1); + metadata = line.getBytes(StandardCharsets.UTF_8); + } else { + objectData = line.getBytes(StandardCharsets.UTF_8); + } + } + } + } + final Bucket bucket = buckets.get(bucketName); + if (bucket == null) { + return newError(RestStatus.NOT_FOUND, "bucket not found"); + } + bucket.objects.put(objectName, body); + if (objectName != null && bucketName != null && objectData != null) { + //return newResponse(RestStatus.OK, emptyMap(), buildObjectResource(bucketName, objectName, objectData)); + return new Response(RestStatus.OK, emptyMap(), XContentType.JSON.mediaType(), metadata); + } else { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "error parsing multipart request"); + } } else { - return newError(RestStatus.CONFLICT, "object already exist"); + return newError(RestStatus.INTERNAL_SERVER_ERROR, "upload type must be resumable or multipart"); } }); @@ -301,6 +385,48 @@ private static PathTrie defaultHandlers(final String endpoint, f return newResponse(RestStatus.OK, emptyMap(), buildObjectResource(destBucket.name, dest, sourceBytes)); }); + // Rewrite Object + // + // https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite + handlers.insert("POST " + endpoint + "/storage/v1/b/{srcBucket}/o/{src}/rewriteTo/b/{destBucket}/o/{dest}", + (params, headers, body) -> { + final String source = params.get("src"); + if (Strings.hasText(source) == false) { + return 
newError(RestStatus.INTERNAL_SERVER_ERROR, "source object name is missing"); + } + final Bucket srcBucket = buckets.get(params.get("srcBucket")); + if (srcBucket == null) { + return newError(RestStatus.NOT_FOUND, "source bucket not found"); + } + final String dest = params.get("dest"); + if (Strings.hasText(dest) == false) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "destination object name is missing"); + } + final Bucket destBucket = buckets.get(params.get("destBucket")); + if (destBucket == null) { + return newError(RestStatus.NOT_FOUND, "destination bucket not found"); + } + final byte[] sourceBytes = srcBucket.objects.get(source); + if (sourceBytes == null) { + return newError(RestStatus.NOT_FOUND, "source object not found"); + } + destBucket.objects.put(dest, sourceBytes); + final XContentBuilder respBuilder = jsonBuilder().startObject() + .field("kind", "storage#rewriteResponse") + .field("totalBytesRewritten", String.valueOf(sourceBytes.length)) + .field("objectSize", String.valueOf(sourceBytes.length)) + .field("done", true) + .startObject("resource") + .field("kind", "storage#object") + .field("id", String.join("/", destBucket.name, dest)) + .field("name", dest) + .field("bucket", destBucket.name) + .field("size", String.valueOf(sourceBytes.length)) + .endObject() + .endObject(); + return newResponse(RestStatus.OK, emptyMap(), respBuilder); + }); + // List Objects // // https://cloud.google.com/storage/docs/json_api/v1/objects/list @@ -353,7 +479,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // Batch // // https://cloud.google.com/storage/docs/json_api/v1/how-tos/batch - handlers.insert("POST " + endpoint + "/batch", (params, headers, body) -> { + handlers.insert("POST " + endpoint + "/batch/storage/v1", (params, headers, body) -> { final List batchedResponses = new ArrayList<>(); // A batch request body looks like this: @@ -591,6 +717,7 @@ private static XContentBuilder buildObjectResource(final XContentBuilder builder .field("kind", "storage#object") .field("id", String.join("/", bucket, name)) .field("name", name) + .field("bucket", bucket) .field("size", String.valueOf(bytes.length)) .endObject(); } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index a6535851eb07a..9afa78218bf58 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -245,6 +245,12 @@ void deleteBlobs(Collection blobNames) throws IOException { if ((blobNames == null) || blobNames.isEmpty()) { return; } + if (blobNames.size() < 5) { + for (final String blobName : blobNames) { + deleteBlob(blobName); + } + return; + } final List blobIdsToDelete = blobNames.stream().map(blobName -> BlobId.of(bucket, blobName)).collect(Collectors.toList()); final List deletedStatuses = SocketAccess.doPrivilegedIOException(() -> storage.delete(blobIdsToDelete)); assert blobIdsToDelete.size() == deletedStatuses.size(); From 132c2ea171e5705e4063c5a4520fc654a9afa2e4 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 7 May 2018 11:39:01 +0300 Subject: [PATCH 27/45] Hooray! 
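
The fix in this patch hinges on reading the multipart body through an ISO_8859_1 reader so that raw bytes survive the character round-trip. A self-contained check of that assumption, using only the JDK (the class name is made up for illustration):

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    class Latin1RoundTripCheck {
        public static void main(String[] args) {
            final byte[] original = new byte[256];
            for (int i = 0; i < 256; i++) {
                original[i] = (byte) i;          // every possible byte value
            }
            // ISO-8859-1 maps bytes 0x00-0xFF one-to-one onto chars U+0000-U+00FF,
            // so decoding and re-encoding returns exactly the same bytes.
            final String asText = new String(original, StandardCharsets.ISO_8859_1);
            final byte[] roundTripped = asText.getBytes(StandardCharsets.ISO_8859_1);
            System.out.println("round-trip intact: " + Arrays.equals(original, roundTripped));
        }
    }

With UTF-8 the same round-trip corrupts any byte above 0x7F, which is why the earlier UTF-8 based parsing mangled the binary part of the multipart upload.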
--- .../gcs/GoogleCloudStorageTestServer.java | 102 ++++++++++-------- .../gcs/GoogleCloudStorageBlobStore.java | 2 +- 2 files changed, 61 insertions(+), 43 deletions(-) diff --git a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java index 6c156a7e8e605..b3b41dde1d5b3 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java +++ b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java @@ -35,6 +35,7 @@ import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -169,7 +170,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/json_api/v1/buckets/get handlers.insert("GET " + endpoint + "/storage/v1/b/{bucket}", (params, headers, body) -> { - String name = params.get("bucket"); + final String name = params.get("bucket"); if (Strings.hasText(name) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "bucket name is missing"); } @@ -185,7 +186,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/json_api/v1/objects/get handlers.insert("GET " + endpoint + "/storage/v1/b/{bucket}/o/{object}", (params, headers, body) -> { - String objectName = params.get("object"); + final String objectName = params.get("object"); if (Strings.hasText(objectName) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); } @@ -195,7 +196,7 @@ private static PathTrie defaultHandlers(final String endpoint, f return newError(RestStatus.NOT_FOUND, "bucket not found"); } - for (Map.Entry object : bucket.objects.entrySet()) { + for (final Map.Entry object : bucket.objects.entrySet()) { if (object.getKey().equals(objectName)) { return newResponse(RestStatus.OK, emptyMap(), buildObjectResource(bucket.name, objectName, object.getValue())); } @@ -207,7 +208,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/json_api/v1/objects/delete handlers.insert("DELETE " + endpoint + "/storage/v1/b/{bucket}/o/{object}", (params, headers, body) -> { - String objectName = params.get("object"); + final String objectName = params.get("object"); if (Strings.hasText(objectName) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); } @@ -239,14 +240,14 @@ private static PathTrie defaultHandlers(final String endpoint, f return newError(RestStatus.NOT_FOUND, "bucket not found"); } if (bucket.objects.put(objectName, EMPTY_BYTE) == null) { - String location = endpoint + "/upload/storage/v1/b/" + bucket.name + "/o?uploadType=resumable&upload_id=" + objectName; + final String location = endpoint + "/upload/storage/v1/b/" + bucket.name + "/o?uploadType=resumable&upload_id=" + + objectName; return new Response(RestStatus.CREATED, singletonMap("Location", location), XContentType.JSON.mediaType(), EMPTY_BYTE); } else { return newError(RestStatus.CONFLICT, "object already exist"); } } else if ("multipart".equals(uploadType)) { - -// A multipart request body looks like this: +// A 
multipart/related request body looks like this (note the binary dump inside a text blob! nice!): // --__END_OF_PART__ // Content-Length: 135 // Content-Type: application/json; charset=UTF-8 @@ -259,13 +260,12 @@ private static PathTrie defaultHandlers(final String endpoint, f // // KEwE3bU4TuyetBgQIghmUw // --__END_OF_PART__-- - String boundary = "__END_OF_PART__"; // Determine the multipart boundary final List contentTypes = headers.getOrDefault("Content-Type", headers.get("Content-type")); if (contentTypes != null) { final String contentType = contentTypes.get(0); - if (contentType != null && contentType.contains("multipart/related; boundary=")) { + if ((contentType != null) && contentType.contains("multipart/related; boundary=")) { boundary = contentType.replace("multipart/related; boundary=", ""); } } @@ -281,25 +281,26 @@ private static PathTrie defaultHandlers(final String endpoint, f byte[] objectData = null; byte[] metadata = null; // Read line by line ?both? multiparts - try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStreamBody, StandardCharsets.UTF_8))) { + try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStreamBody, StandardCharsets.ISO_8859_1))) { String line; while ((line = reader.readLine()) != null) { - // Start of a batched request + // System.out.println(line); if (line.equals("--" + boundary)) { final Map> partHeaders = new HashMap<>(); // Reads the headers, if any while ((line = reader.readLine()) != null) { - if (line.equals("\r\n") || line.length() == 0) { + // System.out.println(line); + if (line.equals("\r\n") || (line.length() == 0)) { // end of headers break; } else { - String[] header = line.split(":", 2); + final String[] header = line.split(":", 2); partHeaders.put(header[0], singletonList(header[1])); } } - line = reader.readLine(); final List partContentTypes = partHeaders.getOrDefault("Content-Type", partHeaders.get("Content-type")); - if (partContentTypes != null && partContentTypes.stream().anyMatch(x -> x.contains("application/json"))) { + if ((partContentTypes != null) && partContentTypes.stream().anyMatch(x -> x.contains("application/json"))) { + line = reader.readLine(); // metadata part final Matcher objectNameMatcher = Pattern.compile("\"name\":\"([^\"]*)\"").matcher(line); objectNameMatcher.find(); @@ -307,9 +308,23 @@ private static PathTrie defaultHandlers(final String endpoint, f final Matcher bucketNameMatcher = Pattern.compile("\"bucket\":\"([^\"]*)\"").matcher(line); bucketNameMatcher.find(); bucketName = bucketNameMatcher.group(1); - metadata = line.getBytes(StandardCharsets.UTF_8); + metadata = line.getBytes(StandardCharsets.ISO_8859_1); } else { - objectData = line.getBytes(StandardCharsets.UTF_8); + final ByteArrayOutputStream baos = new ByteArrayOutputStream(); + int c; + while ((c = reader.read()) != -1) { + baos.write(c); + } + final byte[] temp = baos.toByteArray(); + final byte[] trailingEnding = ("\r\n--" + boundary + "--\r\n").getBytes(StandardCharsets.ISO_8859_1); + // check trailing + // System.out.println(new String(temp)); + for (int i = trailingEnding.length - 1; i >= 0; i--) { + if (trailingEnding[i] != temp[(temp.length - trailingEnding.length) + i]) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "error parsing multipart request " + i); + } + } + objectData = Arrays.copyOf(temp, temp.length - trailingEnding.length); } } } @@ -318,9 +333,12 @@ private static PathTrie defaultHandlers(final String endpoint, f if (bucket == null) { return newError(RestStatus.NOT_FOUND, 
"bucket not found"); } - bucket.objects.put(objectName, body); - if (objectName != null && bucketName != null && objectData != null) { + if ((objectName != null) && (bucketName != null) && (objectData != null)) { //return newResponse(RestStatus.OK, emptyMap(), buildObjectResource(bucketName, objectName, objectData)); + // System.out.println(">>>>>>>>>>"); + // System.out.println(new String(objectData)); + // System.out.println(">>>>>>>>>>"); + bucket.objects.put(objectName, objectData); return new Response(RestStatus.OK, emptyMap(), XContentType.JSON.mediaType(), metadata); } else { return newError(RestStatus.INTERNAL_SERVER_ERROR, "error parsing multipart request"); @@ -334,7 +352,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload handlers.insert("PUT " + endpoint + "/upload/storage/v1/b/{bucket}/o", (params, headers, body) -> { - String objectId = params.get("upload_id"); + final String objectId = params.get("upload_id"); if (Strings.hasText(objectId) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "upload id is missing"); } @@ -356,7 +374,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/json_api/v1/objects/copy handlers.insert("POST " + endpoint + "/storage/v1/b/{srcBucket}/o/{src}/copyTo/b/{destBucket}/o/{dest}", (params, headers, body)-> { - String source = params.get("src"); + final String source = params.get("src"); if (Strings.hasText(source) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "source object name is missing"); } @@ -366,7 +384,7 @@ private static PathTrie defaultHandlers(final String endpoint, f return newError(RestStatus.NOT_FOUND, "source bucket not found"); } - String dest = params.get("dest"); + final String dest = params.get("dest"); if (Strings.hasText(dest) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "destination object name is missing"); } @@ -443,8 +461,8 @@ private static PathTrie defaultHandlers(final String endpoint, f builder.startArray("items"); final String prefixParam = params.get("prefix"); - for (Map.Entry object : bucket.objects.entrySet()) { - if (prefixParam != null && object.getKey().startsWith(prefixParam) == false) { + for (final Map.Entry object : bucket.objects.entrySet()) { + if ((prefixParam != null) && (object.getKey().startsWith(prefixParam) == false)) { continue; } buildObjectResource(builder, bucket.name, object.getKey(), object.getValue()); @@ -459,7 +477,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/request-body handlers.insert("GET " + endpoint + "/download/storage/v1/b/{bucket}/o/{object}", (params, headers, body) -> { - String object = params.get("object"); + final String object = params.get("object"); if (Strings.hasText(object) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "object id is missing"); } @@ -511,7 +529,7 @@ private static PathTrie defaultHandlers(final String endpoint, f final List contentTypes = headers.getOrDefault("Content-Type", headers.get("Content-type")); if (contentTypes != null) { final String contentType = contentTypes.get(0); - if (contentType != null && contentType.contains("multipart/mixed; boundary=")) { + if ((contentType != null) && contentType.contains("multipart/mixed; boundary=")) { boundary = contentType.replace("multipart/mixed; boundary=", ""); } } @@ -524,25 +542,25 @@ private static PathTrie 
defaultHandlers(final String endpoint, f while ((line = reader.readLine()) != null) { // Start of a batched request if (line.equals("--" + boundary)) { - Map> batchedHeaders = new HashMap<>(); + final Map> batchedHeaders = new HashMap<>(); // Reads the headers, if any while ((line = reader.readLine()) != null) { - if (line.equals("\r\n") || line.length() == 0) { + if (line.equals("\r\n") || (line.length() == 0)) { // end of headers break; } else { - String[] header = line.split(":", 2); + final String[] header = line.split(":", 2); batchedHeaders.put(header[0], singletonList(header[1])); } } // Reads the method and URL line = reader.readLine(); - String batchedUrl = line.substring(0, line.lastIndexOf(' ')); + final String batchedUrl = line.substring(0, line.lastIndexOf(' ')); final Map batchedParams = new HashMap<>(); - int questionMark = batchedUrl.indexOf('?'); + final int questionMark = batchedUrl.indexOf('?'); if (questionMark != -1) { RestUtils.decodeQueryString(batchedUrl.substring(questionMark + 1), 0, batchedParams); } @@ -550,16 +568,16 @@ private static PathTrie defaultHandlers(final String endpoint, f // Reads the body line = reader.readLine(); byte[] batchedBody = new byte[0]; - if (line != null || line.startsWith("--" + boundary) == false) { + if ((line != null) || (line.startsWith("--" + boundary) == false)) { batchedBody = line.getBytes(StandardCharsets.UTF_8); } // Executes the batched request - RequestHandler handler = handlers.retrieve(batchedUrl, batchedParams); + final RequestHandler handler = handlers.retrieve(batchedUrl, batchedParams); if (handler != null) { try { batchedResponses.add(handler.execute(batchedParams, batchedHeaders, batchedBody)); - } catch (IOException e) { + } catch (final IOException e) { batchedResponses.add(newError(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); } } @@ -568,11 +586,11 @@ private static PathTrie defaultHandlers(final String endpoint, f } // Now we can build the response - String sep = "--"; - String line = "\r\n"; + final String sep = "--"; + final String line = "\r\n"; - StringBuilder builder = new StringBuilder(); - for (Response response : batchedResponses) { + final StringBuilder builder = new StringBuilder(); + for (final Response response : batchedResponses) { builder.append(sep).append(boundary).append(line); builder.append("Content-Type: application/http").append(line); builder.append(line); @@ -591,7 +609,7 @@ private static PathTrie defaultHandlers(final String endpoint, f builder.append(line); builder.append(sep).append(boundary).append(sep); - byte[] content = builder.toString().getBytes(StandardCharsets.UTF_8); + final byte[] content = builder.toString().getBytes(StandardCharsets.UTF_8); return new Response(RestStatus.OK, emptyMap(), "multipart/mixed; boundary=" + boundary, content); }); @@ -651,7 +669,7 @@ private static Response newResponse(final RestStatus status, final Map 1_000_000) { + if (blobSize > (5 * 1024 * 1024)) { final WriteChannel writeChannel = SocketAccess.doPrivilegedIOException(() -> storage.writer(blobInfo)); Streams.copy(inputStream, java.nio.channels.Channels.newOutputStream(new WritableByteChannel() { @Override From da80077a75bc6c2acecf27d81ce6513acda61cc5 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 7 May 2018 12:43:19 +0300 Subject: [PATCH 28/45] Check finally passes on the REST mock --- .../gcs/GoogleCloudStorageTestServer.java | 170 ++++++++++-------- 1 file changed, 95 insertions(+), 75 deletions(-) diff --git 
a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java index b3b41dde1d5b3..8b8101252d672 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java +++ b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java @@ -247,19 +247,19 @@ private static PathTrie defaultHandlers(final String endpoint, f return newError(RestStatus.CONFLICT, "object already exist"); } } else if ("multipart".equals(uploadType)) { -// A multipart/related request body looks like this (note the binary dump inside a text blob! nice!): -// --__END_OF_PART__ -// Content-Length: 135 -// Content-Type: application/json; charset=UTF-8 -// content-transfer-encoding: binary -// -// {"bucket":"bucket_test","crc32c":"7XacHQ==","md5Hash":"fVztGkklMlUamsSmJK7W+w==", -// "name":"tests-KEwE3bU4TuyetBgQIghmUw/master.dat-temp"} -// --__END_OF_PART__ -// content-transfer-encoding: binary -// -// KEwE3bU4TuyetBgQIghmUw -// --__END_OF_PART__-- +/* A multipart/related request body looks like this (note the binary dump inside a text blob! nice!): +*--__END_OF_PART__ +*Content-Length: 135 +*Content-Type: application/json; charset=UTF-8 +*content-transfer-encoding: binary +* +*{"bucket":"bucket_test","crc32c":"7XacHQ==","md5Hash":"fVztGkklMlUamsSmJK7W+w==","name":"tests-KEwE3bU4TuyetBgQIghmUw/master.dat-temp"} +*--__END_OF_PART__ +*content-transfer-encoding: binary +* +*KEwE3bU4TuyetBgQIghmUw +*--__END_OF_PART__-- +*/ String boundary = "__END_OF_PART__"; // Determine the multipart boundary final List contentTypes = headers.getOrDefault("Content-Type", headers.get("Content-type")); @@ -269,79 +269,99 @@ private static PathTrie defaultHandlers(final String endpoint, f boundary = contentType.replace("multipart/related; boundary=", ""); } } - InputStream inputStreamBody = new ByteArrayInputStream(body); + InputStream inputStreamBody = new ByteArrayInputStream(body); final List contentEncodings = headers.getOrDefault("Content-Encoding", headers.get("Content-encoding")); if (contentEncodings != null) { if (contentEncodings.stream().anyMatch(x -> "gzip".equalsIgnoreCase(x))) { inputStreamBody = new GZIPInputStream(inputStreamBody); } } - String objectName = null; - String bucketName = null; - byte[] objectData = null; - byte[] metadata = null; - // Read line by line ?both? multiparts + // Read line by line ?both? parts of the multipart try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStreamBody, StandardCharsets.ISO_8859_1))) { String line; + // read first part delimiter + line = reader.readLine(); + if ((line == null) || (line.equals("--" + boundary) == false)) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, + "Error parsing multipart request. 
Does not start with the part delimiter."); + } + final Map> firstPartHeaders = new HashMap<>(); + // Reads the first part's headers, if any while ((line = reader.readLine()) != null) { - // System.out.println(line); - if (line.equals("--" + boundary)) { - final Map> partHeaders = new HashMap<>(); - // Reads the headers, if any - while ((line = reader.readLine()) != null) { - // System.out.println(line); - if (line.equals("\r\n") || (line.length() == 0)) { - // end of headers - break; - } else { - final String[] header = line.split(":", 2); - partHeaders.put(header[0], singletonList(header[1])); - } - } - final List partContentTypes = partHeaders.getOrDefault("Content-Type", partHeaders.get("Content-type")); - if ((partContentTypes != null) && partContentTypes.stream().anyMatch(x -> x.contains("application/json"))) { - line = reader.readLine(); - // metadata part - final Matcher objectNameMatcher = Pattern.compile("\"name\":\"([^\"]*)\"").matcher(line); - objectNameMatcher.find(); - objectName = objectNameMatcher.group(1); - final Matcher bucketNameMatcher = Pattern.compile("\"bucket\":\"([^\"]*)\"").matcher(line); - bucketNameMatcher.find(); - bucketName = bucketNameMatcher.group(1); - metadata = line.getBytes(StandardCharsets.ISO_8859_1); - } else { - final ByteArrayOutputStream baos = new ByteArrayOutputStream(); - int c; - while ((c = reader.read()) != -1) { - baos.write(c); - } - final byte[] temp = baos.toByteArray(); - final byte[] trailingEnding = ("\r\n--" + boundary + "--\r\n").getBytes(StandardCharsets.ISO_8859_1); - // check trailing - // System.out.println(new String(temp)); - for (int i = trailingEnding.length - 1; i >= 0; i--) { - if (trailingEnding[i] != temp[(temp.length - trailingEnding.length) + i]) { - return newError(RestStatus.INTERNAL_SERVER_ERROR, "error parsing multipart request " + i); - } - } - objectData = Arrays.copyOf(temp, temp.length - trailingEnding.length); - } + if (line.equals("\r\n") || (line.length() == 0)) { + // end of headers + break; + } else { + final String[] header = line.split(":", 2); + firstPartHeaders.put(header[0], singletonList(header[1])); } } - } - final Bucket bucket = buckets.get(bucketName); - if (bucket == null) { - return newError(RestStatus.NOT_FOUND, "bucket not found"); - } - if ((objectName != null) && (bucketName != null) && (objectData != null)) { - //return newResponse(RestStatus.OK, emptyMap(), buildObjectResource(bucketName, objectName, objectData)); - // System.out.println(">>>>>>>>>>"); - // System.out.println(new String(objectData)); - // System.out.println(">>>>>>>>>>"); - bucket.objects.put(objectName, objectData); - return new Response(RestStatus.OK, emptyMap(), XContentType.JSON.mediaType(), metadata); - } else { - return newError(RestStatus.INTERNAL_SERVER_ERROR, "error parsing multipart request"); + final List firstPartContentTypes = firstPartHeaders.getOrDefault("Content-Type", + firstPartHeaders.get("Content-type")); + if ((firstPartContentTypes == null) + || (firstPartContentTypes.stream().noneMatch(x -> x.contains("application/json")))) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, + "Error parsing multipart request. 
Metadata part expected to have the \"application/json\" content type."); + } + // read metadata part + line = reader.readLine(); + final Matcher objectNameMatcher = Pattern.compile("\"name\":\"([^\"]*)\"").matcher(line); + objectNameMatcher.find(); + final String objectName = objectNameMatcher.group(1); + final Matcher bucketNameMatcher = Pattern.compile("\"bucket\":\"([^\"]*)\"").matcher(line); + bucketNameMatcher.find(); + final String bucketName = bucketNameMatcher.group(1); + final byte[] metadata = line.getBytes(StandardCharsets.ISO_8859_1); + // read second part delimiter + line = reader.readLine(); + if ((line == null) || (line.equals("--" + boundary) == false)) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, + "Error parsing multipart request. Second part does not start with delimiter. " + + "Is the metadata multi-line?"); + } + final Map> secondPartHeaders = new HashMap<>(); + // Reads the second part's headers, if any + while ((line = reader.readLine()) != null) { + if (line.equals("\r\n") || (line.length() == 0)) { + // end of headers + break; + } else { + final String[] header = line.split(":", 2); + secondPartHeaders.put(header[0], singletonList(header[1])); + } + } + final List secondPartTransferEncoding = secondPartHeaders.getOrDefault("Content-Transfer-Encoding", + secondPartHeaders.get("content-transfer-encoding")); + if ((secondPartTransferEncoding == null) + || (secondPartTransferEncoding.stream().noneMatch(x -> x.contains("binary")))) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, + "Error parsing multipart request. Data part expected to have the \"binary\" content transfer encoding."); + } + final ByteArrayOutputStream baos = new ByteArrayOutputStream(); + int c; + while ((c = reader.read()) != -1) { + // one char to one byte, because of the ISO_8859_1 encoding + baos.write(c); + } + final byte[] temp = baos.toByteArray(); + final byte[] trailingEnding = ("\r\n--" + boundary + "--\r\n").getBytes(StandardCharsets.ISO_8859_1); + // check trailing + for (int i = trailingEnding.length - 1; i >= 0; i--) { + if (trailingEnding[i] != temp[(temp.length - trailingEnding.length) + i]) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "Error parsing multipart request."); + } + } + final Bucket bucket = buckets.get(bucketName); + if (bucket == null) { + return newError(RestStatus.NOT_FOUND, "bucket not found"); + } + final byte[] objectData = Arrays.copyOf(temp, temp.length - trailingEnding.length); + if ((objectName != null) && (bucketName != null) && (objectData != null)) { + bucket.objects.put(objectName, objectData); + return new Response(RestStatus.OK, emptyMap(), XContentType.JSON.mediaType(), metadata); + } else { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "error parsing multipart request"); + } } } else { return newError(RestStatus.INTERNAL_SERVER_ERROR, "upload type must be resumable or multipart"); From e239e4c2cbbebae52baf720592c8e3d7035984af Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 7 May 2018 13:20:18 +0300 Subject: [PATCH 29/45] final touches --- plugins/repository-gcs/build.gradle | 3 --- .../gcs/GoogleCloudStorageBlobStore.java | 4 ++++ .../gcs/GoogleCloudStorageRepository.java | 17 +++++++++-------- 3 files changed, 13 insertions(+), 11 deletions(-) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index d38e5496fa977..4896b80360772 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -18,9 +18,6 @@ */ import 
org.elasticsearch.gradle.test.AntFixture -import java.security.KeyPair -import java.security.KeyPairGenerator - esplugin { description 'The GCS repository plugin adds Google Cloud Storage support for repositories.' diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 9912b25df2b0e..44a8dbb56ebd9 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -189,6 +189,8 @@ public void close() throws IOException { void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { final BlobInfo blobInfo = BlobInfo.newBuilder(bucket, blobName).build(); if (blobSize > (5 * 1024 * 1024)) { + // uses "resumable upload" for files larger than 5MB, see + // https://cloud.google.com/storage/docs/json_api/v1/how-tos/multipart-upload final WriteChannel writeChannel = SocketAccess.doPrivilegedIOException(() -> storage.writer(blobInfo)); Streams.copy(inputStream, java.nio.channels.Channels.newOutputStream(new WritableByteChannel() { @Override @@ -208,6 +210,8 @@ public int write(ByteBuffer src) throws IOException { } })); } else { + // uses multipart upload for small files (1 request for both data and metadata, + // gziped) final ByteArrayOutputStream baos = new ByteArrayOutputStream(Math.toIntExact(blobSize)); Streams.copy(inputStream, baos); SocketAccess.doPrivilegedVoidIOException(() -> storage.create(blobInfo, baos.toByteArray())); diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java index 2cf9b939ae8f9..976befae0a269 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java @@ -19,7 +19,6 @@ package org.elasticsearch.repositories.gcs; -import com.google.cloud.storage.Storage; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; @@ -39,6 +38,8 @@ import static org.elasticsearch.common.settings.Setting.byteSizeSetting; import static org.elasticsearch.common.settings.Setting.simpleString; +import com.google.cloud.storage.Storage; + class GoogleCloudStorageRepository extends BlobStoreRepository { // package private for testing @@ -67,12 +68,12 @@ class GoogleCloudStorageRepository extends BlobStoreRepository { GoogleCloudStorageService storageService) throws Exception { super(metadata, environment.settings(), namedXContentRegistry); - final String bucket = getSetting(BUCKET, metadata); - final String clientName = CLIENT_NAME.get(metadata.settings()); - final String basePath = BASE_PATH.get(metadata.settings()); + String bucket = getSetting(BUCKET, metadata); + String clientName = CLIENT_NAME.get(metadata.settings()); + String basePath = BASE_PATH.get(metadata.settings()); if (Strings.hasLength(basePath)) { BlobPath path = new BlobPath(); - for (final String elem : basePath.split("/")) { + for (String elem : basePath.split("/")) { path = path.add(elem); } this.basePath = path; @@ -85,7 +86,7 @@ 
class GoogleCloudStorageRepository extends BlobStoreRepository { logger.debug("using bucket [{}], base_path [{}], chunk_size [{}], compress [{}]", bucket, basePath, chunkSize, compress); - final Storage client = SocketAccess.doPrivilegedIOException(() -> storageService.createClient(clientName)); + Storage client = SocketAccess.doPrivilegedIOException(() -> storageService.createClient(clientName)); this.blobStore = new GoogleCloudStorageBlobStore(settings, bucket, client); } @@ -114,11 +115,11 @@ protected ByteSizeValue chunkSize() { * Get a given setting from the repository settings, throwing a {@link RepositoryException} if the setting does not exist or is empty. */ static T getSetting(Setting setting, RepositoryMetaData metadata) { - final T value = setting.get(metadata.settings()); + T value = setting.get(metadata.settings()); if (value == null) { throw new RepositoryException(metadata.name(), "Setting [" + setting.getKey() + "] is not defined for repository"); } - if ((value instanceof String) && ((Strings.hasText((String) value)) == false)) { + if ((value instanceof String) && (Strings.hasText((String) value)) == false) { throw new RepositoryException(metadata.name(), "Setting [" + setting.getKey() + "] is empty for repository"); } return value; From b97abf44564b315f925f2b0c0734c872acf750ef Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 7 May 2018 13:31:27 +0300 Subject: [PATCH 30/45] Changelog entry --- docs/CHANGELOG.asciidoc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/CHANGELOG.asciidoc b/docs/CHANGELOG.asciidoc index f9cb572eb81a4..d9445b263929b 100644 --- a/docs/CHANGELOG.asciidoc +++ b/docs/CHANGELOG.asciidoc @@ -150,6 +150,9 @@ option. ({pull}30140[#29658]) A new analysis plugin called `analysis_nori` that exposes the Lucene Korean analysis module. ({pull}30397[#30397]) +The `repository-gcs` plugins now uses the latest `google-cloud-storage` client +library replacing the old `google-api-client` one. ({pull}30168[#30168]) + [float] === Enhancements From 15a5d51e04150fcfadbf78881fed62f703fa4ab6 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 7 May 2018 20:43:38 +0300 Subject: [PATCH 31/45] Feedback part 1 --- docs/CHANGELOG.asciidoc | 2 +- plugins/repository-gcs/build.gradle | 1 - .../gcs/GoogleCloudStorageTestServer.java | 66 ++++++++----- .../gcs/GoogleCloudStorageBlobStore.java | 99 ++++++++++++------- 4 files changed, 104 insertions(+), 64 deletions(-) diff --git a/docs/CHANGELOG.asciidoc b/docs/CHANGELOG.asciidoc index d9445b263929b..3a5adadcd8750 100644 --- a/docs/CHANGELOG.asciidoc +++ b/docs/CHANGELOG.asciidoc @@ -151,7 +151,7 @@ A new analysis plugin called `analysis_nori` that exposes the Lucene Korean analysis module. ({pull}30397[#30397]) The `repository-gcs` plugins now uses the latest `google-cloud-storage` client -library replacing the old `google-api-client` one. ({pull}30168[#30168]) +library replacing the deprecated `google-api-client` one. 
({pull}30168[#30168]) [float] === Enhancements diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 4896b80360772..8532c29e33fa0 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -45,7 +45,6 @@ dependencies { compile 'org.codehaus.jackson:jackson-core-asl:1.9.13' compile 'io.grpc:grpc-context:1.9.0' compile 'io.opencensus:opencensus-api:0.11.1' - compile 'io.opencensus:opencensus-api:0.11.1' compile 'io.opencensus:opencensus-contrib-http-util:0.11.1' compile 'org.threeten:threetenbp:1.3.6' } diff --git a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java index 8b8101252d672..de9de55213e0d 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java +++ b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java @@ -247,19 +247,21 @@ private static PathTrie defaultHandlers(final String endpoint, f return newError(RestStatus.CONFLICT, "object already exist"); } } else if ("multipart".equals(uploadType)) { -/* A multipart/related request body looks like this (note the binary dump inside a text blob! nice!): -*--__END_OF_PART__ -*Content-Length: 135 -*Content-Type: application/json; charset=UTF-8 -*content-transfer-encoding: binary -* -*{"bucket":"bucket_test","crc32c":"7XacHQ==","md5Hash":"fVztGkklMlUamsSmJK7W+w==","name":"tests-KEwE3bU4TuyetBgQIghmUw/master.dat-temp"} -*--__END_OF_PART__ -*content-transfer-encoding: binary -* -*KEwE3bU4TuyetBgQIghmUw -*--__END_OF_PART__-- -*/ + /* + * A multipart/related request body looks like this (note the binary dump inside a text blob! 
nice!): + * --__END_OF_PART__ + * Content-Length: 135 + * Content-Type: application/json; charset=UTF-8 + * content-transfer-encoding: binary + * + * {"bucket":"bucket_test","crc32c":"7XacHQ==","md5Hash":"fVztGkklMlUamsSmJK7W+w==", + * "name":"tests-KEwE3bU4TuyetBgQIghmUw/master.dat-temp"} + * --__END_OF_PART__ + * content-transfer-encoding: binary + * + * KEwE3bU4TuyetBgQIghmUw + * --__END_OF_PART__-- + */ String boundary = "__END_OF_PART__"; // Determine the multipart boundary final List contentTypes = headers.getOrDefault("Content-Type", headers.get("Content-type")); @@ -449,19 +451,8 @@ private static PathTrie defaultHandlers(final String endpoint, f return newError(RestStatus.NOT_FOUND, "source object not found"); } destBucket.objects.put(dest, sourceBytes); - final XContentBuilder respBuilder = jsonBuilder().startObject() - .field("kind", "storage#rewriteResponse") - .field("totalBytesRewritten", String.valueOf(sourceBytes.length)) - .field("objectSize", String.valueOf(sourceBytes.length)) - .field("done", true) - .startObject("resource") - .field("kind", "storage#object") - .field("id", String.join("/", destBucket.name, dest)) - .field("name", dest) - .field("bucket", destBucket.name) - .field("size", String.valueOf(sourceBytes.length)) - .endObject() - .endObject(); + final XContentBuilder respBuilder = jsonBuilder(); + buildRewriteResponse(respBuilder, destBucket.name, dest, sourceBytes.length); return newResponse(RestStatus.OK, emptyMap(), respBuilder); }); @@ -759,4 +750,27 @@ private static XContentBuilder buildObjectResource(final XContentBuilder builder .field("size", String.valueOf(bytes.length)) .endObject(); } + + /** + * Builds the rewrite response as defined by + * https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite + */ + private static XContentBuilder buildRewriteResponse(final XContentBuilder builder, final String destBucket, final String dest, + final int byteSize) + throws IOException { + final XContentBuilder respBuilder = builder.startObject() + .field("kind", "storage#rewriteResponse") + .field("totalBytesRewritten", String.valueOf(byteSize)) + .field("objectSize", String.valueOf(byteSize)) + .field("done", true) + .startObject("resource") + .field("kind", "storage#object") + .field("id", String.join("/", destBucket, dest)) + .field("name", dest) + .field("bucket", destBucket) + .field("size", String.valueOf(byteSize)) + .endObject() + .endObject(); + return builder; + } } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 44a8dbb56ebd9..4fb7c99f736c2 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -25,7 +25,6 @@ import com.google.cloud.storage.BlobId; import com.google.cloud.storage.BlobInfo; import com.google.cloud.storage.Bucket; -import com.google.cloud.storage.CopyWriter; import com.google.cloud.storage.Storage; import com.google.cloud.storage.Storage.BlobListOption; import com.google.cloud.storage.Storage.CopyRequest; @@ -46,6 +45,7 @@ import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; +import java.nio.channels.Channels; import java.nio.channels.ReadableByteChannel; import java.nio.channels.WritableByteChannel; import java.nio.file.NoSuchFileException; 
@@ -58,6 +58,11 @@ class GoogleCloudStorageBlobStore extends AbstractComponent implements BlobStore private final Storage storage; private final String bucket; + // The recommended maximum size of a blob that should be uploaded in a single + // request. Larger files should be uploaded over multiple requests (this is + // called "resumable upload") + // https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload + private static final int LARGE_BLOB_THRESHOLD_BYTE_SIZE = 5 * 1024 * 1024; GoogleCloudStorageBlobStore(Settings settings, String bucket, Storage storage) { super(settings); @@ -103,7 +108,7 @@ boolean doesBucketExist(String bucketName) { /** * List all blobs in the bucket * - * @param path base path of the blobs to list + * @param prefix base path of the blobs to list * @return a map of blob names and their metadata */ Map listBlobs(String prefix) throws IOException { @@ -158,10 +163,10 @@ InputStream readBlob(String blobName) throws IOException { final BlobId blobId = BlobId.of(bucket, blobName); final Blob blob = SocketAccess.doPrivilegedIOException(() -> storage.get(blobId)); if (blob == null) { - throw new NoSuchFileException("Blob [" + blobName + "] does not exit."); + throw new NoSuchFileException("Blob [" + blobName + "] does not exit"); } final ReadChannel readChannel = SocketAccess.doPrivilegedIOException(blob::reader); - return java.nio.channels.Channels.newInputStream(new ReadableByteChannel() { + return Channels.newInputStream(new ReadableByteChannel() { @SuppressForbidden(reason = "Channel is based of a socket not a file.") @Override public int read(ByteBuffer dst) throws IOException { @@ -188,36 +193,59 @@ public void close() throws IOException { */ void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { final BlobInfo blobInfo = BlobInfo.newBuilder(bucket, blobName).build(); - if (blobSize > (5 * 1024 * 1024)) { - // uses "resumable upload" for files larger than 5MB, see - // https://cloud.google.com/storage/docs/json_api/v1/how-tos/multipart-upload - final WriteChannel writeChannel = SocketAccess.doPrivilegedIOException(() -> storage.writer(blobInfo)); - Streams.copy(inputStream, java.nio.channels.Channels.newOutputStream(new WritableByteChannel() { - @Override - public boolean isOpen() { - return writeChannel.isOpen(); - } - - @Override - public void close() throws IOException { - SocketAccess.doPrivilegedVoidIOException(writeChannel::close); - } - - @SuppressForbidden(reason = "Channel is based of a socket not a file.") - @Override - public int write(ByteBuffer src) throws IOException { - return SocketAccess.doPrivilegedIOException(() -> writeChannel.write(src)); - } - })); + if (blobSize > LARGE_BLOB_THRESHOLD_BYTE_SIZE) { + writeBlobResumable(blobInfo, inputStream); } else { - // uses multipart upload for small files (1 request for both data and metadata, - // gziped) - final ByteArrayOutputStream baos = new ByteArrayOutputStream(Math.toIntExact(blobSize)); - Streams.copy(inputStream, baos); - SocketAccess.doPrivilegedVoidIOException(() -> storage.create(blobInfo, baos.toByteArray())); + writeBlobMultipart(blobInfo, inputStream, blobSize); } } + /** + * Uploads a blob using the "resumable upload" method (multiple requests, which + * can be independently retried in case of failure, see + * https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload + * + * @param blobInfo the info for the blob to be uploaded + * @param inputStream the stream containing the blob data + */ + private void 
writeBlobResumable(BlobInfo blobInfo, InputStream inputStream) throws IOException { + final WriteChannel writeChannel = SocketAccess.doPrivilegedIOException(() -> storage.writer(blobInfo)); + Streams.copy(inputStream, Channels.newOutputStream(new WritableByteChannel() { + @Override + public boolean isOpen() { + return writeChannel.isOpen(); + } + + @Override + public void close() throws IOException { + SocketAccess.doPrivilegedVoidIOException(writeChannel::close); + } + + @SuppressForbidden(reason = "Channel is based of a socket not a file.") + @Override + public int write(ByteBuffer src) throws IOException { + return SocketAccess.doPrivilegedIOException(() -> writeChannel.write(src)); + } + })); + } + + /** + * Uploads a blob using the "multipart upload" method (a single + * 'multipart/related' request containing both data and metadata. the request is + * gziped), see + * https://cloud.google.com/storage/docs/json_api/v1/how-tos/multipart-upload + * + * @param blobInfo the info for the blob to be uploaded + * @param inputStream the stream containing the blob data + * @param blobSize the size + */ + private void writeBlobMultipart(BlobInfo blobInfo, InputStream inputStream, long blobSize) throws IOException { + assert blobSize <= LARGE_BLOB_THRESHOLD_BYTE_SIZE : "large blob uploads should use the resumable upload method"; + final ByteArrayOutputStream baos = new ByteArrayOutputStream(Math.toIntExact(blobSize)); + Streams.copy(inputStream, baos); + SocketAccess.doPrivilegedVoidIOException(() -> storage.create(blobInfo, baos.toByteArray())); + } + /** * Deletes a blob in the bucket * @@ -246,10 +274,10 @@ void deleteBlobsByPrefix(String prefix) throws IOException { * @param blobNames names of the bucket to delete */ void deleteBlobs(Collection blobNames) throws IOException { - if ((blobNames == null) || blobNames.isEmpty()) { + if (blobNames == null || blobNames.isEmpty()) { return; } - if (blobNames.size() < 5) { + if (blobNames.size() < 3) { for (final String blobName : blobNames) { deleteBlob(blobName); } @@ -266,7 +294,7 @@ void deleteBlobs(Collection blobNames) throws IOException { } } if (failed) { - throw new IOException("Failed to delete all [" + blobIdsToDelete.size() + "] blobs."); + throw new IOException("Failed to delete all [" + blobIdsToDelete.size() + "] blobs"); } } @@ -285,11 +313,10 @@ void moveBlob(String sourceBlobName, String targetBlobName) throws IOException { .build(); SocketAccess.doPrivilegedVoidIOException(() -> { // There's no atomic "move" in GCS so we need to copy and delete - final CopyWriter copyWriter = storage.copy(request); - copyWriter.getResult(); + storage.copy(request).getResult(); final boolean deleted = storage.delete(sourceBlobId); if (deleted == false) { - throw new IOException("Failed to move source [" + sourceBlobName + "] to target [" + targetBlobName + "]."); + throw new IOException("Failed to move source [" + sourceBlobName + "] to target [" + targetBlobName + "]"); } }); } From 8ea1d0d483d049bcb6192a21e9f3f9a56947b09f Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 8 May 2018 14:13:51 +0300 Subject: [PATCH 32/45] Feedback part 2 --- .../gcs/GoogleCloudStorageBlobStore.java | 23 +++++++------- .../gcs/GoogleCloudStorageClientSettings.java | 2 +- .../gcs/GoogleCloudStorageService.java | 15 ++++++---- .../plugin-metadata/plugin-security.policy | 4 +-- .../plugin-metadata/plugin-security.policy | 30 +++++++++++++++++++ .../src/test/resources/plugin-security.policy | 23 ++++++++++++++ 6 files changed, 77 insertions(+), 20 deletions(-) 
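The writeBlob split above picks between the two JSON API upload methods by size: blobs over the 5 MB threshold are streamed through a resumable WriteChannel, smaller ones are buffered and sent as a single multipart request. A condensed sketch of that dispatch against the google-cloud-storage API, with the privileged wrappers omitted for brevity:

import com.google.cloud.WriteChannel;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.Storage;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;

final class UploadDispatchSketch {

    // Same 5 MB cut-off the plugin uses between resumable and multipart uploads.
    private static final long LARGE_BLOB_THRESHOLD_BYTE_SIZE = 5L * 1024 * 1024;

    static void upload(Storage storage, BlobInfo blobInfo, InputStream in, long size) throws IOException {
        if (size > LARGE_BLOB_THRESHOLD_BYTE_SIZE) {
            // resumable upload: data is streamed over one or more requests
            try (WriteChannel writer = storage.writer(blobInfo)) {
                final byte[] buffer = new byte[8 * 1024];
                int read;
                while ((read = in.read(buffer)) != -1) {
                    final ByteBuffer chunk = ByteBuffer.wrap(buffer, 0, read);
                    while (chunk.hasRemaining()) {
                        writer.write(chunk);
                    }
                }
            }
        } else {
            // multipart upload: metadata and content go out in a single request
            final ByteArrayOutputStream out = new ByteArrayOutputStream(Math.toIntExact(size));
            final byte[] buffer = new byte[8 * 1024];
            int read;
            while ((read = in.read(buffer)) != -1) {
                out.write(buffer, 0, read);
            }
            storage.create(blobInfo, out.toByteArray());
        }
    }
}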
create mode 100644 plugins/repository-gcs/src/test/plugin-metadata/plugin-security.policy create mode 100644 plugins/repository-gcs/src/test/resources/plugin-security.policy diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 4fb7c99f736c2..dddaaa97b7416 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -106,17 +106,18 @@ boolean doesBucketExist(String bucketName) { } /** - * List all blobs in the bucket + * List blobs in the bucket under the specified path. The path root is removed. * - * @param prefix base path of the blobs to list + * @param path + * base path of the blobs to list * @return a map of blob names and their metadata */ - Map listBlobs(String prefix) throws IOException { - return listBlobsByPrefix(prefix, ""); + Map listBlobs(String path) throws IOException { + return listBlobsByPrefix(path, ""); } /** - * List all blobs in the bucket which have a prefix. + * List all blobs in the bucket which have a prefix * * @param path * base path of the blobs to list. This path is removed from the @@ -167,7 +168,7 @@ InputStream readBlob(String blobName) throws IOException { } final ReadChannel readChannel = SocketAccess.doPrivilegedIOException(blob::reader); return Channels.newInputStream(new ReadableByteChannel() { - @SuppressForbidden(reason = "Channel is based of a socket not a file.") + @SuppressForbidden(reason = "Channel is based of a socket not a file") @Override public int read(ByteBuffer dst) throws IOException { return SocketAccess.doPrivilegedIOException(() -> readChannel.read(dst)); @@ -221,7 +222,7 @@ public void close() throws IOException { SocketAccess.doPrivilegedVoidIOException(writeChannel::close); } - @SuppressForbidden(reason = "Channel is based of a socket not a file.") + @SuppressForbidden(reason = "Channel is based of a socket not a file") @Override public int write(ByteBuffer src) throws IOException { return SocketAccess.doPrivilegedIOException(() -> writeChannel.write(src)); @@ -231,8 +232,8 @@ public int write(ByteBuffer src) throws IOException { /** * Uploads a blob using the "multipart upload" method (a single - * 'multipart/related' request containing both data and metadata. the request is - * gziped), see + * 'multipart/related' request containing both data and metadata. 
The request is + * gziped), see: * https://cloud.google.com/storage/docs/json_api/v1/how-tos/multipart-upload * * @param blobInfo the info for the blob to be uploaded @@ -274,7 +275,7 @@ void deleteBlobsByPrefix(String prefix) throws IOException { * @param blobNames names of the bucket to delete */ void deleteBlobs(Collection blobNames) throws IOException { - if (blobNames == null || blobNames.isEmpty()) { + if ((blobNames == null) || blobNames.isEmpty()) { return; } if (blobNames.size() < 3) { @@ -289,7 +290,7 @@ void deleteBlobs(Collection blobNames) throws IOException { boolean failed = false; for (int i = 0; i < blobIdsToDelete.size(); i++) { if (deletedStatuses.get(i) == false) { - logger.error("Failed to delete blob [{}] in bucket [{}].", blobIdsToDelete.get(i).getName(), bucket); + logger.error("Failed to delete blob [{}] in bucket [{}]", blobIdsToDelete.get(i).getName(), bucket); failed = true; } } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java index 01b3dcd06d846..525440d5bef5d 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -130,7 +130,7 @@ public class GoogleCloudStorageClientSettings { final TimeValue connectTimeout, final TimeValue readTimeout, final String applicationName, - final URI tokenUri) { + final URI tokenUri) { this.credential = credential; this.host = host; this.projectId = projectId; diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index 3b8e19f7c791c..f4570f7b82584 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.Environment; import java.io.IOException; +import java.net.URI; import java.security.GeneralSecurityException; import java.util.Map; @@ -83,13 +84,15 @@ public Storage createClient(final String clientName) throws GeneralSecurityExcep logger.warn("\"Application Default Credentials\" are not supported out of the box." + " Additional file system permissions have to be granted to the plugin."); } else { - final ServiceAccountCredentials serviceAccountCredentials = clientSettings.getCredential(); - if (Strings.hasLength(clientSettings.getTokenUri().toString())) { - storageOptionsBuilder - .setCredentials(serviceAccountCredentials.toBuilder().setTokenServerUri(clientSettings.getTokenUri()).build()); - } else { - storageOptionsBuilder.setCredentials(serviceAccountCredentials); + ServiceAccountCredentials serviceAccountCredentials = clientSettings.getCredential(); + // override token server URI + final URI tokenServerUri = clientSettings.getTokenUri(); + if (Strings.hasLength(tokenServerUri.toString())) { + // Rebuild the service account credentials in order to use a custom Token url. + // This is mostly used for testing purpose. 
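The override above rebuilds the service account credentials so that token requests go to a fixture URL rather than Google's OAuth2 endpoint. A standalone sketch of that wiring, assuming the credentials are loaded from a JSON key file on disk:

import com.google.auth.oauth2.ServiceAccountCredentials;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;

import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;

final class TokenUriOverrideSketch {

    // tokenUri would typically point at a local fixture, e.g.
    // http://127.0.0.1:<port>/o/oauth2/token, when testing against a mock server.
    static Storage clientFor(Path serviceAccountKey, URI tokenUri) throws IOException {
        ServiceAccountCredentials credentials;
        try (InputStream in = Files.newInputStream(serviceAccountKey)) {
            credentials = ServiceAccountCredentials.fromStream(in);
        }
        if (tokenUri != null) {
            // same rebuild-with-custom-token-server trick as in the hunk above
            credentials = credentials.toBuilder().setTokenServerUri(tokenUri).build();
        }
        return StorageOptions.newBuilder()
                .setCredentials(credentials)
                .build()
                .getService();
    }
}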
+ serviceAccountCredentials = serviceAccountCredentials.toBuilder().setTokenServerUri(tokenServerUri).build(); } + storageOptionsBuilder.setCredentials(serviceAccountCredentials); } return storageOptionsBuilder.build().getService(); } diff --git a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy index b46eef0744779..f0bfd0618510d 100644 --- a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy @@ -19,8 +19,8 @@ grant { permission java.lang.RuntimePermission "accessDeclaredMembers"; - permission java.lang.RuntimePermission "setFactory"; - permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; + //permission java.lang.RuntimePermission "setFactory"; + //permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; permission java.net.URLPermission "https://www.googleapis.com/-", "*:*"; permission java.net.URLPermission "https://accounts.google.com/-", "*:*"; diff --git a/plugins/repository-gcs/src/test/plugin-metadata/plugin-security.policy b/plugins/repository-gcs/src/test/plugin-metadata/plugin-security.policy new file mode 100644 index 0000000000000..a854829d6a229 --- /dev/null +++ b/plugins/repository-gcs/src/test/plugin-metadata/plugin-security.policy @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +grant { + permission java.lang.RuntimePermission "accessDeclaredMembers"; + permission java.lang.RuntimePermission "setFactory"; + //permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; + permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; + permission java.net.URLPermission "https://www.googleapis.com/-", "*:*"; + permission java.net.URLPermission "https://accounts.google.com/-", "*:*"; + + // gcs client opens socket connections for to access repository + permission java.net.SocketPermission "*", "connect"; +}; diff --git a/plugins/repository-gcs/src/test/resources/plugin-security.policy b/plugins/repository-gcs/src/test/resources/plugin-security.policy new file mode 100644 index 0000000000000..fece20dcba580 --- /dev/null +++ b/plugins/repository-gcs/src/test/resources/plugin-security.policy @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +grant { + permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; +}; + From 1a9bd4b3a17416b7479af7ebacec020ba1c9fa01 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 8 May 2018 16:55:09 +0300 Subject: [PATCH 33/45] Fix permissions --- .../repositories/gcs/GoogleCloudStorageBlobStore.java | 2 +- .../repositories/gcs/GoogleCloudStorageService.java | 1 + .../src/main/plugin-metadata/plugin-security.policy | 8 ++++---- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index dddaaa97b7416..67dc04c8c86ba 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -275,7 +275,7 @@ void deleteBlobsByPrefix(String prefix) throws IOException { * @param blobNames names of the bucket to delete */ void deleteBlobs(Collection blobNames) throws IOException { - if ((blobNames == null) || blobNames.isEmpty()) { + if (blobNames == null || blobNames.isEmpty()) { return; } if (blobNames.size() < 3) { diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index f4570f7b82584..a7ab41b8e45e4 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -63,6 +63,7 @@ public Storage createClient(final String clientName) throws GeneralSecurityExcep final HttpTransportOptions httpTransportOptions = HttpTransportOptions.newBuilder() .setConnectTimeout(toTimeout(clientSettings.getConnectTimeout())) .setReadTimeout(toTimeout(clientSettings.getReadTimeout())) + // requires 'java.lang.RuntimePermission "setFactory"' .setHttpTransportFactory(() -> netHttpTransport) .build(); final StorageOptions.Builder storageOptionsBuilder = StorageOptions.newBuilder() diff --git a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy index f0bfd0618510d..fffe6cbbc0f24 100644 --- a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy @@ -18,12 +18,12 @@ */ grant { + // required by: com.google.api.client.json.JsonParser#parseValue permission java.lang.RuntimePermission "accessDeclaredMembers"; - //permission java.lang.RuntimePermission "setFactory"; - //permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; + // required by: com.google.api.client.json.GenericJson# permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; - permission java.net.URLPermission 
"https://www.googleapis.com/-", "*:*"; - permission java.net.URLPermission "https://accounts.google.com/-", "*:*"; + // required to add google certs to the gcs client trustore + permission java.lang.RuntimePermission "setFactory"; // gcs client opens socket connections for to access repository permission java.net.SocketPermission "*", "connect"; From ccd7bcd01d53429da2a449f4d342d813742c4d72 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 10 May 2018 11:46:30 +0300 Subject: [PATCH 34/45] Revert HOST / ENDPOINT setting --- .../qa/google-cloud-storage/build.gradle | 2 +- .../gcs/GoogleCloudStorageClientSettings.java | 20 +++---------------- .../gcs/GoogleCloudStoragePlugin.java | 1 - ...GoogleCloudStorageClientSettingsTests.java | 10 ++-------- .../gcs/GoogleCloudStorageServiceTests.java | 2 +- 5 files changed, 7 insertions(+), 28 deletions(-) diff --git a/plugins/repository-gcs/qa/google-cloud-storage/build.gradle b/plugins/repository-gcs/qa/google-cloud-storage/build.gradle index 942a4fb31e80b..ec288ec691b1e 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/build.gradle +++ b/plugins/repository-gcs/qa/google-cloud-storage/build.gradle @@ -104,7 +104,7 @@ integTestCluster { if (useFixture) { dependsOn createServiceAccountFile, googleCloudStorageFixture /* Use a closure on the string to delay evaluation until tests are executed */ - setting 'gcs.client.integration_test.host', "http://${ -> googleCloudStorageFixture.addressAndPort }" + setting 'gcs.client.integration_test.endpoint', "http://${ -> googleCloudStorageFixture.addressAndPort }" setting 'gcs.client.integration_test.token_uri', "http://${ -> googleCloudStorageFixture.addressAndPort }/o/oauth2/token" } else { println "Using an external service to test the repository-gcs plugin" diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java index 525440d5bef5d..2f10612e2b40c 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -51,24 +51,10 @@ public class GoogleCloudStorageClientSettings { key -> SecureSetting.secureFile(key, null)); /** - * An override for the Storage endpoint to connect to. Deprecated, use host - * setting. + * An override for the Storage endpoint to connect to. */ static final Setting.AffixSetting ENDPOINT_SETTING = Setting.affixKeySetting(PREFIX, "endpoint", - key -> Setting.simpleString(key, Setting.Property.NodeScope, Setting.Property.Deprecated)); - - /** An override for the Storage host name to connect to. */ - static final Setting.AffixSetting HOST_SETTING = Setting.affixKeySetting(PREFIX, "host", - key -> { - // falback to the deprecated setting - if (key.endsWith("host")) { - return Setting.simpleString(key, - ENDPOINT_SETTING.getConcreteSetting(key.substring(0, key.length() - "host".length()) + "endpoint"), - Setting.Property.NodeScope); - } else { - return Setting.simpleString(key, Setting.Property.NodeScope); - } - }); + key -> Setting.simpleString(key, Setting.Property.NodeScope)); /** An override for the Google Project ID. 
*/ static final Setting.AffixSetting PROJECT_ID_SETTING = Setting.affixKeySetting(PREFIX, "project_id", @@ -184,7 +170,7 @@ public static Map load(final Settings static GoogleCloudStorageClientSettings getClientSettings(final Settings settings, final String clientName) { return new GoogleCloudStorageClientSettings( loadCredential(settings, clientName), - getConfigValue(settings, clientName, HOST_SETTING), + getConfigValue(settings, clientName, ENDPOINT_SETTING), getConfigValue(settings, clientName, PROJECT_ID_SETTING), getConfigValue(settings, clientName, CONNECT_TIMEOUT_SETTING), getConfigValue(settings, clientName, READ_TIMEOUT_SETTING), diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java index 7d232506e2e57..1d2d70584adf9 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java @@ -60,7 +60,6 @@ public List> getSettings() { return Arrays.asList( GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING, GoogleCloudStorageClientSettings.ENDPOINT_SETTING, - GoogleCloudStorageClientSettings.HOST_SETTING, GoogleCloudStorageClientSettings.PROJECT_ID_SETTING, GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING, GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING, diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java index efa2e283c59db..2d70dc8e59e38 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java @@ -43,7 +43,6 @@ import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING; -import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.HOST_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.ENDPOINT_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.PROJECT_ID_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING; @@ -132,14 +131,9 @@ private static GoogleCloudStorageClientSettings randomClient(final String client String host; if (randomBoolean()) { host = randomAlphaOfLength(5); - if (randomBoolean()) { - settings.put(HOST_SETTING.getConcreteSettingForNamespace(clientName).getKey(), host); - } else { - settings.put(ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), host); - deprecationWarnings.add(ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName)); - } + settings.put(ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), host); } else { - host = HOST_SETTING.getDefault(Settings.EMPTY); + host = ENDPOINT_SETTING.getDefault(Settings.EMPTY); } String projectId; diff --git 
a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java index 00bd1b8c1ca44..a838f8a38ca05 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java @@ -54,7 +54,7 @@ public void testClientInitializer() throws GeneralSecurityException, IOException readTimeValue.getStringRep()) .put(GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName).getKey(), applicationName) - .put(GoogleCloudStorageClientSettings.HOST_SETTING.getConcreteSettingForNamespace(clientName).getKey(), hostName) + .put(GoogleCloudStorageClientSettings.ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), hostName) .put(GoogleCloudStorageClientSettings.PROJECT_ID_SETTING.getConcreteSettingForNamespace(clientName).getKey(), projectIdName) .build(); when(environment.settings()).thenReturn(settings); From 5f3364c89a79dd39c12dcdf6ec15f9c28059caa4 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 10 May 2018 12:06:31 +0300 Subject: [PATCH 35/45] Remove unused in mock --- .../qa/google-cloud-storage/build.gradle | 1 - .../gcs/GoogleCloudStorageTestServer.java | 37 ++----------------- 2 files changed, 3 insertions(+), 35 deletions(-) diff --git a/plugins/repository-gcs/qa/google-cloud-storage/build.gradle b/plugins/repository-gcs/qa/google-cloud-storage/build.gradle index ec288ec691b1e..34ec92a354277 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/build.gradle +++ b/plugins/repository-gcs/qa/google-cloud-storage/build.gradle @@ -69,7 +69,6 @@ task googleCloudStorageFixture(type: AntFixture) { /** A service account file that points to the Google Cloud Storage service emulated by the fixture **/ task createServiceAccountFile() { - dependsOn googleCloudStorageFixture doLast { KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA") keyPairGenerator.initialize(1024) diff --git a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java index de9de55213e0d..7444fadfb9816 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java +++ b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java @@ -57,7 +57,7 @@ */ public class GoogleCloudStorageTestServer { - private static byte[] EMPTY_BYTE = new byte[0]; + private static final byte[] EMPTY_BYTE = new byte[0]; /** List of the buckets stored on this test server **/ private final Map buckets = ConcurrentCollections.newConcurrentMap(); @@ -68,13 +68,6 @@ public class GoogleCloudStorageTestServer { /** Server endpoint **/ private final String endpoint; - /** - * Creates a {@link GoogleCloudStorageTestServer} with the default endpoint - */ - GoogleCloudStorageTestServer() { - this("https://www.googleapis.com"); - } - /** * Creates a {@link GoogleCloudStorageTestServer} with a custom endpoint */ @@ -92,29 +85,6 @@ public String getEndpoint() { return endpoint; } - /** - * Returns a Google Cloud Storage response 
for the given request - * - * @param method the HTTP method of the request - * @param url the HTTP URL of the request - * @param headers the HTTP headers of the request - * @param body the HTTP request body - * @return a {@link Response} - * - * @throws IOException if something goes wrong - */ - public Response handle(final String method, - final String url, - final Map> headers, - byte[] body) throws IOException { - - final int questionMark = url.indexOf('?'); - if (questionMark == -1) { - return handle(method, url, null, headers, body); - } - return handle(method, url.substring(0, questionMark), url.substring(questionMark + 1), headers, body); - } - /** * Returns a Google Cloud Storage response for the given request * @@ -756,9 +726,8 @@ private static XContentBuilder buildObjectResource(final XContentBuilder builder * https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite */ private static XContentBuilder buildRewriteResponse(final XContentBuilder builder, final String destBucket, final String dest, - final int byteSize) - throws IOException { - final XContentBuilder respBuilder = builder.startObject() + final int byteSize) throws IOException { + builder.startObject() .field("kind", "storage#rewriteResponse") .field("totalBytesRewritten", String.valueOf(byteSize)) .field("objectSize", String.valueOf(byteSize)) From d3e9fbe587624e83bfe6effc30b4690eeeed6d98 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 10 May 2018 15:26:46 +0300 Subject: [PATCH 36/45] Collapse rewriteTo and copyTo actions --- .../gcs/GoogleCloudStorageTestServer.java | 64 +++++++------------ .../gcs/GoogleCloudStorageService.java | 4 +- .../gcs/GoogleCloudStorageServiceTests.java | 4 +- 3 files changed, 25 insertions(+), 47 deletions(-) diff --git a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java index 7444fadfb9816..91f653c4105cd 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java +++ b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java @@ -248,7 +248,8 @@ private static PathTrie defaultHandlers(final String endpoint, f inputStreamBody = new GZIPInputStream(inputStreamBody); } } - // Read line by line ?both? parts of the multipart + // Read line by line ?both? parts of the multipart. Decoding headers as + // IS_8859_1 is safe. try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStreamBody, StandardCharsets.ISO_8859_1))) { String line; // read first part delimiter @@ -275,15 +276,19 @@ private static PathTrie defaultHandlers(final String endpoint, f return newError(RestStatus.INTERNAL_SERVER_ERROR, "Error parsing multipart request. 
Metadata part expected to have the \"application/json\" content type."); } - // read metadata part + // read metadata part, a single line line = reader.readLine(); + final byte[] metadata = line.getBytes(StandardCharsets.ISO_8859_1); + if ((firstPartContentTypes != null) && (firstPartContentTypes.stream().anyMatch((x -> x.contains("charset=utf-8"))))) { + // decode as utf-8 + line = new String(metadata, StandardCharsets.UTF_8); + } final Matcher objectNameMatcher = Pattern.compile("\"name\":\"([^\"]*)\"").matcher(line); objectNameMatcher.find(); final String objectName = objectNameMatcher.group(1); final Matcher bucketNameMatcher = Pattern.compile("\"bucket\":\"([^\"]*)\"").matcher(line); bucketNameMatcher.find(); final String bucketName = bucketNameMatcher.group(1); - final byte[] metadata = line.getBytes(StandardCharsets.ISO_8859_1); // read second part delimiter line = reader.readLine(); if ((line == null) || (line.equals("--" + boundary) == false)) { @@ -362,44 +367,16 @@ private static PathTrie defaultHandlers(final String endpoint, f return newResponse(RestStatus.OK, emptyMap(), buildObjectResource(bucket.name, objectId, body)); }); - // Copy Object - // - // https://cloud.google.com/storage/docs/json_api/v1/objects/copy - handlers.insert("POST " + endpoint + "/storage/v1/b/{srcBucket}/o/{src}/copyTo/b/{destBucket}/o/{dest}", (params, headers, body)-> { - final String source = params.get("src"); - if (Strings.hasText(source) == false) { - return newError(RestStatus.INTERNAL_SERVER_ERROR, "source object name is missing"); - } - - final Bucket srcBucket = buckets.get(params.get("srcBucket")); - if (srcBucket == null) { - return newError(RestStatus.NOT_FOUND, "source bucket not found"); - } - - final String dest = params.get("dest"); - if (Strings.hasText(dest) == false) { - return newError(RestStatus.INTERNAL_SERVER_ERROR, "destination object name is missing"); - } - - final Bucket destBucket = buckets.get(params.get("destBucket")); - if (destBucket == null) { - return newError(RestStatus.NOT_FOUND, "destination bucket not found"); - } - - final byte[] sourceBytes = srcBucket.objects.get(source); - if (sourceBytes == null) { - return newError(RestStatus.NOT_FOUND, "source object not found"); - } - - destBucket.objects.put(dest, sourceBytes); - return newResponse(RestStatus.OK, emptyMap(), buildObjectResource(destBucket.name, dest, sourceBytes)); - }); - - // Rewrite Object + // Rewrite or Copy Object // // https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite - handlers.insert("POST " + endpoint + "/storage/v1/b/{srcBucket}/o/{src}/rewriteTo/b/{destBucket}/o/{dest}", + // https://cloud.google.com/storage/docs/json_api/v1/objects/copy + handlers.insert("POST " + endpoint + "/storage/v1/b/{srcBucket}/o/{src}/{action}/b/{destBucket}/o/{dest}", (params, headers, body) -> { + final String action = params.get("action"); + if ((action.equals("rewriteTo") == false) && (action.equals("copyTo") == false)) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "Action not implemented. 
None of \"rewriteTo\" or \"copyTo\"."); + } final String source = params.get("src"); if (Strings.hasText(source) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "source object name is missing"); @@ -421,9 +398,14 @@ private static PathTrie defaultHandlers(final String endpoint, f return newError(RestStatus.NOT_FOUND, "source object not found"); } destBucket.objects.put(dest, sourceBytes); - final XContentBuilder respBuilder = jsonBuilder(); - buildRewriteResponse(respBuilder, destBucket.name, dest, sourceBytes.length); - return newResponse(RestStatus.OK, emptyMap(), respBuilder); + if (action.equals("rewriteTo")) { + final XContentBuilder respBuilder = jsonBuilder(); + buildRewriteResponse(respBuilder, destBucket.name, dest, sourceBytes.length); + return newResponse(RestStatus.OK, emptyMap(), respBuilder); + } else { + assert action.equals("copyTo"); + return newResponse(RestStatus.OK, emptyMap(), buildObjectResource(destBucket.name, dest, sourceBytes)); + } }); // List Objects diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index a7ab41b8e45e4..f563a035c7ffe 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -31,9 +31,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.Environment; -import java.io.IOException; import java.net.URI; -import java.security.GeneralSecurityException; import java.util.Map; public class GoogleCloudStorageService extends AbstractComponent { @@ -53,7 +51,7 @@ public GoogleCloudStorageService(Environment environment, Map Date: Thu, 10 May 2018 17:57:15 +0300 Subject: [PATCH 37/45] HttpConnectionFactory --- .../gcs/GoogleCloudStorageClientSettings.java | 14 +++---- .../gcs/GoogleCloudStorageService.java | 39 +++++++++++++++++-- .../gcs/GoogleCloudStorageServiceTests.java | 2 +- 3 files changed, 44 insertions(+), 11 deletions(-) diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java index 2f10612e2b40c..d2fca7458c5eb 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -80,17 +80,17 @@ public class GoogleCloudStorageClientSettings { /** Name used by the client when it uses the Google Cloud JSON API. **/ static final Setting.AffixSetting APPLICATION_NAME_SETTING = Setting.affixKeySetting(PREFIX, "application_name", - key -> new Setting<>(key, "elasticsearch-repository-gcs", Function.identity(), Setting.Property.NodeScope, + key -> new Setting<>(key, "repository-gcs", Function.identity(), Setting.Property.NodeScope, Setting.Property.Deprecated)); /** The credentials used by the client to connect to the Storage endpoint **/ private final ServiceAccountCredentials credential; /** - * The Storage root URL (hostname) the client should talk to, or null string to - * use the default. 
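The hunks that follow replace GoogleNetHttpTransport with a NetHttpTransport whose connection factory rewrites every request URL onto the configured endpoint, so the integration tests can point the SDK at a local fixture without touching the client code paths. A rough sketch of that redirect idea, with the same-host check and edge cases around ports and empty queries simplified away:

import com.google.api.client.googleapis.GoogleUtils;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.javanet.DefaultConnectionFactory;
import com.google.api.client.http.javanet.NetHttpTransport;

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

final class RedirectingTransportSketch {

    // Builds a transport that trusts Google's certificates but sends every
    // request to the given endpoint (e.g. the test fixture), preserving only
    // the original path and query.
    static HttpTransport create(String endpoint) throws Exception {
        final NetHttpTransport.Builder builder = new NetHttpTransport.Builder();
        builder.trustCertificates(GoogleUtils.getCertificateTrustStore());
        if (endpoint != null && endpoint.isEmpty() == false) {
            final URL endpointUrl = new URL(endpoint);
            builder.setConnectionFactory(new DefaultConnectionFactory() {
                @Override
                public HttpURLConnection openConnection(URL originalUrl) throws IOException {
                    final URL redirected = new URL(endpointUrl.getProtocol(), endpointUrl.getHost(),
                            endpointUrl.getPort(), originalUrl.getFile());
                    return super.openConnection(redirected);
                }
            });
        }
        return builder.build();
    }
}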
+ * The Storage endpoint URL the client should talk to, or null string to use the + * default **/ - private final String host; + private final String endpoint; /** * The Google project ID overriding the default way to infer it. Null value sets @@ -111,14 +111,14 @@ public class GoogleCloudStorageClientSettings { private final URI tokenUri; GoogleCloudStorageClientSettings(final ServiceAccountCredentials credential, - final String host, + final String endpoint, final String projectId, final TimeValue connectTimeout, final TimeValue readTimeout, final String applicationName, final URI tokenUri) { this.credential = credential; - this.host = host; + this.endpoint = endpoint; this.projectId = projectId; this.connectTimeout = connectTimeout; this.readTimeout = readTimeout; @@ -131,7 +131,7 @@ public ServiceAccountCredentials getCredential() { } public String getHost() { - return host; + return endpoint; } public String getProjectId() { diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index f563a035c7ffe..91a636d208d86 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -19,7 +19,9 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; +import com.google.api.client.googleapis.GoogleUtils; +import com.google.api.client.http.HttpTransport; +import com.google.api.client.http.javanet.DefaultConnectionFactory; import com.google.api.client.http.javanet.NetHttpTransport; import com.google.auth.oauth2.ServiceAccountCredentials; import com.google.cloud.http.HttpTransportOptions; @@ -31,7 +33,12 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.Environment; + +import java.io.IOException; +import java.net.HttpURLConnection; import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; import java.util.Map; public class GoogleCloudStorageService extends AbstractComponent { @@ -57,12 +64,12 @@ public Storage createClient(final String clientName) throws Exception { throw new IllegalArgumentException("Unknown client name [" + clientName + "]. 
Existing client configs: " + Strings.collectionToDelimitedString(clientsSettings.keySet(), ",")); } - final NetHttpTransport netHttpTransport = GoogleNetHttpTransport.newTrustedTransport(); + final HttpTransport httpTransport = createHttpTransport(clientSettings.getHost()); final HttpTransportOptions httpTransportOptions = HttpTransportOptions.newBuilder() .setConnectTimeout(toTimeout(clientSettings.getConnectTimeout())) .setReadTimeout(toTimeout(clientSettings.getReadTimeout())) // requires 'java.lang.RuntimePermission "setFactory"' - .setHttpTransportFactory(() -> netHttpTransport) + .setHttpTransportFactory(() -> httpTransport) .build(); final StorageOptions.Builder storageOptionsBuilder = StorageOptions.newBuilder() .setTransportOptions(httpTransportOptions) @@ -96,6 +103,32 @@ public Storage createClient(final String clientName) throws Exception { return storageOptionsBuilder.build().getService(); } + HttpTransport createHttpTransport(final String endpoint) throws Exception { + final NetHttpTransport.Builder builder = new NetHttpTransport.Builder(); + builder.trustCertificates(GoogleUtils.getCertificateTrustStore()); + if (Strings.hasLength(endpoint)) { + final URL endpointUrl = URI.create(endpoint).toURL(); + builder.setConnectionFactory(new DefaultConnectionFactory() { + @Override + public HttpURLConnection openConnection(final URL originalUrl) throws IOException { + if (originalUrl.getHost().equals(endpointUrl.getHost()) && originalUrl.getPort() == endpointUrl.getPort() + && originalUrl.getProtocol().equals(endpointUrl.getProtocol())) { + super.openConnection(originalUrl); + } + URI originalUri; + try { + originalUri = originalUrl.toURI(); + } catch (final URISyntaxException e) { + throw new RuntimeException(e); + } + return super.openConnection(new URL(endpointUrl.getProtocol(), endpointUrl.getHost(), endpointUrl.getPort(), + originalUri.getRawPath() + "?" 
+ originalUri.getRawQuery())); + } + }); + } + return builder.build(); + } + /** * Converts timeout values from the settings to a timeout value for the Google * Cloud SDK diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java index 2ff941b10311d..a33ae90c549bc 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java @@ -43,7 +43,7 @@ public void testClientInitializer() throws Exception { final TimeValue connectTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); final TimeValue readTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); final String applicationName = randomAlphaOfLength(4); - final String hostName = randomAlphaOfLength(4); + final String hostName = randomFrom("http://", "https://") + randomAlphaOfLength(4) + ":" + randomIntBetween(1, 65535); final String projectIdName = randomAlphaOfLength(4); final Settings settings = Settings.builder() .put(GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), From 25f502336af46b48e63f647059ad5c72efc6ddcd Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 10 May 2018 18:13:25 +0300 Subject: [PATCH 38/45] Renames --- .../repositories/gcs/GoogleCloudStorageTestServer.java | 4 +++- .../repositories/gcs/GoogleCloudStorageBlobStore.java | 10 ++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java index 91f653c4105cd..0ebc6aee6c078 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java +++ b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java @@ -707,7 +707,9 @@ private static XContentBuilder buildObjectResource(final XContentBuilder builder * Builds the rewrite response as defined by * https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite */ - private static XContentBuilder buildRewriteResponse(final XContentBuilder builder, final String destBucket, final String dest, + private static XContentBuilder buildRewriteResponse(final XContentBuilder builder, + final String destBucket, + final String dest, final int byteSize) throws IOException { builder.startObject() .field("kind", "storage#rewriteResponse") diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 67dc04c8c86ba..7c1c8b1e0564f 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -56,14 +56,15 @@ class GoogleCloudStorageBlobStore extends AbstractComponent implements BlobStore { - private final Storage storage; - private final String bucket; // The 
recommended maximum size of a blob that should be uploaded in a single // request. Larger files should be uploaded over multiple requests (this is // called "resumable upload") // https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload private static final int LARGE_BLOB_THRESHOLD_BYTE_SIZE = 5 * 1024 * 1024; + private final Storage storage; + private final String bucket; + GoogleCloudStorageBlobStore(Settings settings, String bucket, Storage storage) { super(settings); this.bucket = bucket; @@ -275,10 +276,7 @@ void deleteBlobsByPrefix(String prefix) throws IOException { * @param blobNames names of the bucket to delete */ void deleteBlobs(Collection blobNames) throws IOException { - if (blobNames == null || blobNames.isEmpty()) { - return; - } - if (blobNames.size() < 3) { + if (blobNames.size() < 2) { for (final String blobName : blobNames) { deleteBlob(blobName); } From 8f0c139fd1cd730f41979901096ef4c17d32a2ab Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 10 May 2018 20:53:51 +0300 Subject: [PATCH 39/45] Mock with foreign packages --- .../cloud/storage/StorageRpcOptionUtils.java | 54 ++ .../cloud/storage/StorageTestUtils.java | 37 ++ ...eCloudStorageBlobStoreRepositoryTests.java | 4 +- .../repositories/gcs/MockStorage.java | 547 ++++++++---------- 4 files changed, 322 insertions(+), 320 deletions(-) create mode 100644 plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageRpcOptionUtils.java create mode 100644 plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageTestUtils.java diff --git a/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageRpcOptionUtils.java b/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageRpcOptionUtils.java new file mode 100644 index 0000000000000..f2b8a0571ad87 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageRpcOptionUtils.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.google.cloud.storage; + +import com.google.cloud.storage.spi.v1.StorageRpc; + +import static org.mockito.Mockito.mock; + +/** + * Utility class that exposed Google SDK package protected methods to + * create specific StorageRpc objects in unit tests. + */ +public class StorageRpcOptionUtils { + + private StorageRpcOptionUtils(){} + + public static String getPrefix(final Storage.BlobListOption... 
options) { + if (options != null) { + for (final Option option : options) { + final StorageRpc.Option rpcOption = option.getRpcOption(); + if (StorageRpc.Option.PREFIX.equals(rpcOption)) { + return (String) option.getValue(); + } + } + } + return null; + } + + public static CopyWriter createCopyWriter(final Blob result) { + return new CopyWriter(mock(StorageOptions.class), mock(StorageRpc.RewriteResponse.class)) { + @Override + public Blob getResult() { + return result; + } + }; + } +} diff --git a/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageTestUtils.java b/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageTestUtils.java new file mode 100644 index 0000000000000..68175d7f1be53 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageTestUtils.java @@ -0,0 +1,37 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.google.cloud.storage; + +/** + * Utility class that exposed Google SDK package protected methods to + * create buckets and blobs objects in unit tests. 
+ */ +public class StorageTestUtils { + + private StorageTestUtils(){} + + public static Bucket createBucket(final Storage storage, final String bucketName) { + return new Bucket(storage, (BucketInfo.BuilderImpl) BucketInfo.newBuilder(bucketName)); + } + + public static Blob createBlob(final Storage storage, final String bucketName, final String blobName, final long blobSize) { + return new Blob(storage, (BlobInfo.BuilderImpl) BlobInfo.newBuilder(bucketName, blobName).setSize(blobSize)); + } +} diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 260cc7a93103c..c4d9b67899672 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -19,9 +19,7 @@ package org.elasticsearch.repositories.gcs; -import com.google.cloud.storage.Blob; import com.google.cloud.storage.Storage; - import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -45,7 +43,7 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepos // Static list of blobs shared among all nodes in order to act like a remote repository service: // all nodes must see the same content - private static final ConcurrentMap blobs = new ConcurrentHashMap<>(); + private static final ConcurrentMap blobs = new ConcurrentHashMap<>(); @Override protected Collection> nodePlugins() { diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java index 00f5fd0ef6e8e..2b52b7a32a9cc 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java @@ -19,16 +19,12 @@ package org.elasticsearch.repositories.gcs; -import org.elasticsearch.common.SuppressForbidden; -import org.mockito.Matchers; - import com.google.api.gax.paging.Page; import com.google.cloud.Policy; import com.google.cloud.ReadChannel; import com.google.cloud.RestorableState; import com.google.cloud.WriteChannel; import com.google.cloud.storage.Acl; -import com.google.cloud.storage.Acl.Entity; import com.google.cloud.storage.Blob; import com.google.cloud.storage.BlobId; import com.google.cloud.storage.BlobInfo; @@ -37,25 +33,29 @@ import com.google.cloud.storage.CopyWriter; import com.google.cloud.storage.ServiceAccount; import com.google.cloud.storage.Storage; -import com.google.cloud.storage.spi.v1.StorageRpc; import com.google.cloud.storage.StorageBatch; +import com.google.cloud.storage.StorageException; import com.google.cloud.storage.StorageOptions; +import com.google.cloud.storage.StorageRpcOptionUtils; +import com.google.cloud.storage.StorageTestUtils; + +import org.elasticsearch.core.internal.io.IOUtils; +import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; -import java.lang.reflect.Method; import java.net.URL; import java.nio.ByteBuffer; +import java.nio.channels.Channels; +import java.nio.channels.ReadableByteChannel; 
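Because Bucket and Blob only have package-private constructors, the two helpers above live in the com.google.cloud.storage package and hand pre-built instances back to the mock. With them in place, the rewritten MockStorage that follows can be exercised directly through the Storage interface; a minimal round-trip sketch, assuming it sits in the same test package as MockStorage:

import com.google.cloud.storage.Blob;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.Storage;

import java.nio.charset.StandardCharsets;
import java.util.concurrent.ConcurrentHashMap;

final class MockStorageRoundTripSketch {

    static void roundTrip() {
        // the shared map stands in for the remote bucket contents
        final ConcurrentHashMap<String, byte[]> blobs = new ConcurrentHashMap<>();
        final Storage storage = new MockStorage("bucket_test", blobs);

        storage.create(BlobInfo.newBuilder("bucket_test", "segments_1").build(),
                "data".getBytes(StandardCharsets.UTF_8));

        final Blob blob = storage.get(BlobId.of("bucket_test", "segments_1"));
        assert blob != null && blob.getSize() == 4;
        assert storage.delete(BlobId.of("bucket_test", "segments_1"));
    }
}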
+import java.nio.channels.WritableByteChannel; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; +import java.util.Objects; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; - -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; +import java.util.stream.Collectors; /** * {@link MockStorage} mocks a {@link Storage} client by storing all the blobs @@ -63,133 +63,86 @@ */ class MockStorage implements Storage { - private final Bucket theBucket; - private final ConcurrentMap blobsMap; - - @SuppressForbidden(reason = "mocking here requires reflection that trespasses the access system") - MockStorage(final String bucketName, final ConcurrentMap blobsMap) { - this.blobsMap = blobsMap; - // mock bucket - this.theBucket = mock(Bucket.class); - when(this.theBucket.getName()).thenReturn(bucketName); - doAnswer(invocation -> { - assert invocation.getArguments().length == 1 : "Only a single filter is mocked"; - final BlobListOption prefixFilter = (BlobListOption) invocation.getArguments()[0]; - final Method optionMethod = BlobListOption.class.getSuperclass().getDeclaredMethod("getRpcOption"); - optionMethod.setAccessible(true); - assert StorageRpc.Option.PREFIX.equals(optionMethod.invoke(prefixFilter)) : "Only the prefix filter is mocked"; - final Method valueMethod = BlobListOption.class.getSuperclass().getDeclaredMethod("getValue"); - valueMethod.setAccessible(true); - final String prefixValue = (String) valueMethod.invoke(prefixFilter); - return new Page() { - @Override - public boolean hasNextPage() { - return false; - } - - @Override - public String getNextPageToken() { - return null; - } - - @Override - public Page getNextPage() { - return null; - } - - @Override - public Iterable iterateAll() { - return getValues(); - } - - @Override - public Iterable getValues() { - return () -> MockStorage.this.blobsMap.entrySet() - .stream() - .filter(entry1 -> entry1.getKey().startsWith(prefixValue)) - .map(entry2 -> entry2.getValue()) - .iterator(); - } - }; - }).when(this.theBucket).list(Matchers.anyVararg()); - } + private final String bucketName; + private final ConcurrentMap blobs; - @Override - public StorageOptions getOptions() { - return StorageOptions.getDefaultInstance(); - } - - @Override - public Bucket create(BucketInfo bucketInfo, BucketTargetOption... options) { - throw new RuntimeException("Mock not implemented"); + MockStorage(final String bucket, final ConcurrentMap blobs) { + this.bucketName = Objects.requireNonNull(bucket); + this.blobs = Objects.requireNonNull(blobs); } @Override - public Blob create(BlobInfo blobInfo, BlobTargetOption... options) { - return constructMockBlob(blobInfo.getName(), new byte[0], blobsMap); + public Bucket get(String bucket, BucketGetOption... options) { + if (bucketName.equals(bucket)) { + return StorageTestUtils.createBucket(this, bucketName); + } else { + return null; + } } @Override - public Blob create(BlobInfo blobInfo, byte[] content, BlobTargetOption... options) { - return constructMockBlob(blobInfo.getName(), content, blobsMap); + public Blob get(BlobId blob) { + if (bucketName.equals(blob.getBucket())) { + final byte[] bytes = blobs.get(blob.getName()); + if (bytes != null) { + return StorageTestUtils.createBlob(this, bucketName, blob.getName(), bytes.length); + } + } + return null; } @Override - public Blob create(BlobInfo blobInfo, InputStream content, BlobWriteOption... 
options) { - throw new RuntimeException("Mock not implemented"); + public boolean delete(BlobId blob) { + if (bucketName.equals(blob.getBucket()) && blobs.containsKey(blob.getName())) { + return blobs.remove(blob.getName()) != null; + } + return false; } @Override - public Bucket get(String bucketName, BucketGetOption... options) { - assert bucketName.equals(this.theBucket.getName()) : "Only a single bucket is mocked"; - return theBucket; + public List delete(Iterable blobIds) { + final List ans = new ArrayList<>(); + for (final BlobId blobId : blobIds) { + ans.add(delete(blobId)); + } + return ans; } @Override - public Blob get(String bucketName, String blobName, BlobGetOption... options) { - assert bucketName.equals(this.theBucket.getName()) : "Only a single bucket is mocked"; - return blobsMap.get(blobName); + public Blob create(BlobInfo blobInfo, byte[] content, BlobTargetOption... options) { + if (bucketName.equals(blobInfo.getBucket()) == false) { + throw new StorageException(404, "Bucket not found"); + } + blobs.put(blobInfo.getName(), content); + return get(BlobId.of(blobInfo.getBucket(), blobInfo.getName())); } @Override - public Blob get(BlobId blob, BlobGetOption... options) { - return get(blob.getBucket(), blob.getName()); - } + public CopyWriter copy(CopyRequest copyRequest) { + if (bucketName.equals(copyRequest.getSource().getBucket()) == false) { + throw new StorageException(404, "Source bucket not found"); + } + if (bucketName.equals(copyRequest.getTarget().getBucket()) == false) { + throw new StorageException(404, "Target bucket not found"); + } - @Override - public Blob get(BlobId blob) { - return get(blob.getBucket(), blob.getName()); + final byte[] bytes = blobs.get(copyRequest.getSource().getName()); + if (bytes == null) { + throw new StorageException(404, "Source blob does not exist"); + } + blobs.put(copyRequest.getTarget().getName(), bytes); + return StorageRpcOptionUtils + .createCopyWriter(get(BlobId.of(copyRequest.getTarget().getBucket(), copyRequest.getTarget().getName()))); } @Override - public Page list(BucketListOption... options) { - return new Page() { - @Override - public boolean hasNextPage() { - return false; - } - @Override - public String getNextPageToken() { - return null; - } - @Override - public Page getNextPage() { - return null; - } - @Override - public Iterable iterateAll() { - return getValues(); - } - @Override - public Iterable getValues() { - return Arrays.asList(theBucket); - } - }; - } + public Page list(String bucket, BlobListOption... options) { + if (bucketName.equals(bucket) == false) { + throw new StorageException(404, "Bucket not found"); + } + final Storage storage = this; + final String prefix = StorageRpcOptionUtils.getPrefix(options); - @Override - public Page list(String bucketName, BlobListOption... 
options) { - assert bucketName.equals(this.theBucket.getName()) : "Only a single bucket is mocked"; return new Page() { @Override public boolean hasNextPage() { @@ -203,381 +156,341 @@ public String getNextPageToken() { @Override public Page getNextPage() { - return null; + throw new UnsupportedOperationException(); } @Override public Iterable iterateAll() { - return getValues(); + return blobs.entrySet().stream() + .filter(blob -> ((prefix == null) || blob.getKey().startsWith(prefix))) + .map(blob -> StorageTestUtils.createBlob(storage, bucketName, blob.getKey(), blob.getValue().length)) + .collect(Collectors.toList()); } @Override public Iterable getValues() { - return blobsMap.values(); + throw new UnsupportedOperationException(); } }; } @Override - public Bucket update(BucketInfo bucketInfo, BucketTargetOption... options) { - throw new RuntimeException("Mock not implemented"); + public ReadChannel reader(BlobId blob, BlobSourceOption... options) { + if (bucketName.equals(blob.getBucket())) { + final byte[] bytes = blobs.get(blob.getName()); + final ReadableByteChannel readableByteChannel = Channels.newChannel(new ByteArrayInputStream(bytes)); + return new ReadChannel() { + @Override + public void close() { + IOUtils.closeWhileHandlingException(readableByteChannel); + } + + @Override + public void seek(long position) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public void setChunkSize(int chunkSize) { + throw new UnsupportedOperationException(); + } + + @Override + public RestorableState capture() { + throw new UnsupportedOperationException(); + } + + @Override + public int read(ByteBuffer dst) throws IOException { + return readableByteChannel.read(dst); + } + + @Override + public boolean isOpen() { + return readableByteChannel.isOpen(); + } + }; + } + return null; } @Override - public Blob update(BlobInfo blobInfo, BlobTargetOption... options) { - throw new RuntimeException("Mock not implemented"); + public WriteChannel writer(BlobInfo blobInfo, BlobWriteOption... options) { + if (bucketName.equals(blobInfo.getBucket())) { + final ByteArrayOutputStream output = new ByteArrayOutputStream(); + return new WriteChannel() { + + final WritableByteChannel writableByteChannel = Channels.newChannel(output); + + @Override + public void setChunkSize(int chunkSize) { + throw new UnsupportedOperationException(); + } + + @Override + public RestorableState capture() { + throw new UnsupportedOperationException(); + } + + @Override + public int write(ByteBuffer src) throws IOException { + return writableByteChannel.write(src); + } + + @Override + public boolean isOpen() { + return writableByteChannel.isOpen(); + } + + @Override + public void close() throws IOException { + IOUtils.closeWhileHandlingException(writableByteChannel); + blobs.put(blobInfo.getName(), output.toByteArray()); + } + }; + } + return null; } + // Everything below this line is not implemented. + @Override - public Blob update(BlobInfo blobInfo) { - throw new RuntimeException("Mock not implemented"); + public Bucket create(BucketInfo bucketInfo, BucketTargetOption... options) { + return null; } @Override - public boolean delete(String bucket, BucketSourceOption... options) { - throw new RuntimeException("Mock not implemented"); + public Blob create(BlobInfo blobInfo, BlobTargetOption... options) { + return null; } @Override - public boolean delete(String bucketName, String blobName, BlobSourceOption... 
options) { - assert bucketName.equals(this.theBucket.getName()) : "Only a single bucket is mocked"; - return blobsMap.remove(blobName) != null; + public Blob create(BlobInfo blobInfo, InputStream content, BlobWriteOption... options) { + return null; } @Override - public boolean delete(BlobId blob, BlobSourceOption... options) { - return delete(blob.getBucket(), blob.getName()); + public Blob get(String bucket, String blob, BlobGetOption... options) { + return null; } @Override - public boolean delete(BlobId blob) { - return delete(blob.getBucket(), blob.getName()); + public Blob get(BlobId blob, BlobGetOption... options) { + return null; } @Override - public Blob compose(ComposeRequest composeRequest) { - throw new RuntimeException("Mock not implemented"); + public Page list(BucketListOption... options) { + return null; } @Override - public CopyWriter copy(CopyRequest copyRequest) { - assert copyRequest.getSource().getBucket().equals(this.theBucket.getName()) : "Only a single bucket is mocked"; - assert copyRequest.getTarget().getBucket().equals(this.theBucket.getName()) : "Only a single bucket is mocked"; - final Blob sourceBlob = blobsMap.get(copyRequest.getSource().getName()); - return sourceBlob.copyTo(copyRequest.getTarget().getBucket(), copyRequest.getTarget().getName()); + public Bucket update(BucketInfo bucketInfo, BucketTargetOption... options) { + return null; } @Override - public byte[] readAllBytes(String bucketName, String blobName, BlobSourceOption... options) { - throw new RuntimeException("Mock not implemented"); + public Blob update(BlobInfo blobInfo, BlobTargetOption... options) { + return null; } @Override - public byte[] readAllBytes(BlobId blob, BlobSourceOption... options) { - throw new RuntimeException("Mock not implemented"); + public Blob update(BlobInfo blobInfo) { + return null; } @Override - public StorageBatch batch() { - throw new RuntimeException("Mock not implemented"); + public boolean delete(String bucket, BucketSourceOption... options) { + return false; } @Override - public ReadChannel reader(String bucket, String blob, BlobSourceOption... options) { - throw new RuntimeException("Mock not implemented"); + public boolean delete(String bucket, String blob, BlobSourceOption... options) { + return false; } @Override - public ReadChannel reader(BlobId blob, BlobSourceOption... options) { - throw new RuntimeException("Mock not implemented"); + public boolean delete(BlobId blob, BlobSourceOption... options) { + return false; } @Override - public WriteChannel writer(BlobInfo blobInfo, BlobWriteOption... options) { - assert blobInfo.getBucket().equals(this.theBucket.getName()) : "Only a single bucket is mocked"; - final ByteArrayOutputStream baos = new ByteArrayOutputStream(); - return new WriteChannel() { - private boolean isOpenFlag = true; - - @Override - public boolean isOpen() { - return isOpenFlag; - } + public Blob compose(ComposeRequest composeRequest) { + return null; + } - @Override - public void close() throws IOException { - constructMockBlob(blobInfo.getName(), baos.toByteArray(), blobsMap); - isOpenFlag = false; - } + @Override + public byte[] readAllBytes(String bucket, String blob, BlobSourceOption... options) { + return new byte[0]; + } - @Override - public int write(ByteBuffer src) throws IOException { - final int size1 = baos.size(); - while (src.hasRemaining()) { - baos.write(src.get()); - } - final int size2 = baos.size(); - return size2 - size1; - } + @Override + public byte[] readAllBytes(BlobId blob, BlobSourceOption... 
options) { + return new byte[0]; + } - @Override - public void setChunkSize(int chunkSize) { - } + @Override + public StorageBatch batch() { + return null; + } - @Override - public RestorableState capture() { - return null; - } - }; + @Override + public ReadChannel reader(String bucket, String blob, BlobSourceOption... options) { + return null; } @Override public URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOption... options) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override public List get(BlobId... blobIds) { - final List ans = new ArrayList<>(); - for (final BlobId blobId : blobIds) { - ans.add(get(blobId)); - } - return ans; + return null; } @Override public List get(Iterable blobIds) { - final List ans = new ArrayList<>(); - for (final BlobId blobId : blobIds) { - ans.add(get(blobId)); - } - return ans; + return null; } @Override public List update(BlobInfo... blobInfos) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override public List update(Iterable blobInfos) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override public List delete(BlobId... blobIds) { - final List ans = new ArrayList<>(); - for (final BlobId blobId : blobIds) { - ans.add(delete(blobId)); - } - return ans; - } - - @Override - public List delete(Iterable blobIds) { - final List ans = new ArrayList<>(); - for (final BlobId blobId : blobIds) { - ans.add(delete(blobId)); - } - return ans; + return null; } @Override - public Acl getAcl(String bucket, Entity entity, BucketSourceOption... options) { - throw new RuntimeException("Mock not implemented"); + public Acl getAcl(String bucket, Acl.Entity entity, BucketSourceOption... options) { + return null; } @Override - public Acl getAcl(String bucket, Entity entity) { - throw new RuntimeException("Mock not implemented"); + public Acl getAcl(String bucket, Acl.Entity entity) { + return null; } @Override - public boolean deleteAcl(String bucket, Entity entity, BucketSourceOption... options) { - throw new RuntimeException("Mock not implemented"); + public boolean deleteAcl(String bucket, Acl.Entity entity, BucketSourceOption... options) { + return false; } @Override - public boolean deleteAcl(String bucket, Entity entity) { - throw new RuntimeException("Mock not implemented"); + public boolean deleteAcl(String bucket, Acl.Entity entity) { + return false; } @Override public Acl createAcl(String bucket, Acl acl, BucketSourceOption... options) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override public Acl createAcl(String bucket, Acl acl) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override public Acl updateAcl(String bucket, Acl acl, BucketSourceOption... options) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override public Acl updateAcl(String bucket, Acl acl) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override public List listAcls(String bucket, BucketSourceOption... 
options) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override public List listAcls(String bucket) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override - public Acl getDefaultAcl(String bucket, Entity entity) { - throw new RuntimeException("Mock not implemented"); + public Acl getDefaultAcl(String bucket, Acl.Entity entity) { + return null; } @Override - public boolean deleteDefaultAcl(String bucket, Entity entity) { - throw new RuntimeException("Mock not implemented"); + public boolean deleteDefaultAcl(String bucket, Acl.Entity entity) { + return false; } @Override public Acl createDefaultAcl(String bucket, Acl acl) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override public Acl updateDefaultAcl(String bucket, Acl acl) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override public List listDefaultAcls(String bucket) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override - public Acl getAcl(BlobId blob, Entity entity) { - throw new RuntimeException("Mock not implemented"); + public Acl getAcl(BlobId blob, Acl.Entity entity) { + return null; } @Override - public boolean deleteAcl(BlobId blob, Entity entity) { - throw new RuntimeException("Mock not implemented"); + public boolean deleteAcl(BlobId blob, Acl.Entity entity) { + return false; } @Override public Acl createAcl(BlobId blob, Acl acl) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override public Acl updateAcl(BlobId blob, Acl acl) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override public List listAcls(BlobId blob) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override public Policy getIamPolicy(String bucket, BucketSourceOption... options) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override public Policy setIamPolicy(String bucket, Policy policy, BucketSourceOption... options) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override public List testIamPermissions(String bucket, List permissions, BucketSourceOption... 
options) { - throw new RuntimeException("Mock not implemented"); + return null; } @Override public ServiceAccount getServiceAccount(String projectId) { - throw new RuntimeException("Mock not implemented"); + return null; } - private static class ReadChannelFromByteArray implements ReadChannel { - private boolean isOpenFlag; - private final ByteBuffer byteBuffer; - - ReadChannelFromByteArray(byte[] srcArray) { - final byte[] clonedArray = Arrays.copyOf(srcArray, srcArray.length); - byteBuffer = ByteBuffer.wrap(clonedArray); - isOpenFlag = byteBuffer.hasRemaining(); - } - - @Override - public boolean isOpen() { - return isOpenFlag; - } - - @Override - public int read(ByteBuffer dst) throws IOException { - if (byteBuffer.hasRemaining() == false) { - return -1; - } - final int size1 = dst.remaining(); - while (dst.hasRemaining() && byteBuffer.hasRemaining()) { - dst.put(byteBuffer.get()); - } - final int size2 = dst.remaining(); - return size1 - size2; - } - - @Override - public void setChunkSize(int chunkSize) { - } - - @Override - public void seek(long position) throws IOException { - byteBuffer.position(Math.toIntExact(position)); - } - - @Override - public void close() { - isOpenFlag = false; - } - - @Override - public RestorableState capture() { - return null; - } - } - - private static Blob constructMockBlob(String blobName, byte[] data, ConcurrentMap blobsMap) { - final Blob blobMock = mock(Blob.class); - when(blobMock.getName()).thenReturn(blobName); - when(blobMock.getSize()).thenReturn((long) data.length); - when(blobMock.reload(Matchers.anyVararg())).thenReturn(blobMock); - doAnswer(invocation -> { - return new ReadChannelFromByteArray(data); - }).when(blobMock).reader(Matchers.anyVararg()); - when(blobMock.copyTo(Matchers.anyString(), Matchers.anyVararg())) - .thenThrow(new RuntimeException("Mock not implemented. 
Only a single bucket is mocked.")); - doAnswer(invocation -> { - final String copiedBlobName = (String) invocation.getArguments()[1]; - final Blob copiedMockBlob = constructMockBlob(copiedBlobName, data, blobsMap); - final CopyWriter ans = mock(CopyWriter.class); - when(ans.getResult()).thenReturn(copiedMockBlob); - when(ans.isDone()).thenReturn(true); - return ans; - }).when(blobMock).copyTo(Matchers.anyString(), Matchers.anyString(), Matchers.anyVararg()); - doAnswer(invocation -> { - final BlobId blobId = (BlobId) invocation.getArguments()[0]; - final Blob copiedMockBlob = constructMockBlob(blobId.getName(), data, blobsMap); - final CopyWriter ans = mock(CopyWriter.class); - when(ans.getResult()).thenReturn(copiedMockBlob); - when(ans.isDone()).thenReturn(true); - return ans; - }).when(blobMock).copyTo(Matchers.any(BlobId.class), Matchers.anyVararg()); - blobsMap.put(blobName, blobMock); - return blobMock; + @Override + public StorageOptions getOptions() { + return null; } - } From 824def819dd68f5129f7792eb059d15fab7a0e0b Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 11 May 2018 08:47:05 +0300 Subject: [PATCH 40/45] Reformatting --- .../gcs/GoogleCloudStorageTestServer.java | 25 +++++++++---------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java index 0ebc6aee6c078..23e61c399274d 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java +++ b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java @@ -681,8 +681,7 @@ private static XContentBuilder buildBucketResource(final String name) throws IOE * Storage Object JSON representation as defined in * https://cloud.google.com/storage/docs/json_api/v1/objects#resource */ - private static XContentBuilder buildObjectResource(final String bucket, final String name, final byte[] bytes) - throws IOException { + private static XContentBuilder buildObjectResource(final String bucket, final String name, final byte[] bytes) throws IOException { return buildObjectResource(jsonBuilder(), bucket, name, bytes); } @@ -712,17 +711,17 @@ private static XContentBuilder buildRewriteResponse(final XContentBuilder builde final String dest, final int byteSize) throws IOException { builder.startObject() - .field("kind", "storage#rewriteResponse") - .field("totalBytesRewritten", String.valueOf(byteSize)) - .field("objectSize", String.valueOf(byteSize)) - .field("done", true) - .startObject("resource") - .field("kind", "storage#object") - .field("id", String.join("/", destBucket, dest)) - .field("name", dest) - .field("bucket", destBucket) - .field("size", String.valueOf(byteSize)) - .endObject() + .field("kind", "storage#rewriteResponse") + .field("totalBytesRewritten", String.valueOf(byteSize)) + .field("objectSize", String.valueOf(byteSize)) + .field("done", true) + .startObject("resource") + .field("kind", "storage#object") + .field("id", String.join("/", destBucket, dest)) + .field("name", dest) + .field("bucket", destBucket) + .field("size", String.valueOf(byteSize)) + .endObject() .endObject(); return builder; } From 0a73dbc9a198d5872e15169c4e2b41931df123c5 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 
11 May 2018 09:19:03 +0300 Subject: [PATCH 41/45] GoogleCloudStorageService override URL path --- .../gcs/GoogleCloudStorageService.java | 35 +++++++++++++------ 1 file changed, 25 insertions(+), 10 deletions(-) diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index 91a636d208d86..ad1143a5d791e 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -46,7 +46,7 @@ public class GoogleCloudStorageService extends AbstractComponent { /** Clients settings identified by client name. */ private final Map clientsSettings; - public GoogleCloudStorageService(Environment environment, Map clientsSettings) { + public GoogleCloudStorageService(final Environment environment, final Map clientsSettings) { super(environment.settings()); this.clientsSettings = clientsSettings; } @@ -54,8 +54,7 @@ public GoogleCloudStorageService(Environment environment, Map httpTransport) .build(); final StorageOptions.Builder storageOptionsBuilder = StorageOptions.newBuilder() @@ -103,26 +101,43 @@ public Storage createClient(final String clientName) throws Exception { return storageOptionsBuilder.build().getService(); } - HttpTransport createHttpTransport(final String endpoint) throws Exception { + /** + * Pins the TLS trust certificates and, more importantly, overrides connection + * URLs in the case of a custom endpoint setting because some connections don't + * fully honor this setting (bugs in the SDK). + **/ + private static HttpTransport createHttpTransport(final String endpoint) throws Exception { final NetHttpTransport.Builder builder = new NetHttpTransport.Builder(); + // requires java.lang.RuntimePermission "setFactory" builder.trustCertificates(GoogleUtils.getCertificateTrustStore()); if (Strings.hasLength(endpoint)) { final URL endpointUrl = URI.create(endpoint).toURL(); builder.setConnectionFactory(new DefaultConnectionFactory() { @Override public HttpURLConnection openConnection(final URL originalUrl) throws IOException { + // test if the URL is built correctly, ie following the `host` setting if (originalUrl.getHost().equals(endpointUrl.getHost()) && originalUrl.getPort() == endpointUrl.getPort() && originalUrl.getProtocol().equals(endpointUrl.getProtocol())) { - super.openConnection(originalUrl); + return super.openConnection(originalUrl); } + // override connection URLs because some don't follow the config. See + // https://github.com/GoogleCloudPlatform/google-cloud-java/issues/3254 and + // https://github.com/GoogleCloudPlatform/google-cloud-java/issues/3255 URI originalUri; try { originalUri = originalUrl.toURI(); } catch (final URISyntaxException e) { throw new RuntimeException(e); } - return super.openConnection(new URL(endpointUrl.getProtocol(), endpointUrl.getHost(), endpointUrl.getPort(), - originalUri.getRawPath() + "?" + originalUri.getRawQuery())); + String overridePath = "/"; + if (originalUri.getRawPath() != null) { + overridePath = originalUri.getRawPath(); + } + if (originalUri.getRawQuery() != null) { + overridePath += "?" 
+ originalUri.getRawQuery(); + } + return super.openConnection( + new URL(endpointUrl.getProtocol(), endpointUrl.getHost(), endpointUrl.getPort(), overridePath)); } }); } @@ -133,9 +148,9 @@ public HttpURLConnection openConnection(final URL originalUrl) throws IOExceptio * Converts timeout values from the settings to a timeout value for the Google * Cloud SDK **/ - static Integer toTimeout(TimeValue timeout) { + static Integer toTimeout(final TimeValue timeout) { // Null or zero in settings means the default timeout - if ((timeout == null) || TimeValue.ZERO.equals(timeout)) { + if (timeout == null || TimeValue.ZERO.equals(timeout)) { // negative value means using the default value return -1; } From 1da918c4fecd36a5c724749a39181cb1131c68ff Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 11 May 2018 10:40:11 +0300 Subject: [PATCH 42/45] Nit: bucket != null --- .../repositories/gcs/GoogleCloudStorageBlobStore.java | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 7c1c8b1e0564f..ad4a7863f6b17 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -97,10 +97,7 @@ public void close() { boolean doesBucketExist(String bucketName) { try { final Bucket bucket = SocketAccess.doPrivilegedIOException(() -> storage.get(bucketName)); - if (bucket != null) { - return Strings.hasText(bucket.getName()); - } - return false; + return bucket != null; } catch (final Exception e) { throw new BlobStoreException("Unable to check if bucket [" + bucketName + "] exists", e); } From b3a1006b0b930ef8f6a86932da71e9ccec7d414c Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Fri, 11 May 2018 13:06:21 +0300 Subject: [PATCH 43/45] Documented project id --- docs/plugins/repository-gcs.asciidoc | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/docs/plugins/repository-gcs.asciidoc b/docs/plugins/repository-gcs.asciidoc index a51200fb7fef0..367b20d65434f 100644 --- a/docs/plugins/repository-gcs.asciidoc +++ b/docs/plugins/repository-gcs.asciidoc @@ -84,11 +84,7 @@ A service account file looks like this: "private_key_id": "...", "private_key": "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n", "client_email": "service-account-for-your-repository@your-project-id.iam.gserviceaccount.com", - "client_id": "...", - "auth_uri": "https://accounts.google.com/o/oauth2/auth", - "token_uri": "https://accounts.google.com/o/oauth2/token", - "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", - "client_x509_cert_url": "..." + "client_id": "..." } ---- // NOTCONSOLE @@ -178,6 +174,12 @@ are marked as `Secure`. a custom name can be useful to authenticate your cluster when requests statistics are logged in the Google Cloud Platform. Default to `repository-gcs` +`project_id`:: + + The Google Cloud project id. This will be automatically infered from the credentials file but + can be specified explicitly. For example, it can be used to switch between projects when the + same credentials are usable for both the production and the development projects. 
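A minimal sketch of the project id fallback described above, outside the patch itself: when the explicit `project_id` client setting is present it takes precedence, otherwise the id embedded in the service account JSON key is used. The `resolveProjectId` helper below is hypothetical and only illustrates that precedence; `ServiceAccountCredentials.getProjectId()` and `Strings.hasLength()` are the same accessors the patches in this series already use.

    import com.google.auth.oauth2.ServiceAccountCredentials;
    import org.elasticsearch.common.Strings;

    // Hypothetical helper, not part of the plugin code: shows how the effective
    // project id can be resolved following the precedence documented above.
    final class ProjectIdResolution {

        private ProjectIdResolution() {}

        static String resolveProjectId(final String configuredProjectId, final ServiceAccountCredentials credentials) {
            if (Strings.hasLength(configuredProjectId)) {
                // an explicit gcs.client.*.project_id setting wins
                return configuredProjectId;
            }
            // otherwise fall back to the project id parsed from the credentials file
            return credentials.getProjectId();
        }
    }

The testProjectIdDefaultsToCredentials test added later in this series exercises exactly this fallback, asserting that the client settings report the credentials' project id when no explicit setting is given.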
+ [[repository-gcs-repository]] ==== Repository Settings From e99c69e5ddee60a6eda3080fad23a0ae9fe76eb7 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 15 May 2018 10:26:28 +0300 Subject: [PATCH 44/45] Minor trimmings --- .../gcs/GoogleCloudStorageService.java | 4 ++- ...GoogleCloudStorageClientSettingsTests.java | 11 +++---- .../plugin-metadata/plugin-security.policy | 30 ------------------- .../src/test/resources/plugin-security.policy | 23 -------------- 4 files changed, 9 insertions(+), 59 deletions(-) delete mode 100644 plugins/repository-gcs/src/test/plugin-metadata/plugin-security.policy delete mode 100644 plugins/repository-gcs/src/test/resources/plugin-security.policy diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index ad1143a5d791e..57bcc4b131356 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -104,7 +104,9 @@ public Storage createClient(final String clientName) throws Exception { /** * Pins the TLS trust certificates and, more importantly, overrides connection * URLs in the case of a custom endpoint setting because some connections don't - * fully honor this setting (bugs in the SDK). + * fully honor this setting (bugs in the SDK). The default connection factory + * opens a new connection for each request. This is required for the storage + * instance to be thread-safe. **/ private static HttpTransport createHttpTransport(final String endpoint) throws Exception { final NetHttpTransport.Builder builder = new NetHttpTransport.Builder(); diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java index 2d70dc8e59e38..fa2ea158ec627 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java @@ -128,12 +128,12 @@ private static GoogleCloudStorageClientSettings randomClient(final String client final ServiceAccountCredentials credential = credentials.v1(); secureSettings.setFile(CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName).getKey(), credentials.v2()); - String host; + String endpoint; if (randomBoolean()) { - host = randomAlphaOfLength(5); - settings.put(ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), host); + endpoint = randomAlphaOfLength(5); + settings.put(ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), endpoint); } else { - host = ENDPOINT_SETTING.getDefault(Settings.EMPTY); + endpoint = ENDPOINT_SETTING.getDefault(Settings.EMPTY); } String projectId; @@ -169,7 +169,8 @@ private static GoogleCloudStorageClientSettings randomClient(final String client applicationName = APPLICATION_NAME_SETTING.getDefault(Settings.EMPTY); } - return new GoogleCloudStorageClientSettings(credential, host, projectId, connectTimeout, readTimeout, applicationName, new URI("")); + return new GoogleCloudStorageClientSettings(credential, endpoint, projectId, connectTimeout, readTimeout, applicationName, + new 
URI("")); } /** Generates a random GoogleCredential along with its corresponding Service Account file provided as a byte array **/ diff --git a/plugins/repository-gcs/src/test/plugin-metadata/plugin-security.policy b/plugins/repository-gcs/src/test/plugin-metadata/plugin-security.policy deleted file mode 100644 index a854829d6a229..0000000000000 --- a/plugins/repository-gcs/src/test/plugin-metadata/plugin-security.policy +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -grant { - permission java.lang.RuntimePermission "accessDeclaredMembers"; - permission java.lang.RuntimePermission "setFactory"; - //permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; - permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; - permission java.net.URLPermission "https://www.googleapis.com/-", "*:*"; - permission java.net.URLPermission "https://accounts.google.com/-", "*:*"; - - // gcs client opens socket connections for to access repository - permission java.net.SocketPermission "*", "connect"; -}; diff --git a/plugins/repository-gcs/src/test/resources/plugin-security.policy b/plugins/repository-gcs/src/test/resources/plugin-security.policy deleted file mode 100644 index fece20dcba580..0000000000000 --- a/plugins/repository-gcs/src/test/resources/plugin-security.policy +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -grant { - permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; -}; - From 85d9bd920517bbf723a9bdeb265525a1b6d6c8e8 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 15 May 2018 16:04:30 +0300 Subject: [PATCH 45/45] Addressed feedback --- docs/plugins/repository-gcs.asciidoc | 2 +- plugins/repository-gcs/build.gradle | 2 -- .../gcs/GoogleCloudStorageTestServer.java | 2 +- .../gcs/GoogleCloudStorageBlobStore.java | 16 +++++----- .../gcs/GoogleCloudStorageClientSettings.java | 29 +++++++------------ ...GoogleCloudStorageClientSettingsTests.java | 14 ++++++++- 6 files changed, 32 insertions(+), 33 deletions(-) diff --git a/docs/plugins/repository-gcs.asciidoc b/docs/plugins/repository-gcs.asciidoc index 367b20d65434f..8cf2bc0a73c92 100644 --- a/docs/plugins/repository-gcs.asciidoc +++ b/docs/plugins/repository-gcs.asciidoc @@ -177,7 +177,7 @@ are marked as `Secure`. `project_id`:: The Google Cloud project id. This will be automatically infered from the credentials file but - can be specified explicitly. For example, it can be used to switch between projects when the + can be specified explicitly. For example, it can be used to switch between projects when the same credentials are usable for both the production and the development projects. [[repository-gcs-repository]] diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 8532c29e33fa0..07ef4b4be5e62 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -17,8 +17,6 @@ * under the License. */ -import org.elasticsearch.gradle.test.AntFixture - esplugin { description 'The GCS repository plugin adds Google Cloud Storage support for repositories.' classname 'org.elasticsearch.repositories.gcs.GoogleCloudStoragePlugin' diff --git a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java index 23e61c399274d..a9832ae318de4 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java +++ b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java @@ -209,7 +209,7 @@ private static PathTrie defaultHandlers(final String endpoint, f if (bucket == null) { return newError(RestStatus.NOT_FOUND, "bucket not found"); } - if (bucket.objects.put(objectName, EMPTY_BYTE) == null) { + if (bucket.objects.putIfAbsent(objectName, EMPTY_BYTE) == null) { final String location = endpoint + "/upload/storage/v1/b/" + bucket.name + "/o?uploadType=resumable&upload_id=" + objectName; return new Response(RestStatus.CREATED, singletonMap("Location", location), XContentType.JSON.mediaType(), EMPTY_BYTE); diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index ad4a7863f6b17..5dc03ea45de03 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -28,7 +28,6 @@ import com.google.cloud.storage.Storage; import com.google.cloud.storage.Storage.BlobListOption; import 
com.google.cloud.storage.Storage.CopyRequest; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; @@ -146,10 +145,7 @@ Map listBlobsByPrefix(String path, String prefix) throws I boolean blobExists(String blobName) throws IOException { final BlobId blobId = BlobId.of(bucket, blobName); final Blob blob = SocketAccess.doPrivilegedIOException(() -> storage.get(blobId)); - if (blob != null) { - return Strings.hasText(blob.getName()); - } - return false; + return blob != null; } /** @@ -273,10 +269,12 @@ void deleteBlobsByPrefix(String prefix) throws IOException { * @param blobNames names of the bucket to delete */ void deleteBlobs(Collection blobNames) throws IOException { - if (blobNames.size() < 2) { - for (final String blobName : blobNames) { - deleteBlob(blobName); - } + if (blobNames.isEmpty()) { + return; + } + // for a single op submit a simple delete instead of a batch of size 1 + if (blobNames.size() == 1) { + deleteBlob(blobNames.iterator().next()); return; } final List blobIdsToDelete = blobNames.stream().map(blobName -> BlobId.of(bucket, blobName)).collect(Collectors.toList()); diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java index d2fca7458c5eb..99df38413326c 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -50,9 +50,7 @@ public class GoogleCloudStorageClientSettings { static final Setting.AffixSetting CREDENTIALS_FILE_SETTING = Setting.affixKeySetting(PREFIX, "credentials_file", key -> SecureSetting.secureFile(key, null)); - /** - * An override for the Storage endpoint to connect to. - */ + /** An override for the Storage endpoint to connect to. */ static final Setting.AffixSetting ENDPOINT_SETTING = Setting.affixKeySetting(PREFIX, "endpoint", key -> Setting.simpleString(key, Setting.Property.NodeScope)); @@ -78,36 +76,29 @@ public class GoogleCloudStorageClientSettings { static final Setting.AffixSetting READ_TIMEOUT_SETTING = Setting.affixKeySetting(PREFIX, "read_timeout", key -> timeSetting(key, TimeValue.ZERO, TimeValue.MINUS_ONE, Setting.Property.NodeScope)); - /** Name used by the client when it uses the Google Cloud JSON API. **/ + /** Name used by the client when it uses the Google Cloud JSON API. */ static final Setting.AffixSetting APPLICATION_NAME_SETTING = Setting.affixKeySetting(PREFIX, "application_name", - key -> new Setting<>(key, "repository-gcs", Function.identity(), Setting.Property.NodeScope, - Setting.Property.Deprecated)); + key -> new Setting<>(key, "repository-gcs", Function.identity(), Setting.Property.NodeScope, Setting.Property.Deprecated)); - /** The credentials used by the client to connect to the Storage endpoint **/ + /** The credentials used by the client to connect to the Storage endpoint. */ private final ServiceAccountCredentials credential; - /** - * The Storage endpoint URL the client should talk to, or null string to use the - * default - **/ + /** The Storage endpoint URL the client should talk to. Null value sets the default. 
*/ private final String endpoint; - /** - * The Google project ID overriding the default way to infer it. Null value sets - * the default. - **/ + /** The Google project ID overriding the default way to infer it. Null value sets the default. */ private final String projectId; - /** The timeout to establish a connection **/ + /** The timeout to establish a connection */ private final TimeValue connectTimeout; - /** The timeout to read data from an established connection **/ + /** The timeout to read data from an established connection */ private final TimeValue readTimeout; - /** The Storage client application name **/ + /** The Storage client application name */ private final String applicationName; - /** The token server URI. This leases access tokens in the oauth flow. **/ + /** The token server URI. This leases access tokens in the oauth flow. */ private final URI tokenUri; GoogleCloudStorageClientSettings(final ServiceAccountCredentials credential, diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java index fa2ea158ec627..14cb4fa242e7d 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java @@ -93,6 +93,17 @@ public void testLoadCredential() throws Exception { assertGoogleCredential(expectedClientSettings.getCredential(), loadCredential(randomClient.v2(), clientName)); } + public void testProjectIdDefaultsToCredentials() throws Exception { + final String clientName = randomAlphaOfLength(5); + final Tuple credentials = randomCredential(clientName); + final ServiceAccountCredentials credential = credentials.v1(); + final GoogleCloudStorageClientSettings googleCloudStorageClientSettings = new GoogleCloudStorageClientSettings(credential, + ENDPOINT_SETTING.getDefault(Settings.EMPTY), PROJECT_ID_SETTING.getDefault(Settings.EMPTY), + CONNECT_TIMEOUT_SETTING.getDefault(Settings.EMPTY), READ_TIMEOUT_SETTING.getDefault(Settings.EMPTY), + APPLICATION_NAME_SETTING.getDefault(Settings.EMPTY), new URI("")); + assertEquals(credential.getProjectId(), googleCloudStorageClientSettings.getProjectId()); + } + /** Generates a given number of GoogleCloudStorageClientSettings along with the Settings to build them from **/ private Tuple, Settings> randomClients(final int nbClients, final List> deprecationWarnings) @@ -130,7 +141,8 @@ private static GoogleCloudStorageClientSettings randomClient(final String client String endpoint; if (randomBoolean()) { - endpoint = randomAlphaOfLength(5); + endpoint = randomFrom("http://www.elastic.co", "http://metadata.google.com:88/oauth", "https://www.googleapis.com", + "https://www.elastic.co:443", "http://localhost:8443", "https://www.googleapis.com/oauth/token"); settings.put(ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), endpoint); } else { endpoint = ENDPOINT_SETTING.getDefault(Settings.EMPTY);