// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";

package google.storagetransfer.v1;

import "google/api/annotations.proto";
import "google/protobuf/duration.proto";
import "google/protobuf/timestamp.proto";
import "google/rpc/code.proto";
import "google/type/date.proto";
import "google/type/timeofday.proto";

option cc_enable_arenas = true;
option csharp_namespace = "Google.Cloud.StorageTransfer.V1";
option go_package = "google.golang.org/genproto/googleapis/storagetransfer/v1;storagetransfer";
option java_outer_classname = "TransferTypes";
option java_package = "com.google.storagetransfer.v1.proto";
option php_namespace = "Google\\Cloud\\StorageTransfer\\V1";
// Google service account
message GoogleServiceAccount {
  // Email address of the service account.
  // Required.
  string account_email = 1;
}
// AWS access key (see
// [AWS Security
// Credentials](http://docs.aws.amazon.com/general/latest/gr/aws-security-credentials.html)).
message AwsAccessKey {
  // AWS access key ID.
  // Required.
  string access_key_id = 1;

  // AWS secret access key. This field is not returned in RPC responses.
  // Required.
  string secret_access_key = 2;
}
// Conditions that determine which objects will be transferred.
message ObjectConditions {
  // If unspecified, `minTimeElapsedSinceLastModification` takes a zero value
  // and `maxTimeElapsedSinceLastModification` takes the maximum possible
  // value of Duration. Objects that satisfy the object conditions
  // must either have a `lastModificationTime` greater or equal to
  // `NOW` - `maxTimeElapsedSinceLastModification` and less than
  // `NOW` - `minTimeElapsedSinceLastModification`, or not have a
  // `lastModificationTime`.
  google.protobuf.Duration min_time_elapsed_since_last_modification = 1;

  // `maxTimeElapsedSinceLastModification` is the complement to
  // `minTimeElapsedSinceLastModification`.
  google.protobuf.Duration max_time_elapsed_since_last_modification = 2;

  // If `includePrefixes` is specified, objects that satisfy the object
  // conditions must have names that start with one of the `includePrefixes`
  // and that do not start with any of the `excludePrefixes`. If
  // `includePrefixes` is not specified, all objects except those that have
  // names starting with one of the `excludePrefixes` must satisfy the object
  // conditions.
  //
  // Requirements:
  //
  //   * Each include-prefix and exclude-prefix can contain any sequence of
  //     Unicode characters, of max length 1024 bytes when UTF8-encoded, and
  //     must not contain Carriage Return or Line Feed characters. Wildcard
  //     matching and regular expression matching are not supported.
  //
  //   * Each include-prefix and exclude-prefix must omit the leading slash.
  //     For example, to include the `requests.gz` object in a transfer from
  //     `s3://my-aws-bucket/logs/y=2015/requests.gz`, specify the include
  //     prefix as `logs/y=2015/requests.gz`.
  //
  //   * None of the include-prefix or the exclude-prefix values can be empty,
  //     if specified.
  //
  //   * Each include-prefix must include a distinct portion of the object
  //     namespace, i.e., no include-prefix may be a prefix of another
  //     include-prefix.
  //
  //   * Each exclude-prefix must exclude a distinct portion of the object
  //     namespace, i.e., no exclude-prefix may be a prefix of another
  //     exclude-prefix.
  //
  //   * If `includePrefixes` is specified, then each exclude-prefix must start
  //     with the value of a path explicitly included by `includePrefixes`.
  //
  // The max size of `includePrefixes` is 1000.
  repeated string include_prefixes = 3;

  // `excludePrefixes` must follow the requirements described for
  // `includePrefixes`.
  //
  // The max size of `excludePrefixes` is 1000.
  repeated string exclude_prefixes = 4;
}
// In a GcsData, an object's name is the Google Cloud Storage object's name and
// its `lastModificationTime` refers to the object's updated time, which changes
// when the content or the metadata of the object is updated.
message GcsData {
  // Google Cloud Storage bucket name (see
  // [Bucket Name
  // Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)).
  // Required.
  string bucket_name = 1;
}
// An AwsS3Data can be a data source, but not a data sink.
// In an AwsS3Data, an object's name is the S3 object's key name.
message AwsS3Data {
  // S3 Bucket name (see
  // [Creating a
  // bucket](http://docs.aws.amazon.com/AmazonS3/latest/dev/create-bucket-get-location-example.html)).
  // Required.
  string bucket_name = 1;

  // AWS access key used to sign the API requests to the AWS S3 bucket.
  // Permissions on the bucket must be granted to the access ID of the
  // AWS access key.
  // Required.
  AwsAccessKey aws_access_key = 2;
}
// An HttpData specifies a list of objects on the web to be transferred over
// HTTP. The information of the objects to be transferred is contained in a
// file referenced by a URL. The first line in the file must be
// "TsvHttpData-1.0", which specifies the format of the file. Subsequent lines
// specify the information of the list of objects, one object per list entry.
// Each entry has the following tab-delimited fields:
//
// * HTTP URL - The location of the object.
//
// * Length - The size of the object in bytes.
//
// * MD5 - The base64-encoded MD5 hash of the object.
//
// For an example of a valid TSV file, see
// [Transferring data from
// URLs](https://cloud.google.com/storage/transfer/create-url-list).
//
// When transferring data based on a URL list, keep the following in mind:
//
// * When an object located at `http(s)://hostname:port/<URL-path>` is
// transferred to a data sink, the name of the object at the data sink is
// `<hostname>/<URL-path>`.
//
// * If the specified size of an object does not match the actual size of the
// object fetched, the object will not be transferred.
//
// * If the specified MD5 does not match the MD5 computed from the transferred
// bytes, the object transfer will fail. For more information, see
// [Generating MD5 hashes](https://cloud.google.com/storage/transfer/#md5)
//
// * Ensure that each URL you specify is publicly accessible. For
// example, in Google Cloud Storage you can
// [share an object publicly]
// (https://cloud.google.com/storage/docs/cloud-console#_sharingdata) and get
// a link to it.
//
// * Storage Transfer Service obeys `robots.txt` rules and requires the source
// HTTP server to support `Range` requests and to return a `Content-Length`
// header in each response.
//
// * [ObjectConditions](#ObjectConditions) have no effect when filtering objects
// to transfer.
message HttpData {
  // The URL that points to the file that stores the object list entries.
  // This file must allow public access. Currently, only URLs with HTTP and
  // HTTPS schemes are supported.
  // Required.
  string list_url = 1;
}
// TransferOptions uses three boolean parameters to define the actions
// to be performed on objects in a transfer.
message TransferOptions {
  // Whether overwriting objects that already exist in the sink is allowed.
  bool overwrite_objects_already_existing_in_sink = 1;

  // Whether objects that exist only in the sink should be deleted. Note that
  // this option and `deleteObjectsFromSourceAfterTransfer` are mutually
  // exclusive.
  bool delete_objects_unique_in_sink = 2;

  // Whether objects should be deleted from the source after they are
  // transferred to the sink. Note that this option and
  // `deleteObjectsUniqueInSink` are mutually exclusive.
  bool delete_objects_from_source_after_transfer = 3;
}
// Configuration for running a transfer.
message TransferSpec {
  // The read source of the data.
  oneof data_source {
    // A Google Cloud Storage data source.
    GcsData gcs_data_source = 1;

    // An AWS S3 data source.
    AwsS3Data aws_s3_data_source = 2;

    // An HTTP URL data source.
    HttpData http_data_source = 3;
  }

  // The write sink for the data.
  oneof data_sink {
    // A Google Cloud Storage data sink.
    GcsData gcs_data_sink = 4;
  }

  // Only objects that satisfy these object conditions are included in the set
  // of data source and data sink objects. Object conditions based on
  // objects' `lastModificationTime` do not exclude objects in a data sink.
  ObjectConditions object_conditions = 5;

  // If the option `deleteObjectsUniqueInSink` is `true`, object conditions
  // based on objects' `lastModificationTime` are ignored and do not exclude
  // objects in a data source or a data sink.
  TransferOptions transfer_options = 6;
}
// Transfers can be scheduled to recur or to run just once.
message Schedule {
  // The first day the recurring transfer is scheduled to run. If
  // `scheduleStartDate` is in the past, the transfer will run for the first
  // time on the following day.
  // Required.
  google.type.Date schedule_start_date = 1;

  // The last day the recurring transfer will be run. If `scheduleEndDate`
  // is the same as `scheduleStartDate`, the transfer will be executed only
  // once.
  google.type.Date schedule_end_date = 2;

  // The time in UTC at which the transfer will be scheduled to start in a day.
  // Transfers may start later than this time. If not specified, recurring and
  // one-time transfers that are scheduled to run today will run immediately;
  // recurring transfers that are scheduled to run on a future date will start
  // at approximately midnight UTC on that date. Note that when configuring a
  // transfer with the Cloud Platform Console, the transfer's start time in a
  // day is specified in your local timezone.
  google.type.TimeOfDay start_time_of_day = 3;
}
// This resource represents the configuration of a transfer job that runs
// periodically.
message TransferJob {
  // The status of the transfer job.
  enum Status {
    // Zero is an illegal value.
    STATUS_UNSPECIFIED = 0;

    // New transfers will be performed based on the schedule.
    ENABLED = 1;

    // New transfers will not be scheduled.
    DISABLED = 2;

    // This is a soft delete state. After a transfer job is set to this
    // state, the job and all the transfer executions are subject to
    // garbage collection.
    DELETED = 3;
  }

  // A globally unique name assigned by Storage Transfer Service when the
  // job is created. This field should be left empty in requests to create a new
  // transfer job; otherwise, the requests result in an `INVALID_ARGUMENT`
  // error.
  string name = 1;

  // A description provided by the user for the job. Its max length is 1024
  // bytes when Unicode-encoded.
  string description = 2;

  // The ID of the Google Cloud Platform Console project that owns the job.
  string project_id = 3;

  // Transfer specification.
  TransferSpec transfer_spec = 4;

  // Schedule specification.
  Schedule schedule = 5;

  // Status of the job. This value MUST be specified for
  // `CreateTransferJobRequests`.
  //
  // NOTE: The effect of the new job status takes place during a subsequent job
  // run. For example, if you change the job status from `ENABLED` to
  // `DISABLED`, and an operation spawned by the transfer is running, the status
  // change would not affect the current operation.
  Status status = 6;

  // This field cannot be changed by user requests.
  google.protobuf.Timestamp creation_time = 7;

  // This field cannot be changed by user requests.
  google.protobuf.Timestamp last_modification_time = 8;

  // This field cannot be changed by user requests.
  google.protobuf.Timestamp deletion_time = 9;
}
// An entry describing an error that has occurred.
message ErrorLogEntry {
  // A URL that refers to the target (a data source, a data sink,
  // or an object) with which the error is associated.
  // Required.
  string url = 1;

  // A list of messages that carry the error details.
  repeated string error_details = 3;
}
// A summary of errors by error code, plus a count and sample error log
// entries.
message ErrorSummary {
  // Required.
  google.rpc.Code error_code = 1;

  // Count of this type of error.
  // Required.
  int64 error_count = 2;

  // Error samples.
  repeated ErrorLogEntry error_log_entries = 3;
}
// A collection of counters that report the progress of a transfer operation.
message TransferCounters {
  // Objects found in the data source that are scheduled to be transferred,
  // excluding any that are filtered based on object conditions or skipped due
  // to sync.
  int64 objects_found_from_source = 1;

  // Bytes found in the data source that are scheduled to be transferred,
  // excluding any that are filtered based on object conditions or skipped due
  // to sync.
  int64 bytes_found_from_source = 2;

  // Objects found only in the data sink that are scheduled to be deleted.
  int64 objects_found_only_from_sink = 3;

  // Bytes found only in the data sink that are scheduled to be deleted.
  int64 bytes_found_only_from_sink = 4;

  // Objects in the data source that are not transferred because they already
  // exist in the data sink.
  int64 objects_from_source_skipped_by_sync = 5;

  // Bytes in the data source that are not transferred because they already
  // exist in the data sink.
  int64 bytes_from_source_skipped_by_sync = 6;

  // Objects that are copied to the data sink.
  int64 objects_copied_to_sink = 7;

  // Bytes that are copied to the data sink.
  int64 bytes_copied_to_sink = 8;

  // Objects that are deleted from the data source.
  int64 objects_deleted_from_source = 9;

  // Bytes that are deleted from the data source.
  int64 bytes_deleted_from_source = 10;

  // Objects that are deleted from the data sink.
  int64 objects_deleted_from_sink = 11;

  // Bytes that are deleted from the data sink.
  int64 bytes_deleted_from_sink = 12;

  // Objects in the data source that failed during the transfer.
  int64 objects_from_source_failed = 13;

  // Bytes in the data source that failed during the transfer.
  int64 bytes_from_source_failed = 14;

  // Objects that failed to be deleted from the data sink.
  int64 objects_failed_to_delete_from_sink = 15;

  // Bytes that failed to be deleted from the data sink.
  int64 bytes_failed_to_delete_from_sink = 16;
}
// A description of the execution of a transfer.
message TransferOperation {
  // The status of a TransferOperation.
  enum Status {
    // Zero is an illegal value.
    STATUS_UNSPECIFIED = 0;

    // In progress.
    IN_PROGRESS = 1;

    // Paused.
    PAUSED = 2;

    // Completed successfully.
    SUCCESS = 3;

    // Terminated due to an unrecoverable failure.
    FAILED = 4;

    // Aborted by the user.
    ABORTED = 5;
  }

  // A globally unique ID assigned by the system.
  string name = 1;

  // The ID of the Google Cloud Platform Console project that owns the
  // operation. Required.
  string project_id = 2;

  // Transfer specification.
  // Required.
  TransferSpec transfer_spec = 3;

  // Start time of this transfer execution.
  google.protobuf.Timestamp start_time = 4;

  // End time of this transfer execution.
  google.protobuf.Timestamp end_time = 5;

  // Status of the transfer operation.
  Status status = 6;

  // Information about the progress of the transfer operation.
  TransferCounters counters = 7;

  // Summarizes errors encountered with sample error log entries.
  repeated ErrorSummary error_breakdowns = 8;

  // The name of the transfer job that triggers this transfer operation.
  string transfer_job_name = 9;
}