+ rpc BatchDeleteIntents(BatchDeleteIntentsRequest) returns (google.longrunning.Operation) {
+ option (google.api.http) = {
+ post: "/v2/{parent=projects/*/agent}/intents:batchDelete"
+ body: "*"
+ };
+ }
+}
+
+// Represents an intent.
+// Intents convert a number of user expressions or patterns into an action. An
+// action is an extraction of a user command or sentence semantics.
+message Intent {
+ // Represents an example or template that the agent is trained on.
+ message TrainingPhrase {
+ // Represents a part of a training phrase.
+ message Part {
+ // Required. The text corresponding to the example or template,
+ // if there are no annotations. For
+ // annotated examples, it is the text for one of the example's parts.
+ string text = 1;
+
+ // Optional. The entity type name prefixed with `@`. This field is
+ // required for the annotated part of the text and applies only to
+ // examples.
+ string entity_type = 2;
+
+ // Optional. The parameter name for the value extracted from the
+ // annotated part of the example.
+ string alias = 3;
+
+ // Optional. Indicates whether the text was manually annotated by the
+ // developer.
+ bool user_defined = 4;
+ }
+
+ // Represents different types of training phrases.
+ enum Type {
+ // Not specified. This value should never be used.
+ TYPE_UNSPECIFIED = 0;
+
+ // Examples do not contain @-prefixed entity type names, but example parts
+ // can be annotated with entity types.
+ EXAMPLE = 1;
+
+ // Templates are not annotated with entity types, but they can contain
+ // @-prefixed entity type names as substrings.
+ TEMPLATE = 2;
+ }
+
+ // Required. The unique identifier of this training phrase.
+ string name = 1;
+
+ // Required. The type of the training phrase.
+ Type type = 2;
+
+ // Required. The collection of training phrase parts (can be annotated).
+ // Fields: `entity_type`, `alias` and `user_defined` should be populated
+ // only for the annotated parts of the training phrase.
+ repeated Part parts = 3;
+
+ // Optional. Indicates how many times this example or template was added to
+ // the intent. Each time a developer adds an existing sample by editing an
+ // intent or training, this counter is increased.
+ int32 times_added_count = 4;
+ }
+
+ // Represents intent parameters.
+ message Parameter {
+ // The unique identifier of this parameter.
+ string name = 1;
+
+ // Required. The name of the parameter.
+ string display_name = 2;
+
+ // Optional. The definition of the parameter value. It can be:
+ // - a constant string,
+ // - a parameter value defined as `$parameter_name`,
+ // - an original parameter value defined as `$parameter_name.original`,
+ // - a parameter value from some context defined as
+ // `#context_name.parameter_name`.
+ string value = 3;
+
+ // Optional. The default value to use when the `value` yields an empty
+ // result.
+ // Default values can be extracted from contexts by using the following
+ // syntax: `#context_name.parameter_name`.
+ string default_value = 4;
+
+ // Optional. The name of the entity type, prefixed with `@`, that
+ // describes values of the parameter. If the parameter is
+ // required, this must be provided.
+ string entity_type_display_name = 5;
+
+ // Optional. Indicates whether the parameter is required. That is,
+ // whether the intent cannot be completed without collecting the parameter
+ // value.
+ bool mandatory = 6;
+
+ // Optional. The collection of prompts that the agent can present to the
+    // user in order to collect a value for the parameter.
+ repeated string prompts = 7;
+
+ // Optional. Indicates whether the parameter represents a list of values.
+ bool is_list = 8;
+ }
+
+ // Corresponds to the `Response` field in the Dialogflow console.
+ message Message {
+ // The text response message.
+ message Text {
+ // Optional. The collection of the agent's responses.
+ repeated string text = 1;
+ }
+
+ // The image response message.
+ message Image {
+ // Optional. The public URI to an image file.
+ string image_uri = 1;
+
+ // Optional. A text description of the image to be used for accessibility,
+ // e.g., screen readers.
+ string accessibility_text = 2;
+ }
+
+ // The quick replies response message.
+ message QuickReplies {
+ // Optional. The title of the collection of quick replies.
+ string title = 1;
+
+ // Optional. The collection of quick replies.
+ repeated string quick_replies = 2;
+ }
+
+ // The card response message.
+ message Card {
+ // Optional. Contains information about a button.
+ message Button {
+ // Optional. The text to show on the button.
+ string text = 1;
+
+ // Optional. The text to send back to the Dialogflow API or a URI to
+ // open.
+ string postback = 2;
+ }
+
+ // Optional. The title of the card.
+ string title = 1;
+
+ // Optional. The subtitle of the card.
+ string subtitle = 2;
+
+ // Optional. The public URI to an image file for the card.
+ string image_uri = 3;
+
+ // Optional. The collection of card buttons.
+ repeated Button buttons = 4;
+ }
+
+ // The simple response message containing speech or text.
+ message SimpleResponse {
+ // One of text_to_speech or ssml must be provided. The plain text of the
+ // speech output. Mutually exclusive with ssml.
+ string text_to_speech = 1;
+
+ // One of text_to_speech or ssml must be provided. Structured spoken
+ // response to the user in the SSML format. Mutually exclusive with
+ // text_to_speech.
+ string ssml = 2;
+
+ // Optional. The text to display.
+ string display_text = 3;
+ }
+
+ // The collection of simple response candidates.
+ // This message in `QueryResult.fulfillment_messages` and
+ // `WebhookResponse.fulfillment_messages` should contain only one
+ // `SimpleResponse`.
+ message SimpleResponses {
+ // Required. The list of simple responses.
+ repeated SimpleResponse simple_responses = 1;
+ }
+
+ // The basic card message. Useful for displaying information.
+ message BasicCard {
+ // The button object that appears at the bottom of a card.
+ message Button {
+ // Opens the given URI.
+ message OpenUriAction {
+ // Required. The HTTP or HTTPS scheme URI.
+ string uri = 1;
+ }
+
+ // Required. The title of the button.
+ string title = 1;
+
+ // Required. Action to take when a user taps on the button.
+ OpenUriAction open_uri_action = 2;
+ }
+
+ // Optional. The title of the card.
+ string title = 1;
+
+ // Optional. The subtitle of the card.
+ string subtitle = 2;
+
+ // Required, unless image is present. The body text of the card.
+ string formatted_text = 3;
+
+ // Optional. The image for the card.
+ Image image = 4;
+
+ // Optional. The collection of card buttons.
+ repeated Button buttons = 5;
+ }
+
+ // The suggestion chip message that the user can tap to quickly post a reply
+ // to the conversation.
+ message Suggestion {
+      // Required. The text shown in the suggestion chip.
+ string title = 1;
+ }
+
+ // The collection of suggestions.
+ message Suggestions {
+ // Required. The list of suggested replies.
+ repeated Suggestion suggestions = 1;
+ }
+
+ // The suggestion chip message that allows the user to jump out to the app
+ // or website associated with this agent.
+ message LinkOutSuggestion {
+ // Required. The name of the app or site this chip is linking to.
+ string destination_name = 1;
+
+ // Required. The URI of the app or site to open when the user taps the
+ // suggestion chip.
+ string uri = 2;
+ }
+
+ // The card for presenting a list of options to select from.
+ message ListSelect {
+ // An item in the list.
+ message Item {
+ // Required. Additional information about this option.
+ SelectItemInfo info = 1;
+
+ // Required. The title of the list item.
+ string title = 2;
+
+ // Optional. The main text describing the item.
+ string description = 3;
+
+ // Optional. The image to display.
+ Image image = 4;
+ }
+
+ // Optional. The overall title of the list.
+ string title = 1;
+
+ // Required. List items.
+ repeated Item items = 2;
+ }
+
+ // The card for presenting a carousel of options to select from.
+ message CarouselSelect {
+ // An item in the carousel.
+ message Item {
+ // Required. Additional info about the option item.
+ SelectItemInfo info = 1;
+
+ // Required. Title of the carousel item.
+ string title = 2;
+
+ // Optional. The body text of the card.
+ string description = 3;
+
+ // Optional. The image to display.
+ Image image = 4;
+ }
+
+ // Required. Carousel items.
+ repeated Item items = 1;
+ }
+
+ // Additional info about the select item for when it is triggered in a
+ // dialog.
+ message SelectItemInfo {
+ // Required. A unique key that will be sent back to the agent if this
+ // response is given.
+ string key = 1;
+
+ // Optional. A list of synonyms that can also be used to trigger this
+ // item in dialog.
+ repeated string synonyms = 2;
+ }
+
+ // Represents different platforms that a rich message can be intended for.
+ enum Platform {
+ // Not specified.
+ PLATFORM_UNSPECIFIED = 0;
+
+ // Facebook.
+ FACEBOOK = 1;
+
+ // Slack.
+ SLACK = 2;
+
+ // Telegram.
+ TELEGRAM = 3;
+
+ // Kik.
+ KIK = 4;
+
+ // Skype.
+ SKYPE = 5;
+
+ // Line.
+ LINE = 6;
+
+ // Viber.
+ VIBER = 7;
+
+ // Actions on Google.
+ // When using Actions on Google, you can choose one of the specific
+ // Intent.Message types that mention support for Actions on Google,
+ // or you can use the advanced Intent.Message.payload field.
+ // The payload field provides access to AoG features not available in the
+ // specific message types.
+ // If using the Intent.Message.payload field, it should have a structure
+ // similar to the JSON message shown here. For more information, see
+ // [Actions on Google Webhook
+ // Format](https://developers.google.com/actions/dialogflow/webhook)
+ // {
+ // "expectUserResponse": true,
+ // "isSsml": false,
+ // "noInputPrompts": [],
+ // "richResponse": {
+ // "items": [
+ // {
+ // "simpleResponse": {
+ // "displayText": "hi",
+ // "textToSpeech": "hello"
+ // }
+ // }
+ // ],
+ // "suggestions": [
+ // {
+ // "title": "Say this"
+ // },
+ // {
+ // "title": "or this"
+ // }
+ // ]
+ // },
+ // "systemIntent": {
+ // "data": {
+ // "@type": "type.googleapis.com/google.actions.v2.OptionValueSpec",
+ // "listSelect": {
+ // "items": [
+ // {
+ // "optionInfo": {
+ // "key": "key1",
+ // "synonyms": [
+ // "key one"
+ // ]
+ // },
+ // "title": "must not be empty, but unique"
+ // },
+ // {
+ // "optionInfo": {
+ // "key": "key2",
+ // "synonyms": [
+ // "key two"
+ // ]
+ // },
+ // "title": "must not be empty, but unique"
+ // }
+ // ]
+ // }
+ // },
+ // "intent": "actions.intent.OPTION"
+ // }
+ // }
+ ACTIONS_ON_GOOGLE = 8;
+ }
+
+ // Required. The rich response message.
+ oneof message {
+ // The text response.
+ Text text = 1;
+
+ // The image response.
+ Image image = 2;
+
+ // The quick replies response.
+ QuickReplies quick_replies = 3;
+
+ // The card response.
+ Card card = 4;
+
+ // Returns a response containing a custom, platform-specific payload.
+ // See the Intent.Message.Platform type for a description of the
+ // structure that may be required for your platform.
+ google.protobuf.Struct payload = 5;
+
+ // The voice and text-only responses for Actions on Google.
+ SimpleResponses simple_responses = 7;
+
+ // The basic card response for Actions on Google.
+ BasicCard basic_card = 8;
+
+ // The suggestion chips for Actions on Google.
+ Suggestions suggestions = 9;
+
+ // The link out suggestion chip for Actions on Google.
+ LinkOutSuggestion link_out_suggestion = 10;
+
+ // The list card response for Actions on Google.
+ ListSelect list_select = 11;
+
+ // The carousel card response for Actions on Google.
+ CarouselSelect carousel_select = 12;
+ }
+
+ // Optional. The platform that this message is intended for.
+ Platform platform = 6;
+ }
+
+ // Represents a single followup intent in the chain.
+ message FollowupIntentInfo {
+ // The unique identifier of the followup intent.
+ // Format: `projects//agent/intents/`.
+ string followup_intent_name = 1;
+
+ // The unique identifier of the followup intent parent.
+ // Format: `projects//agent/intents/`.
+ string parent_followup_intent_name = 2;
+ }
+
+ // Represents the different states that webhooks can be in.
+ enum WebhookState {
+ // Webhook is disabled in the agent and in the intent.
+ WEBHOOK_STATE_UNSPECIFIED = 0;
+
+ // Webhook is enabled in the agent and in the intent.
+ WEBHOOK_STATE_ENABLED = 1;
+
+ // Webhook is enabled in the agent and in the intent. Also, each slot
+ // filling prompt is forwarded to the webhook.
+ WEBHOOK_STATE_ENABLED_FOR_SLOT_FILLING = 2;
+ }
+
+ // Required for all methods except `create` (`create` populates the name
+  // automatically).
+ // The unique identifier of this intent.
+ // Format: `projects//agent/intents/`.
+ string name = 1;
+
+ // Required. The name of this intent.
+ string display_name = 2;
+
+ // Required. Indicates whether webhooks are enabled for the intent.
+ WebhookState webhook_state = 6;
+
+ // Optional. The priority of this intent. Higher numbers represent higher
+ // priorities. Zero or negative numbers mean that the intent is disabled.
+ int32 priority = 3;
+
+ // Optional. Indicates whether this is a fallback intent.
+ bool is_fallback = 4;
+
+ // Optional. Indicates whether Machine Learning is disabled for the intent.
+  // Note: If `ml_disabled` setting is set to true, then this intent is not
+ // taken into account during inference in `ML ONLY` match mode. Also,
+ // auto-markup in the UI is turned off.
+ bool ml_disabled = 19;
+
+ // Optional. The list of context names required for this intent to be
+ // triggered.
+ // Format: `projects//agent/sessions/-/contexts/`.
+ repeated string input_context_names = 7;
+
+ // Optional. The collection of event names that trigger the intent.
+ // If the collection of input contexts is not empty, all of the contexts must
+ // be present in the active user session for an event to trigger this intent.
+ repeated string events = 8;
+
+ // Optional. The collection of examples/templates that the agent is
+ // trained on.
+ repeated TrainingPhrase training_phrases = 9;
+
+ // Optional. The name of the action associated with the intent.
+ string action = 10;
+
+ // Optional. The collection of contexts that are activated when the intent
+ // is matched. Context messages in this collection should not set the
+ // parameters field. Setting the `lifespan_count` to 0 will reset the context
+ // when the intent is matched.
+ // Format: `projects//agent/sessions/-/contexts/`.
+ repeated Context output_contexts = 11;
+
+ // Optional. Indicates whether to delete all contexts in the current
+ // session when this intent is matched.
+ bool reset_contexts = 12;
+
+ // Optional. The collection of parameters associated with the intent.
+ repeated Parameter parameters = 13;
+
+ // Optional. The collection of rich messages corresponding to the
+ // `Response` field in the Dialogflow console.
+ repeated Message messages = 14;
+
+ // Optional. The list of platforms for which the first response will be
+ // taken from among the messages assigned to the DEFAULT_PLATFORM.
+ repeated Message.Platform default_response_platforms = 15;
+
+ // The unique identifier of the root intent in the chain of followup intents.
+ // It identifies the correct followup intents chain for this intent.
+ // Format: `projects//agent/intents/`.
+ string root_followup_intent_name = 16;
+
+ // The unique identifier of the parent intent in the chain of followup
+ // intents.
+ // It identifies the parent followup intent.
+ // Format: `projects//agent/intents/`.
+ string parent_followup_intent_name = 17;
+
+ // Optional. Collection of information about all followup intents that have
+ // name of this intent as a root_name.
+ repeated FollowupIntentInfo followup_intent_info = 18;
+}
+
+// The request message for [Intents.ListIntents][google.cloud.dialogflow.v2.Intents.ListIntents].
+message ListIntentsRequest {
+ // Required. The agent to list all intents from.
+ // Format: `projects//agent`.
+ string parent = 1;
+
+ // Optional. The language to list training phrases, parameters and rich
+ // messages for. If not specified, the agent's default language is used.
+ // [More than a dozen
+ // languages](https://dialogflow.com/docs/reference/language) are supported.
+ // Note: languages must be enabled in the agent before they can be used.
+ string language_code = 2;
+
+ // Optional. The resource view to apply to the returned intent.
+ IntentView intent_view = 3;
+
+ // Optional. The maximum number of items to return in a single page. By
+ // default 100 and at most 1000.
+ int32 page_size = 4;
+
+ // Optional. The next_page_token value returned from a previous list request.
+ string page_token = 5;
+}
+
+// The response message for [Intents.ListIntents][google.cloud.dialogflow.v2.Intents.ListIntents].
+message ListIntentsResponse {
+ // The list of agent intents. There will be a maximum number of items
+ // returned based on the page_size field in the request.
+ repeated Intent intents = 1;
+
+ // Token to retrieve the next page of results, or empty if there are no
+ // more results in the list.
+ string next_page_token = 2;
+}
+
+// The request message for [Intents.GetIntent][google.cloud.dialogflow.v2.Intents.GetIntent].
+message GetIntentRequest {
+ // Required. The name of the intent.
+ // Format: `projects//agent/intents/`.
+ string name = 1;
+
+ // Optional. The language to retrieve training phrases, parameters and rich
+ // messages for. If not specified, the agent's default language is used.
+ // [More than a dozen
+ // languages](https://dialogflow.com/docs/reference/language) are supported.
+ // Note: languages must be enabled in the agent, before they can be used.
+ string language_code = 2;
+
+ // Optional. The resource view to apply to the returned intent.
+ IntentView intent_view = 3;
+}
+
+// The request message for [Intents.CreateIntent][google.cloud.dialogflow.v2.Intents.CreateIntent].
+message CreateIntentRequest {
+  // Required. The agent to create an intent for.
+ // Format: `projects//agent`.
+ string parent = 1;
+
+ // Required. The intent to create.
+ Intent intent = 2;
+
+ // Optional. The language of training phrases, parameters and rich messages
+ // defined in `intent`. If not specified, the agent's default language is
+ // used. [More than a dozen
+ // languages](https://dialogflow.com/docs/reference/language) are supported.
+ // Note: languages must be enabled in the agent, before they can be used.
+ string language_code = 3;
+
+ // Optional. The resource view to apply to the returned intent.
+ IntentView intent_view = 4;
+}
+
+// The request message for [Intents.UpdateIntent][google.cloud.dialogflow.v2.Intents.UpdateIntent].
+message UpdateIntentRequest {
+ // Required. The intent to update.
+ // Format: `projects//agent/intents/`.
+ Intent intent = 1;
+
+ // Optional. The language of training phrases, parameters and rich messages
+ // defined in `intent`. If not specified, the agent's default language is
+ // used. [More than a dozen
+ // languages](https://dialogflow.com/docs/reference/language) are supported.
+ // Note: languages must be enabled in the agent, before they can be used.
+ string language_code = 2;
+
+ // Optional. The mask to control which fields get updated.
+ google.protobuf.FieldMask update_mask = 3;
+
+ // Optional. The resource view to apply to the returned intent.
+ IntentView intent_view = 4;
+}
+
+// The request message for [Intents.DeleteIntent][google.cloud.dialogflow.v2.Intents.DeleteIntent].
+message DeleteIntentRequest {
+ // Required. The name of the intent to delete.
+ // Format: `projects//agent/intents/`.
+ string name = 1;
+}
+
+// The request message for [Intents.BatchUpdateIntents][google.cloud.dialogflow.v2.Intents.BatchUpdateIntents].
+message BatchUpdateIntentsRequest {
+ // Required. The name of the agent to update or create intents in.
+ // Format: `projects//agent`.
+ string parent = 1;
+
+ // Required. The source of the intent batch.
+ oneof intent_batch {
+ // The URI to a Google Cloud Storage file containing intents to update or
+ // create. The file format can either be a serialized proto (of IntentBatch
+ // type) or JSON object. Note: The URI must start with "gs://".
+ string intent_batch_uri = 2;
+
+ // The collection of intents to update or create.
+ IntentBatch intent_batch_inline = 3;
+ }
+
+ // Optional. The language of training phrases, parameters and rich messages
+ // defined in `intents`. If not specified, the agent's default language is
+ // used. [More than a dozen
+ // languages](https://dialogflow.com/docs/reference/language) are supported.
+ // Note: languages must be enabled in the agent, before they can be used.
+ string language_code = 4;
+
+ // Optional. The mask to control which fields get updated.
+ google.protobuf.FieldMask update_mask = 5;
+
+ // Optional. The resource view to apply to the returned intent.
+ IntentView intent_view = 6;
+}
+
+// The response message for [Intents.BatchUpdateIntents][google.cloud.dialogflow.v2.Intents.BatchUpdateIntents].
+message BatchUpdateIntentsResponse {
+ // The collection of updated or created intents.
+ repeated Intent intents = 1;
+}
+
+// The request message for [Intents.BatchDeleteIntents][google.cloud.dialogflow.v2.Intents.BatchDeleteIntents].
+message BatchDeleteIntentsRequest {
+  // Required. The name of the agent to delete intents from. Format:
+ // `projects//agent`.
+ string parent = 1;
+
+ // Required. The collection of intents to delete. Only intent `name` must be
+ // filled in.
+ repeated Intent intents = 2;
+}
+
+// This message is a wrapper around a collection of intents.
+message IntentBatch {
+ // A collection of intents.
+ repeated Intent intents = 1;
+}
+
+// Represents the options for views of an intent.
+// An intent can be a sizable object. Therefore, we provide a resource view that
+// does not return training phrases in the response by default.
+enum IntentView {
+ // Training phrases field is not populated in the response.
+ INTENT_VIEW_UNSPECIFIED = 0;
+
+ // All fields are populated.
+ INTENT_VIEW_FULL = 1;
+}
diff --git a/dialogflow/Dialogflow/app/src/main/proto/google/cloud/dialogflow/v2/session.proto b/dialogflow/Dialogflow/app/src/main/proto/google/cloud/dialogflow/v2/session.proto
new file mode 100644
index 00000000..085e3504
--- /dev/null
+++ b/dialogflow/Dialogflow/app/src/main/proto/google/cloud/dialogflow/v2/session.proto
@@ -0,0 +1,478 @@
+// Copyright 2018 Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.cloud.dialogflow.v2;
+
+import "google/api/annotations.proto";
+import "google/cloud/dialogflow/v2/context.proto";
+import "google/cloud/dialogflow/v2/intent.proto";
+import "google/cloud/dialogflow/v2/session_entity_type.proto";
+import "google/protobuf/struct.proto";
+import "google/rpc/status.proto";
+import "google/type/latlng.proto";
+
+option cc_enable_arenas = true;
+option csharp_namespace = "Google.Cloud.Dialogflow.V2";
+option go_package = "google.golang.org/genproto/googleapis/cloud/dialogflow/v2;dialogflow";
+option java_multiple_files = true;
+option java_outer_classname = "SessionProto";
+option java_package = "com.google.cloud.dialogflow.v2";
+option objc_class_prefix = "DF";
+
+
+// A session represents an interaction with a user. You retrieve user input
+// and pass it to the [DetectIntent][google.cloud.dialogflow.v2.Sessions.DetectIntent] (or
+// [StreamingDetectIntent][google.cloud.dialogflow.v2.Sessions.StreamingDetectIntent]) method to determine
+// user intent and respond.
+service Sessions {
+ // Processes a natural language query and returns structured, actionable data
+ // as a result. This method is not idempotent, because it may cause contexts
+ // and session entity types to be updated, which in turn might affect
+ // results of future queries.
+ rpc DetectIntent(DetectIntentRequest) returns (DetectIntentResponse) {
+ option (google.api.http) = {
+ post: "/v2/{session=projects/*/agent/sessions/*}:detectIntent"
+ body: "*"
+ };
+ }
+
+ // Processes a natural language query in audio format in a streaming fashion
+ // and returns structured, actionable data as a result. This method is only
+ // available via the gRPC API (not REST).
+ rpc StreamingDetectIntent(stream StreamingDetectIntentRequest) returns (stream StreamingDetectIntentResponse);
+}
+
+// The request to detect user's intent.
+message DetectIntentRequest {
+ // Required. The name of the session this query is sent to. Format:
+ // `projects//agent/sessions/`. It's up to the API
+ // caller to choose an appropriate session ID. It can be a random number or
+ // some type of user identifier (preferably hashed). The length of the session
+ // ID must not exceed 36 bytes.
+ string session = 1;
+
+ // Optional. The parameters of this query.
+ QueryParameters query_params = 2;
+
+ // Required. The input specification. It can be set to:
+ //
+ // 1. an audio config
+ // which instructs the speech recognizer how to process the speech audio,
+ //
+ // 2. a conversational query in the form of text, or
+ //
+ // 3. an event that specifies which intent to trigger.
+ QueryInput query_input = 3;
+
+ // Optional. The natural language speech audio to be processed. This field
+ // should be populated iff `query_input` is set to an input audio config.
+ // A single request can contain up to 1 minute of speech audio data.
+ bytes input_audio = 5;
+}
+
+// The message returned from the DetectIntent method.
+message DetectIntentResponse {
+ // The unique identifier of the response. It can be used to
+ // locate a response in the training example set or for reporting issues.
+ string response_id = 1;
+
+ // The results of the conversational query or event processing.
+ QueryResult query_result = 2;
+
+ // Specifies the status of the webhook request. `webhook_status`
+ // is never populated in webhook requests.
+ google.rpc.Status webhook_status = 3;
+}
+
+// Represents the parameters of the conversational query.
+message QueryParameters {
+ // Optional. The time zone of this conversational query from the
+ // [time zone database](https://www.iana.org/time-zones), e.g.,
+ // America/New_York, Europe/Paris. If not provided, the time zone specified in
+ // agent settings is used.
+ string time_zone = 1;
+
+ // Optional. The geo location of this conversational query.
+ google.type.LatLng geo_location = 2;
+
+ // Optional. The collection of contexts to be activated before this query is
+ // executed.
+ repeated Context contexts = 3;
+
+ // Optional. Specifies whether to delete all contexts in the current session
+ // before the new ones are activated.
+ bool reset_contexts = 4;
+
+ // Optional. The collection of session entity types to replace or extend
+ // developer entities with for this query only. The entity synonyms apply
+ // to all languages.
+ repeated SessionEntityType session_entity_types = 5;
+
+ // Optional. This field can be used to pass custom data into the webhook
+ // associated with the agent. Arbitrary JSON objects are supported.
+ google.protobuf.Struct payload = 6;
+}
+
+// Represents the query input. It can contain either:
+//
+// 1. An audio config which
+// instructs the speech recognizer how to process the speech audio.
+//
+// 2. A conversational query in the form of text, or
+//
+// 3. An event that specifies which intent to trigger.
+message QueryInput {
+ // Required. The input specification.
+ oneof input {
+ // Instructs the speech recognizer how to process the speech audio.
+ InputAudioConfig audio_config = 1;
+
+ // The natural language text to be processed.
+ TextInput text = 2;
+
+ // The event to be processed.
+ EventInput event = 3;
+ }
+}
+
+// Represents the result of conversational query or event processing.
+message QueryResult {
+ // The original conversational query text:
+ // - If natural language text was provided as input, `query_text` contains
+ // a copy of the input.
+ // - If natural language speech audio was provided as input, `query_text`
+ // contains the speech recognition result. If speech recognizer produced
+ // multiple alternatives, a particular one is picked.
+ // - If an event was provided as input, `query_text` is not set.
+ string query_text = 1;
+
+ // The language that was triggered during intent detection.
+ // See [Language Support](https://dialogflow.com/docs/reference/language)
+ // for a list of the currently supported language codes.
+ string language_code = 15;
+
+ // The Speech recognition confidence between 0.0 and 1.0. A higher number
+ // indicates an estimated greater likelihood that the recognized words are
+ // correct. The default of 0.0 is a sentinel value indicating that confidence
+ // was not set.
+ //
+ // You should not rely on this field as it isn't guaranteed to be accurate, or
+ // even set. In particular this field isn't set in Webhook calls and for
+ // StreamingDetectIntent since the streaming endpoint has separate confidence
+ // estimates per portion of the audio in StreamingRecognitionResult.
+ float speech_recognition_confidence = 2;
+
+ // The action name from the matched intent.
+ string action = 3;
+
+ // The collection of extracted parameters.
+ google.protobuf.Struct parameters = 4;
+
+ // This field is set to:
+ // - `false` if the matched intent has required parameters and not all of
+ // the required parameter values have been collected.
+ // - `true` if all required parameter values have been collected, or if the
+ // matched intent doesn't contain any required parameters.
+ bool all_required_params_present = 5;
+
+ // The text to be pronounced to the user or shown on the screen.
+ string fulfillment_text = 6;
+
+ // The collection of rich messages to present to the user.
+ repeated Intent.Message fulfillment_messages = 7;
+
+ // If the query was fulfilled by a webhook call, this field is set to the
+ // value of the `source` field returned in the webhook response.
+ string webhook_source = 8;
+
+ // If the query was fulfilled by a webhook call, this field is set to the
+ // value of the `payload` field returned in the webhook response.
+ google.protobuf.Struct webhook_payload = 9;
+
+ // The collection of output contexts. If applicable,
+ // `output_contexts.parameters` contains entries with name
+ // `.original` containing the original parameter values
+ // before the query.
+ repeated Context output_contexts = 10;
+
+ // The intent that matched the conversational query. Some, not
+ // all fields are filled in this message, including but not limited to:
+ // `name`, `display_name` and `webhook_state`.
+ Intent intent = 11;
+
+ // The intent detection confidence. Values range from 0.0
+ // (completely uncertain) to 1.0 (completely certain).
+ float intent_detection_confidence = 12;
+
+ // The free-form diagnostic info. For example, this field
+ // could contain webhook call latency.
+ google.protobuf.Struct diagnostic_info = 14;
+}
+
+// The top-level message sent by the client to the
+// `StreamingDetectIntent` method.
+//
+// Multiple request messages should be sent in order:
+//
+// 1. The first message must contain `session`, `query_input` plus optionally
+// `query_params` and/or `single_utterance`. The message must not contain `input_audio`.
+//
+// 2. If `query_input` was set to a streaming input audio config,
+// all subsequent messages must contain only `input_audio`.
+// Otherwise, finish the request stream.
+message StreamingDetectIntentRequest {
+ // Required. The name of the session the query is sent to.
+ // Format of the session name:
+ // `projects/<Project ID>/agent/sessions/<Session ID>`. It's up to the API
+ // caller to choose an appropriate `Session ID`. It can be a random number or
+ // some type of user identifier (preferably hashed). The length of the session
+ // ID must not exceed 36 characters.
+ string session = 1;
+
+ // Optional. The parameters of this query.
+ QueryParameters query_params = 2;
+
+ // Required. The input specification. It can be set to:
+ //
+ // 1. an audio config which instructs the speech recognizer how to process
+ // the speech audio,
+ //
+ // 2. a conversational query in the form of text, or
+ //
+ // 3. an event that specifies which intent to trigger.
+ QueryInput query_input = 3;
+
+ // Optional. If `false` (default), recognition does not cease until the
+ // client closes the stream.
+ // If `true`, the recognizer will detect a single spoken utterance in input
+ // audio. Recognition ceases when it detects the audio's voice has
+ // stopped or paused. In this case, once a detected intent is received, the
+ // client should close the stream and start a new request with a new stream as
+ // needed.
+ // This setting is ignored when `query_input` is a piece of text or an event.
+ bool single_utterance = 4;
+
+ // Optional. The input audio content to be recognized. Must be sent if
+ // `query_input` was set to a streaming input audio config. The complete audio
+ // over all streaming messages must not exceed 1 minute.
+ // NOTE(review): field number 5 is unused here — presumably reserved
+ // upstream; do not reuse it for new fields.
+ bytes input_audio = 6;
+}
+
+// The top-level message returned from the
+// `StreamingDetectIntent` method.
+//
+// Multiple response messages can be returned in order:
+//
+// 1. If the input was set to streaming audio, the first one or more messages
+// contain `recognition_result`. Each `recognition_result` represents a more
+// complete transcript of what the user said. The last `recognition_result`
+// has `is_final` set to `true`.
+//
+// 2. The next message contains `response_id`, `query_result`
+// and optionally `webhook_status` if a WebHook was called.
+message StreamingDetectIntentResponse {
+ // The unique identifier of the response. It can be used to
+ // locate a response in the training example set or for reporting issues.
+ string response_id = 1;
+
+ // The result of speech recognition.
+ // Populated only while streaming audio input is being processed; see the
+ // message-level comment above for the ordering of response messages.
+ StreamingRecognitionResult recognition_result = 2;
+
+ // The result of the conversational query or event processing.
+ QueryResult query_result = 3;
+
+ // Specifies the status of the webhook request.
+ // Populated only if a webhook was called for this query.
+ google.rpc.Status webhook_status = 4;
+}
+
+// Contains a speech recognition result corresponding to a portion of the audio
+// that is currently being processed or an indication that this is the end
+// of the single requested utterance.
+//
+// Example:
+//
+// 1. transcript: "tube"
+//
+// 2. transcript: "to be a"
+//
+// 3. transcript: "to be"
+//
+// 4. transcript: "to be or not to be"
+// is_final: true
+//
+// 5. transcript: " that's"
+//
+// 6. transcript: " that is"
+//
+// 7. message_type: `END_OF_SINGLE_UTTERANCE`
+//
+// 8. transcript: " that is the question"
+// is_final: true
+//
+// Only two of the responses contain final results (#4 and #8 indicated by
+// `is_final: true`). Concatenating these generates the full transcript: "to be
+// or not to be that is the question".
+//
+// In each response we populate:
+//
+// * for `MESSAGE_TYPE_TRANSCRIPT`: `transcript` and possibly `is_final`.
+//
+// * for `MESSAGE_TYPE_END_OF_SINGLE_UTTERANCE`: only `event_type`.
+message StreamingRecognitionResult {
+ // Type of the response message.
+ enum MessageType {
+ // Not specified. Should never be used.
+ MESSAGE_TYPE_UNSPECIFIED = 0;
+
+ // Message contains a (possibly partial) transcript.
+ TRANSCRIPT = 1;
+
+ // Event indicates that the server has detected the end of the user's speech
+ // utterance and expects no additional speech. Therefore, the server will
+ // not process additional audio (although it may subsequently return
+ // additional results). The client should stop sending additional audio
+ // data, half-close the gRPC connection, and wait for any additional results
+ // until the server closes the gRPC connection. This message is only sent if
+ // `single_utterance` was set to `true`, and is not used otherwise.
+ END_OF_SINGLE_UTTERANCE = 2;
+ }
+
+ // Type of the result message.
+ MessageType message_type = 1;
+
+ // Transcript text representing the words that the user spoke.
+ // Populated if and only if `message_type` = `TRANSCRIPT`.
+ string transcript = 2;
+
+ // If `false`, the `StreamingRecognitionResult` represents an
+ // interim result that may change. If `true`, the recognizer will not return
+ // any further hypotheses about this piece of the audio. May only be populated
+ // for `message_type` = `TRANSCRIPT`.
+ bool is_final = 3;
+
+ // The Speech confidence between 0.0 and 1.0 for the current portion of audio.
+ // A higher number indicates an estimated greater likelihood that the
+ // recognized words are correct. The default of 0.0 is a sentinel value
+ // indicating that confidence was not set.
+ //
+ // This field is typically only provided if `is_final` is true and you should
+ // not rely on it being accurate or even set.
+ float confidence = 4;
+}
+
+// Instructs the speech recognizer how to process the audio content.
+message InputAudioConfig {
+ // Required. Audio encoding of the audio content to process.
+ // See the `AudioEncoding` enum for the constraints some encodings place on
+ // `sample_rate_hertz`.
+ AudioEncoding audio_encoding = 1;
+
+ // Required. Sample rate (in Hertz) of the audio content sent in the query.
+ // Refer to [Cloud Speech API documentation](/speech/docs/basics) for more
+ // details.
+ int32 sample_rate_hertz = 2;
+
+ // Required. The language of the supplied audio. Dialogflow does not do
+ // translations. See [Language
+ // Support](https://dialogflow.com/docs/languages) for a list of the
+ // currently supported language codes. Note that queries in the same session
+ // do not necessarily need to specify the same language.
+ string language_code = 3;
+
+ // Optional. The collection of phrase hints which are used to boost accuracy
+ // of speech recognition.
+ // Refer to [Cloud Speech API documentation](/speech/docs/basics#phrase-hints)
+ // for more details.
+ repeated string phrase_hints = 4;
+}
+
+// Represents the natural language text to be processed.
+message TextInput {
+ // Required. The UTF-8 encoded natural language text to be processed.
+ // Text length must not exceed 256 bytes (a limit on the UTF-8 encoding,
+ // not on the character count).
+ string text = 1;
+
+ // Required. The language of this conversational query. See [Language
+ // Support](https://dialogflow.com/docs/languages) for a list of the
+ // currently supported language codes. Note that queries in the same session
+ // do not necessarily need to specify the same language.
+ string language_code = 2;
+}
+
+// Events allow for matching intents by event name instead of the natural
+// language input. For instance, input `<event: { name: "welcome_event",
+// parameters: { name: "Sam" } }>` can trigger a personalized welcome response.
+// The parameter `name` may be used by the agent in the response:
+// `“Hello #welcome_event.name! What can I do for you today?”`.
+message EventInput {
+ // Required. The unique identifier of the event.
+ string name = 1;
+
+ // Optional. The collection of parameters associated with the event.
+ google.protobuf.Struct parameters = 2;
+
+ // Required. The language of this query. Note that unlike `parameters`,
+ // this field is required even for event-triggered queries. See [Language
+ // Support](https://dialogflow.com/docs/languages) for a list of the
+ // currently supported language codes. Note that queries in the same session
+ // do not necessarily need to specify the same language.
+ string language_code = 3;
+}
+
+// Audio encoding of the audio content sent in the conversational query request.
+// Refer to the [Cloud Speech API documentation](/speech/docs/basics) for more
+// details.
+enum AudioEncoding {
+ // Not specified.
+ AUDIO_ENCODING_UNSPECIFIED = 0;
+
+ // Uncompressed 16-bit signed little-endian samples (Linear PCM).
+ // This is the `LINEAR16` encoding referred to in the `FLAC` note below.
+ AUDIO_ENCODING_LINEAR_16 = 1;
+
+ // [`FLAC`](https://xiph.org/flac/documentation.html) (Free Lossless Audio
+ // Codec) is the recommended encoding because it is lossless (therefore
+ // recognition is not compromised) and requires only about half the
+ // bandwidth of `LINEAR16`. `FLAC` stream encoding supports 16-bit and
+ // 24-bit samples, however, not all fields in `STREAMINFO` are supported.
+ AUDIO_ENCODING_FLAC = 2;
+
+ // 8-bit samples that compand 14-bit audio samples using G.711 PCMU/mu-law.
+ AUDIO_ENCODING_MULAW = 3;
+
+ // Adaptive Multi-Rate Narrowband codec. `sample_rate_hertz` must be 8000.
+ AUDIO_ENCODING_AMR = 4;
+
+ // Adaptive Multi-Rate Wideband codec. `sample_rate_hertz` must be 16000.
+ AUDIO_ENCODING_AMR_WB = 5;
+
+ // Opus encoded audio frames in Ogg container
+ // ([OggOpus](https://wiki.xiph.org/OggOpus)).
+ // `sample_rate_hertz` must be 16000.
+ AUDIO_ENCODING_OGG_OPUS = 6;
+
+ // Although the use of lossy encodings is not recommended, if a very low
+ // bitrate encoding is required, `OGG_OPUS` is highly preferred over
+ // Speex encoding. The [Speex](https://speex.org/) encoding supported by
+ // Dialogflow API has a header byte in each block, as in MIME type
+ // `audio/x-speex-with-header-byte`.
+ // It is a variant of the RTP Speex encoding defined in
+ // [RFC 5574](https://tools.ietf.org/html/rfc5574).
+ // The stream is a sequence of blocks, one block per RTP packet. Each block
+ // starts with a byte containing the length of the block, in bytes, followed
+ // by one or more frames of Speex data, padded to an integral number of
+ // bytes (octets) as specified in RFC 5574. In other words, each RTP header
+ // is replaced with a single byte containing the block length. Only Speex
+ // wideband is supported. `sample_rate_hertz` must be 16000.
+ AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE = 7;
+}
diff --git a/dialogflow/Dialogflow/app/src/main/proto/google/cloud/dialogflow/v2/session_entity_type.proto b/dialogflow/Dialogflow/app/src/main/proto/google/cloud/dialogflow/v2/session_entity_type.proto
new file mode 100644
index 00000000..89408df6
--- /dev/null
+++ b/dialogflow/Dialogflow/app/src/main/proto/google/cloud/dialogflow/v2/session_entity_type.proto
@@ -0,0 +1,184 @@
+// Copyright 2018 Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.cloud.dialogflow.v2;
+
+import "google/api/annotations.proto";
+import "google/cloud/dialogflow/v2/entity_type.proto";
+import "google/protobuf/empty.proto";
+import "google/protobuf/field_mask.proto";
+
+option cc_enable_arenas = true;
+option csharp_namespace = "Google.Cloud.Dialogflow.V2";
+option go_package = "google.golang.org/genproto/googleapis/cloud/dialogflow/v2;dialogflow";
+option java_multiple_files = true;
+option java_outer_classname = "SessionEntityTypeProto";
+option java_package = "com.google.cloud.dialogflow.v2";
+option objc_class_prefix = "DF";
+
+
+// Entities are extracted from user input and represent parameters that are
+// meaningful to your application. For example, a date range, a proper name
+// such as a geographic location or landmark, and so on. Entities represent
+// actionable data for your application.
+//
+// Session entity types are referred to as **User** entity types and are
+// entities that are built for an individual user such as
+// favorites, preferences, playlists, and so on. You can redefine a session
+// entity type at the session level.
+//
+// For more information about entity types, see the
+// [Dialogflow documentation](https://dialogflow.com/docs/entities).
+service SessionEntityTypes {
+ // Returns the list of all session entity types in the specified session.
+ // Results are paginated via `page_size`/`page_token` on the request.
+ rpc ListSessionEntityTypes(ListSessionEntityTypesRequest) returns (ListSessionEntityTypesResponse) {
+ option (google.api.http) = {
+ get: "/v2/{parent=projects/*/agent/sessions/*}/entityTypes"
+ };
+ }
+
+ // Retrieves the specified session entity type.
+ rpc GetSessionEntityType(GetSessionEntityTypeRequest) returns (SessionEntityType) {
+ option (google.api.http) = {
+ get: "/v2/{name=projects/*/agent/sessions/*/entityTypes/*}"
+ };
+ }
+
+ // Creates a session entity type.
+ rpc CreateSessionEntityType(CreateSessionEntityTypeRequest) returns (SessionEntityType) {
+ option (google.api.http) = {
+ post: "/v2/{parent=projects/*/agent/sessions/*}/entityTypes"
+ body: "session_entity_type"
+ };
+ }
+
+ // Updates the specified session entity type.
+ // NOTE(review): presumably only the fields named in
+ // `UpdateSessionEntityTypeRequest.update_mask` are changed — confirm the
+ // behavior when the mask is unset.
+ rpc UpdateSessionEntityType(UpdateSessionEntityTypeRequest) returns (SessionEntityType) {
+ option (google.api.http) = {
+ patch: "/v2/{session_entity_type.name=projects/*/agent/sessions/*/entityTypes/*}"
+ body: "session_entity_type"
+ };
+ }
+
+ // Deletes the specified session entity type.
+ rpc DeleteSessionEntityType(DeleteSessionEntityTypeRequest) returns (google.protobuf.Empty) {
+ option (google.api.http) = {
+ delete: "/v2/{name=projects/*/agent/sessions/*/entityTypes/*}"
+ };
+ }
+}
+
+// Represents a session entity type.
+//
+// Extends or replaces a developer entity type at the user session level (we
+// refer to the entity types defined at the agent level as "developer entity
+// types").
+//
+// Note: session entity types apply to all queries, regardless of the language.
+message SessionEntityType {
+ // The types of modifications for a session entity type.
+ enum EntityOverrideMode {
+ // Not specified. This value should never be used.
+ ENTITY_OVERRIDE_MODE_UNSPECIFIED = 0;
+
+ // The collection of session entities overrides the collection of entities
+ // in the corresponding developer entity type.
+ ENTITY_OVERRIDE_MODE_OVERRIDE = 1;
+
+ // The collection of session entities extends the collection of entities in
+ // the corresponding developer entity type.
+ // Calls to `ListSessionEntityTypes`, `GetSessionEntityType`,
+ // `CreateSessionEntityType` and `UpdateSessionEntityType` return the full
+ // collection of entities from the developer entity type in the agent's
+ // default language and the session entity type.
+ ENTITY_OVERRIDE_MODE_SUPPLEMENT = 2;
+ }
+
+ // Required. The unique identifier of this session entity type. Format:
+ // `projects/<Project ID>/agent/sessions/<Session ID>/entityTypes/<Entity
+ // Type Display Name>`.
+ string name = 1;
+
+ // Required. Indicates whether the additional data should override or
+ // supplement the developer entity type definition.
+ EntityOverrideMode entity_override_mode = 2;
+
+ // Required. The collection of entities associated with this session entity
+ // type.
+ repeated EntityType.Entity entities = 3;
+}
+
+// The request message for [SessionEntityTypes.ListSessionEntityTypes][google.cloud.dialogflow.v2.SessionEntityTypes.ListSessionEntityTypes].
+message ListSessionEntityTypesRequest {
+ // Required. The session to list all session entity types from.
+ // Format: `projects/<Project ID>/agent/sessions/<Session ID>`.
+ string parent = 1;
+
+ // Optional. The maximum number of items to return in a single page. By
+ // default 100 and at most 1000.
+ int32 page_size = 2;
+
+ // Optional. The next_page_token value returned from a previous list request.
+ string page_token = 3;
+}
+
+// The response message for [SessionEntityTypes.ListSessionEntityTypes][google.cloud.dialogflow.v2.SessionEntityTypes.ListSessionEntityTypes].
+message ListSessionEntityTypesResponse {
+ // The list of session entity types. There will be a maximum number of items
+ // returned based on the `page_size` field in the request.
+ repeated SessionEntityType session_entity_types = 1;
+
+ // Token to retrieve the next page of results, or empty if there are no
+ // more results in the list.
+ string next_page_token = 2;
+}
+
+// The request message for [SessionEntityTypes.GetSessionEntityType][google.cloud.dialogflow.v2.SessionEntityTypes.GetSessionEntityType].
+message GetSessionEntityTypeRequest {
+ // Required. The name of the session entity type. Format:
+ // `projects/<Project ID>/agent/sessions/<Session ID>/entityTypes/<Entity
+ // Type Display Name>`.
+ string name = 1;
+}
+
+// The request message for [SessionEntityTypes.CreateSessionEntityType][google.cloud.dialogflow.v2.SessionEntityTypes.CreateSessionEntityType].
+message CreateSessionEntityTypeRequest {
+ // Required. The session to create a session entity type for.
+ // Format: `projects/<Project ID>/agent/sessions/<Session ID>`.
+ string parent = 1;
+
+ // Required. The session entity type to create.
+ SessionEntityType session_entity_type = 2;
+}
+
+// The request message for [SessionEntityTypes.UpdateSessionEntityType][google.cloud.dialogflow.v2.SessionEntityTypes.UpdateSessionEntityType].
+message UpdateSessionEntityTypeRequest {
+ // Required. The entity type to update. Format:
+ // `projects/<Project ID>/agent/sessions/<Session ID>/entityTypes/<Entity
+ // Type Display Name>`.
+ SessionEntityType session_entity_type = 1;
+
+ // Optional. The mask to control which fields get updated.
+ google.protobuf.FieldMask update_mask = 2;
+}
+
+// The request message for [SessionEntityTypes.DeleteSessionEntityType][google.cloud.dialogflow.v2.SessionEntityTypes.DeleteSessionEntityType].
+message DeleteSessionEntityTypeRequest {
+ // Required. The name of the entity type to delete. Format:
+ // `projects/<Project ID>/agent/sessions/<Session ID>/entityTypes/<Entity
+ // Type Display Name>`.
+ string name = 1;
+}
diff --git a/dialogflow/Dialogflow/app/src/main/proto/google/cloud/dialogflow/v2/webhook.proto b/dialogflow/Dialogflow/app/src/main/proto/google/cloud/dialogflow/v2/webhook.proto
new file mode 100644
index 00000000..24314f86
--- /dev/null
+++ b/dialogflow/Dialogflow/app/src/main/proto/google/cloud/dialogflow/v2/webhook.proto
@@ -0,0 +1,111 @@
+// Copyright 2018 Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.cloud.dialogflow.v2;
+
+import "google/api/annotations.proto";
+import "google/cloud/dialogflow/v2/context.proto";
+import "google/cloud/dialogflow/v2/intent.proto";
+import "google/cloud/dialogflow/v2/session.proto";
+import "google/protobuf/struct.proto";
+
+option cc_enable_arenas = true;
+option csharp_namespace = "Google.Cloud.Dialogflow.V2";
+option go_package = "google.golang.org/genproto/googleapis/cloud/dialogflow/v2;dialogflow";
+option java_multiple_files = true;
+option java_outer_classname = "WebhookProto";
+option java_package = "com.google.cloud.dialogflow.v2";
+option objc_class_prefix = "DF";
+
+
+// The request message for a webhook call.
+message WebhookRequest {
+ // The unique identifier of detectIntent request session.
+ // Can be used to identify end-user inside webhook implementation.
+ // Format: `projects/<Project ID>/agent/sessions/<Session ID>`.
+ // (Field numbers are intentionally non-sequential; wire order is
+ // irrelevant — do not renumber.)
+ string session = 4;
+
+ // The unique identifier of the response. Contains the same value as
+ // `[Streaming]DetectIntentResponse.response_id`.
+ string response_id = 1;
+
+ // The result of the conversational query or event processing. Contains the
+ // same value as `[Streaming]DetectIntentResponse.query_result`.
+ QueryResult query_result = 2;
+
+ // Optional. The contents of the original request that was passed to
+ // `[Streaming]DetectIntent` call.
+ OriginalDetectIntentRequest original_detect_intent_request = 3;
+}
+
+// The response message for a webhook call.
+message WebhookResponse {
+ // Optional. The text to be shown on the screen. This value is passed directly
+ // to `QueryResult.fulfillment_text`.
+ string fulfillment_text = 1;
+
+ // Optional. The collection of rich messages to present to the user. This
+ // value is passed directly to `QueryResult.fulfillment_messages`.
+ repeated Intent.Message fulfillment_messages = 2;
+
+ // Optional. This value is passed directly to `QueryResult.webhook_source`.
+ string source = 3;
+
+ // Optional. This value is passed directly to `QueryResult.webhook_payload`.
+ // See the related `fulfillment_messages[i].payload` field, which may be used
+ // as an alternative to this field.
+ //
+ // This field can be used for Actions on Google responses.
+ // It should have a structure similar to the JSON message shown here. For more
+ // information, see
+ // [Actions on Google Webhook
+ // Format](https://developers.google.com/actions/dialogflow/webhook)
+ // {
+ // "google": {
+ // "expectUserResponse": true,
+ // "richResponse": {
+ // "items": [
+ // {
+ // "simpleResponse": {
+ // "textToSpeech": "this is a simple response"
+ // }
+ // }
+ // ]
+ // }
+ // }
+ // }
+ google.protobuf.Struct payload = 4;
+
+ // Optional. The collection of output contexts. This value is passed directly
+ // to `QueryResult.output_contexts`.
+ repeated Context output_contexts = 5;
+
+ // Optional. Makes the platform immediately invoke another `DetectIntent` call
+ // internally with the specified event as input.
+ EventInput followup_event_input = 6;
+}
+
+// Represents the contents of the original request that was passed to
+// the `[Streaming]DetectIntent` call.
+message OriginalDetectIntentRequest {
+ // The source of this request, e.g., `google`, `facebook`, `slack`. It is set
+ // by Dialogflow-owned servers.
+ string source = 1;
+
+ // Optional. This field is set to the value of `QueryParameters.payload` field
+ // passed in the request.
+ // NOTE(review): field number 2 is unused here — presumably reserved
+ // upstream; do not reuse it.
+ google.protobuf.Struct payload = 3;
+}
diff --git a/dialogflow/Dialogflow/app/src/main/res/drawable/bubble_incoming.xml b/dialogflow/Dialogflow/app/src/main/res/drawable/bubble_incoming.xml
new file mode 100644
index 00000000..3d8f0a1d
--- /dev/null
+++ b/dialogflow/Dialogflow/app/src/main/res/drawable/bubble_incoming.xml
@@ -0,0 +1,36 @@
+
+
+
+ -
+
+
+
+
+
+
+ -
+
+
+
+
+
+
diff --git a/dialogflow/Dialogflow/app/src/main/res/drawable/bubble_outgoing.xml b/dialogflow/Dialogflow/app/src/main/res/drawable/bubble_outgoing.xml
new file mode 100644
index 00000000..a9eb9b61
--- /dev/null
+++ b/dialogflow/Dialogflow/app/src/main/res/drawable/bubble_outgoing.xml
@@ -0,0 +1,36 @@
+
+
+
+ -
+
+
+
+
+
+
+ -
+
+
+
+
+
+
diff --git a/dialogflow/Dialogflow/app/src/main/res/drawable/ic_keyboard.xml b/dialogflow/Dialogflow/app/src/main/res/drawable/ic_keyboard.xml
new file mode 100644
index 00000000..28916547
--- /dev/null
+++ b/dialogflow/Dialogflow/app/src/main/res/drawable/ic_keyboard.xml
@@ -0,0 +1,26 @@
+
+
+
+
+
diff --git a/dialogflow/Dialogflow/app/src/main/res/drawable/ic_mic.xml b/dialogflow/Dialogflow/app/src/main/res/drawable/ic_mic.xml
new file mode 100644
index 00000000..ee17a8f2
--- /dev/null
+++ b/dialogflow/Dialogflow/app/src/main/res/drawable/ic_mic.xml
@@ -0,0 +1,26 @@
+
+
+
+
+
diff --git a/dialogflow/Dialogflow/app/src/main/res/drawable/ic_send.xml b/dialogflow/Dialogflow/app/src/main/res/drawable/ic_send.xml
new file mode 100644
index 00000000..573011d2
--- /dev/null
+++ b/dialogflow/Dialogflow/app/src/main/res/drawable/ic_send.xml
@@ -0,0 +1,26 @@
+
+
+
+
+
diff --git a/dialogflow/Dialogflow/app/src/main/res/layout/activity_main.xml b/dialogflow/Dialogflow/app/src/main/res/layout/activity_main.xml
new file mode 100644
index 00000000..6720d72c
--- /dev/null
+++ b/dialogflow/Dialogflow/app/src/main/res/layout/activity_main.xml
@@ -0,0 +1,78 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dialogflow/Dialogflow/app/src/main/res/layout/item_conversation.xml b/dialogflow/Dialogflow/app/src/main/res/layout/item_conversation.xml
new file mode 100644
index 00000000..39539403
--- /dev/null
+++ b/dialogflow/Dialogflow/app/src/main/res/layout/item_conversation.xml
@@ -0,0 +1,37 @@
+
+
+
+
+
+
+
diff --git a/dialogflow/Dialogflow/app/src/main/res/mipmap-hdpi/ic_launcher.png b/dialogflow/Dialogflow/app/src/main/res/mipmap-hdpi/ic_launcher.png
new file mode 100644
index 00000000..41251130
Binary files /dev/null and b/dialogflow/Dialogflow/app/src/main/res/mipmap-hdpi/ic_launcher.png differ
diff --git a/dialogflow/Dialogflow/app/src/main/res/mipmap-mdpi/ic_launcher.png b/dialogflow/Dialogflow/app/src/main/res/mipmap-mdpi/ic_launcher.png
new file mode 100644
index 00000000..33da5569
Binary files /dev/null and b/dialogflow/Dialogflow/app/src/main/res/mipmap-mdpi/ic_launcher.png differ
diff --git a/dialogflow/Dialogflow/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/dialogflow/Dialogflow/app/src/main/res/mipmap-xhdpi/ic_launcher.png
new file mode 100644
index 00000000..40897d71
Binary files /dev/null and b/dialogflow/Dialogflow/app/src/main/res/mipmap-xhdpi/ic_launcher.png differ
diff --git a/dialogflow/Dialogflow/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/dialogflow/Dialogflow/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
new file mode 100644
index 00000000..584eae61
Binary files /dev/null and b/dialogflow/Dialogflow/app/src/main/res/mipmap-xxhdpi/ic_launcher.png differ
diff --git a/dialogflow/Dialogflow/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/dialogflow/Dialogflow/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
new file mode 100644
index 00000000..a171d040
Binary files /dev/null and b/dialogflow/Dialogflow/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png differ
diff --git a/dialogflow/Dialogflow/app/src/main/res/values/attrs.xml b/dialogflow/Dialogflow/app/src/main/res/values/attrs.xml
new file mode 100644
index 00000000..be2ccf3b
--- /dev/null
+++ b/dialogflow/Dialogflow/app/src/main/res/values/attrs.xml
@@ -0,0 +1,26 @@
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/dialogflow/Dialogflow/app/src/main/res/values/colors.xml b/dialogflow/Dialogflow/app/src/main/res/values/colors.xml
new file mode 100644
index 00000000..9742b4f5
--- /dev/null
+++ b/dialogflow/Dialogflow/app/src/main/res/values/colors.xml
@@ -0,0 +1,26 @@
+
+
+
+ #FF5722
+ #E64A19
+ #536DFE
+
+ #FBE9E7
+ #EEEEEE
+ #E0E0E0
+ #616161
+
diff --git a/dialogflow/Dialogflow/app/src/main/res/values/dimens.xml b/dialogflow/Dialogflow/app/src/main/res/values/dimens.xml
new file mode 100644
index 00000000..7e263753
--- /dev/null
+++ b/dialogflow/Dialogflow/app/src/main/res/values/dimens.xml
@@ -0,0 +1,29 @@
+
+
+
+ 4dp
+ 8dp
+ 16dp
+
+ 4dp
+ 48dp
+ 64dp
+
+ 16dp
+ 16dp
+ 24dp
+
diff --git a/dialogflow/Dialogflow/app/src/main/res/values/strings.xml b/dialogflow/Dialogflow/app/src/main/res/values/strings.xml
new file mode 100644
index 00000000..4676c3bf
--- /dev/null
+++ b/dialogflow/Dialogflow/app/src/main/res/values/strings.xml
@@ -0,0 +1,25 @@
+
+
+
+ Dialogflow
+ Write a message
+ Speak
+ This app needs to record audio and recognize your speech.
+ Voice
+ Send
+ Keyboard
+
diff --git a/dialogflow/Dialogflow/app/src/main/res/values/styles.xml b/dialogflow/Dialogflow/app/src/main/res/values/styles.xml
new file mode 100644
index 00000000..1e68ac30
--- /dev/null
+++ b/dialogflow/Dialogflow/app/src/main/res/values/styles.xml
@@ -0,0 +1,25 @@
+
+
+
+
+
+
+
diff --git a/dialogflow/Dialogflow/app/src/main/res/xml/backup_scheme.xml b/dialogflow/Dialogflow/app/src/main/res/xml/backup_scheme.xml
new file mode 100644
index 00000000..95e99ca6
--- /dev/null
+++ b/dialogflow/Dialogflow/app/src/main/res/xml/backup_scheme.xml
@@ -0,0 +1,22 @@
+
+
+
+
+
+
diff --git a/dialogflow/Dialogflow/build.gradle b/dialogflow/Dialogflow/build.gradle
new file mode 100644
index 00000000..e531f372
--- /dev/null
+++ b/dialogflow/Dialogflow/build.gradle
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2017 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Top-level build file where you can add configuration options common to all sub-projects/modules.
+
+buildscript {
+ repositories {
+ // NOTE(review): JCenter is sunset/read-only; consider mavenCentral() —
+ // confirm both plugins are available there before switching.
+ jcenter()
+ google()
+ }
+ dependencies {
+ // Android Gradle plugin.
+ classpath 'com.android.tools.build:gradle:3.1.4'
+ // Compiles the bundled .proto files (app/src/main/proto) into Java classes.
+ classpath 'com.google.protobuf:protobuf-gradle-plugin:0.8.2'
+ }
+}
+
+// Repositories used to resolve dependencies of all modules.
+allprojects {
+ repositories {
+ jcenter()
+ google()
+ }
+}
+
+// Deletes the root build directory when `./gradlew clean` is run.
+task clean(type: Delete) {
+ delete rootProject.buildDir
+}
diff --git a/dialogflow/Dialogflow/gradle.properties b/dialogflow/Dialogflow/gradle.properties
new file mode 100644
index 00000000..d2fc6cf1
--- /dev/null
+++ b/dialogflow/Dialogflow/gradle.properties
@@ -0,0 +1,21 @@
+# Copyright 2017 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Specifies the JVM arguments used for the daemon process.
+# The setting is particularly useful for tweaking memory settings.
+org.gradle.jvmargs=-Xmx1536m
+
+# Configuration for the Dialogflow (formerly API.AI) agent
+dialogflowProjectName=android-docs-samples
+dialogflowLanguageCode=en-US
diff --git a/dialogflow/Dialogflow/gradle/wrapper/gradle-wrapper.jar b/dialogflow/Dialogflow/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 00000000..13372aef
Binary files /dev/null and b/dialogflow/Dialogflow/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/dialogflow/Dialogflow/gradle/wrapper/gradle-wrapper.properties b/dialogflow/Dialogflow/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 00000000..c70ba129
--- /dev/null
+++ b/dialogflow/Dialogflow/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
+#Thu Aug 23 10:15:48 JST 2018
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-4.4-all.zip
diff --git a/dialogflow/Dialogflow/gradlew b/dialogflow/Dialogflow/gradlew
new file mode 100755
index 00000000..9d82f789
--- /dev/null
+++ b/dialogflow/Dialogflow/gradlew
@@ -0,0 +1,160 @@
+#!/usr/bin/env bash
+
+##############################################################################
+##
+## Gradle start up script for UN*X
+##
+##############################################################################
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn ( ) {
+ echo "$*"
+}
+
+die ( ) {
+ echo
+ echo "$*"
+ echo
+ exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+case "`uname`" in
+ CYGWIN* )
+ cygwin=true
+ ;;
+ Darwin* )
+ darwin=true
+ ;;
+ MINGW* )
+ msys=true
+ ;;
+esac
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+ ls=`ls -ld "$PRG"`
+ link=`expr "$ls" : '.*-> \(.*\)$'`
+ if expr "$link" : '/.*' > /dev/null; then
+ PRG="$link"
+ else
+ PRG=`dirname "$PRG"`"/$link"
+ fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD="$JAVA_HOME/jre/sh/java"
+ else
+ JAVACMD="$JAVA_HOME/bin/java"
+ fi
+ if [ ! -x "$JAVACMD" ] ; then
+ die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+ fi
+else
+ JAVACMD="java"
+ which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
+ MAX_FD_LIMIT=`ulimit -H -n`
+ if [ $? -eq 0 ] ; then
+ if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+ MAX_FD="$MAX_FD_LIMIT"
+ fi
+ ulimit -n $MAX_FD
+ if [ $? -ne 0 ] ; then
+ warn "Could not set maximum file descriptor limit: $MAX_FD"
+ fi
+ else
+ warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+ fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+ GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+ APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+ CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+ JAVACMD=`cygpath --unix "$JAVACMD"`
+
+ # We build the pattern for arguments to be converted via cygpath
+ ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+ SEP=""
+ for dir in $ROOTDIRSRAW ; do
+ ROOTDIRS="$ROOTDIRS$SEP$dir"
+ SEP="|"
+ done
+ OURCYGPATTERN="(^($ROOTDIRS))"
+ # Add a user-defined pattern to the cygpath arguments
+ if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+ OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+ fi
+ # Now convert the arguments - kludge to limit ourselves to /bin/sh
+ i=0
+ for arg in "$@" ; do
+ CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+ CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
+
+ if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
+ eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+ else
+ eval `echo args$i`="\"$arg\""
+ fi
+ i=$((i+1))
+ done
+ case $i in
+ (0) set -- ;;
+ (1) set -- "$args0" ;;
+ (2) set -- "$args0" "$args1" ;;
+ (3) set -- "$args0" "$args1" "$args2" ;;
+ (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+ (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+ (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+ (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+ (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+ (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+ esac
+fi
+
+# Split up the JVM_OPTS and GRADLE_OPTS values into an array, following the shell quoting and substitution rules
+function splitJvmOpts() {
+ JVM_OPTS=("$@")
+}
+eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
+JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
+
+exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
diff --git a/dialogflow/Dialogflow/gradlew.bat b/dialogflow/Dialogflow/gradlew.bat
new file mode 100644
index 00000000..aec99730
--- /dev/null
+++ b/dialogflow/Dialogflow/gradlew.bat
@@ -0,0 +1,90 @@
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS=
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto init
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto init
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:init
+@rem Get command-line arguments, handling Windows variants
+
+if not "%OS%" == "Windows_NT" goto win9xME_args
+if "%@eval[2+2]" == "4" goto 4NT_args
+
+:win9xME_args
+@rem Slurp the command line arguments.
+set CMD_LINE_ARGS=
+set _SKIP=2
+
+:win9xME_args_slurp
+if "x%~1" == "x" goto execute
+
+set CMD_LINE_ARGS=%*
+goto execute
+
+:4NT_args
+@rem Get arguments from the 4NT Shell from JP Software
+set CMD_LINE_ARGS=%$
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/dialogflow/Dialogflow/settings.gradle b/dialogflow/Dialogflow/settings.gradle
new file mode 100644
index 00000000..e7b4def4
--- /dev/null
+++ b/dialogflow/Dialogflow/settings.gradle
@@ -0,0 +1 @@
+include ':app'
diff --git a/speech/Speech/app/build.gradle b/speech/Speech/app/build.gradle
index 8399f24e..b03e8470 100644
--- a/speech/Speech/app/build.gradle
+++ b/speech/Speech/app/build.gradle
@@ -18,17 +18,18 @@ apply plugin: 'com.android.application'
apply plugin: 'com.google.protobuf'
ext {
- supportLibraryVersion = '25.4.0'
- grpcVersion = '1.4.0'
+ supportLibraryVersion = '28.0.0'
+ grpcVersion = '1.21.0'
+ protobufVersion = '3.8.0'
}
android {
- compileSdkVersion 25
- buildToolsVersion '25.0.3'
+ compileSdkVersion 28
defaultConfig {
applicationId "com.google.cloud.android.speech"
- targetSdkVersion 25
+ targetSdkVersion 28
+ minSdkVersion 16
versionCode 1
versionName '1.0'
}
@@ -42,17 +43,6 @@ android {
}
}
- productFlavors {
- dev {
- // Minimum version with platform multi-dex support
- minSdkVersion 21
- }
- prod {
- // Minimum version that can run gRPC (TLS extension)
- minSdkVersion 16
- }
- }
-
buildTypes {
debug {
minifyEnabled false
@@ -64,16 +54,11 @@ android {
signingConfig signingConfigs.release
}
}
-
- configurations.all {
- resolutionStrategy.force 'com.google.code.findbugs:jsr305:3.0.2'
- resolutionStrategy.force "com.android.support:support-annotations:$supportLibraryVersion"
- }
}
protobuf {
protoc {
- artifact = 'com.google.protobuf:protoc:3.3.0'
+ artifact = "com.google.protobuf:protoc:$protobufVersion"
}
plugins {
javalite {
@@ -97,33 +82,31 @@ protobuf {
}
dependencies {
- compile fileTree(dir: 'libs', include: ['*.jar'])
-
// Support libraries
- compile "com.android.support:design:$supportLibraryVersion"
- compile "com.android.support:cardview-v7:$supportLibraryVersion"
- compile "com.android.support:recyclerview-v7:$supportLibraryVersion"
+ implementation "com.android.support:design:$supportLibraryVersion"
+ implementation "com.android.support:cardview-v7:$supportLibraryVersion"
+ implementation "com.android.support:recyclerview-v7:$supportLibraryVersion"
// gRPC
- compile "io.grpc:grpc-okhttp:$grpcVersion"
- compile "io.grpc:grpc-protobuf-lite:$grpcVersion"
- compile "io.grpc:grpc-stub:$grpcVersion"
- compile 'javax.annotation:javax.annotation-api:1.2'
- protobuf 'com.google.protobuf:protobuf-java:3.3.1'
+ implementation "io.grpc:grpc-okhttp:$grpcVersion"
+ implementation "io.grpc:grpc-protobuf-lite:$grpcVersion"
+ implementation "io.grpc:grpc-stub:$grpcVersion"
+ implementation 'javax.annotation:javax.annotation-api:1.3.2'
+ protobuf "com.google.protobuf:protobuf-java:$protobufVersion"
// OAuth2 for Google API
- compile('com.google.auth:google-auth-library-oauth2-http:0.7.0') {
+ implementation('com.google.auth:google-auth-library-oauth2-http:0.16.1') {
exclude module: 'httpclient'
}
// Tests
- testCompile 'junit:junit:4.12'
- androidTestCompile 'com.android.support.test:runner:0.5'
- androidTestCompile 'com.android.support.test.espresso:espresso-core:2.2.2'
+ testImplementation 'junit:junit:4.12'
+ androidTestImplementation 'com.android.support.test:runner:1.0.2'
+ androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
}
task copySecretKey(type: Copy) {
- def File secretKey = file "$System.env.GOOGLE_APPLICATION_CREDENTIALS"
+ File secretKey = file "$System.env.GOOGLE_APPLICATION_CREDENTIALS"
from secretKey.getParent()
include secretKey.getName()
into 'src/main/res/raw'
diff --git a/speech/Speech/build.gradle b/speech/Speech/build.gradle
index 75f079d1..8e67f15b 100644
--- a/speech/Speech/build.gradle
+++ b/speech/Speech/build.gradle
@@ -22,8 +22,8 @@ buildscript {
maven { url 'https://maven.google.com' }
}
dependencies {
- classpath 'com.android.tools.build:gradle:2.3.3'
- classpath 'com.google.protobuf:protobuf-gradle-plugin:0.8.0'
+ classpath 'com.android.tools.build:gradle:3.4.1'
+ classpath 'com.google.protobuf:protobuf-gradle-plugin:0.8.8'
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
diff --git a/speech/Speech/gradle/wrapper/gradle-wrapper.properties b/speech/Speech/gradle/wrapper/gradle-wrapper.properties
index 618ded6a..26028d52 100644
--- a/speech/Speech/gradle/wrapper/gradle-wrapper.properties
+++ b/speech/Speech/gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,6 @@
-#Mon Apr 03 14:01:53 JST 2017
+#Tue Jun 11 10:38:36 JST 2019
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-3.3-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-5.1.1-all.zip