Skip to content
This repository has been archived by the owner on Sep 5, 2023. It is now read-only.

Commit

Permalink
docs(language): edit hyphenation of "part-of-speech" (via synth) (#9954)
Browse files Browse the repository at this point in the history
  • Loading branch information
yoshi-automation authored and busunkim96 committed Dec 10, 2019
1 parent 5e31af7 commit 6246ef9
Show file tree
Hide file tree
Showing 5 changed files with 24 additions and 24 deletions.
Expand Up @@ -391,7 +391,7 @@ def analyze_syntax(
):
"""
Analyzes the syntax of the text and provides sentence boundaries and
tokenization along with part-of-speech tags, dependency trees, and other
properties.
Example:
Expand Down
Expand Up @@ -156,7 +156,7 @@ def analyze_syntax(self):
"""Return the gRPC stub for :meth:`LanguageServiceClient.analyze_syntax`.
Analyzes the syntax of the text and provides sentence boundaries and
tokenization along with part-of-speech tags, dependency trees, and other
properties.
Returns:
Expand Down
32 changes: 16 additions & 16 deletions google/cloud/language_v1beta2/proto/language_service.proto
Expand Up @@ -69,7 +69,7 @@ service LanguageService {
}

// Analyzes the syntax of the text and provides sentence boundaries and
// tokenization along with part-of-speech tags, dependency trees, and other
// properties.
rpc AnalyzeSyntax(AnalyzeSyntaxRequest) returns (AnalyzeSyntaxResponse) {
option (google.api.http) = {
Expand Down Expand Up @@ -272,6 +272,21 @@ message Entity {
Sentiment sentiment = 6;
}

// Represents the smallest syntactic building block of the text
// (a single token, such as a word or punctuation mark).
message Token {
// The token text.
TextSpan text = 1;

// Part-of-speech tag for this token.
PartOfSpeech part_of_speech = 2;

// Dependency tree parse for this token.
DependencyEdge dependency_edge = 3;

// [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the token.
string lemma = 4;
}

// Represents the text encoding that the caller uses to process the output.
// Providing an `EncodingType` is recommended because the API provides the
// beginning offsets for various outputs, such as tokens and mentions, and
Expand All @@ -298,21 +313,6 @@ enum EncodingType {
UTF32 = 3;
}

// Represents the smallest syntactic building block of the text
// (a single token, such as a word or punctuation mark).
message Token {
// The token text.
TextSpan text = 1;

// Part-of-speech tag for this token.
PartOfSpeech part_of_speech = 2;

// Dependency tree parse for this token.
DependencyEdge dependency_edge = 3;

// [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the token.
string lemma = 4;
}

// Represents the feeling associated with the entire text or entities in
// the text.
// Next ID: 6
Expand Down
Expand Up @@ -80,7 +80,7 @@ def AnalyzeEntitySentiment(self, request, context):

def AnalyzeSyntax(self, request, context):
"""Analyzes the syntax of the text and provides sentence boundaries and
tokenization along with part-of-speech tags, dependency trees, and other
properties.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
Expand Down
10 changes: 5 additions & 5 deletions synth.metadata
@@ -1,19 +1,19 @@
{
"updateTime": "2019-11-12T13:32:17.833551Z",
"updateTime": "2019-12-10T13:18:39.049538Z",
"sources": [
{
"generator": {
"name": "artman",
"version": "0.41.1",
"dockerImage": "googleapis/artman@sha256:545c758c76c3f779037aa259023ec3d1ef2d57d2c8cd00a222cb187d63ceac5e"
"version": "0.42.1",
"dockerImage": "googleapis/artman@sha256:c773192618c608a7a0415dd95282f841f8e6bcdef7dd760a988c93b77a64bd57"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
"sha": "f69562be0608904932bdcfbc5ad8b9a22d9dceb8",
"internalRef": "279774957"
"sha": "6cc9499e225a4f6a5e34fe07e390f67055d7991c",
"internalRef": "284643689"
}
},
{
Expand Down

0 comments on commit 6246ef9

Please sign in to comment.