feat: discovery supports retries (#967)
Adding the ability for discovery to retry on eligible HTTP errors
and connection problems with randomized exponential backoff.

Also:
 * DRYing discovery tests to avoid warnings when reading test data.
 * Updating .gitignore

Fixes: #848
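
A minimal usage sketch of the new parameter (the service name and retry count here are illustrative; `num_retries` defaults to 1):

```python
from googleapiclient.discovery import build

# Retry the discovery request up to 3 times on transient 5xx responses and
# connection problems, with randomized exponential backoff between attempts.
service = build("drive", "v3", num_retries=3)
```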
DFrenkel committed Jul 15, 2020
1 parent 63f97f3 commit f3348f9
Showing 7 changed files with 186 additions and 51 deletions.
4 changes: 4 additions & 0 deletions .gitignore
@@ -11,3 +11,7 @@ dist/
.coverage
coverage.xml
nosetests.xml

# IDE files
.idea
.vscode
22 changes: 15 additions & 7 deletions googleapiclient/discovery.py
@@ -187,6 +187,7 @@ def build(
client_options=None,
adc_cert_path=None,
adc_key_path=None,
num_retries=1,
):
"""Construct a Resource for interacting with an API.
@@ -223,6 +224,8 @@ def build(
adc_key_path: str, client encrypted private key file path to save the
application default client encrypted private key for mTLS. This field is
required if you want to use the default client certificate.
num_retries: Integer, number of times to retry discovery with
randomized exponential backoff in case of intermittent/connection issues.
Returns:
A Resource object with methods for interacting with the service.
@@ -243,7 +246,8 @@ def build(

try:
content = _retrieve_discovery_doc(
requested_url, discovery_http, cache_discovery, cache, developerKey
requested_url, discovery_http, cache_discovery, cache,
developerKey, num_retries=num_retries
)
return build_from_document(
content,
@@ -266,7 +270,8 @@ def build(
raise UnknownApiNameOrVersion("name: %s version: %s" % (serviceName, version))


def _retrieve_discovery_doc(url, http, cache_discovery, cache=None, developerKey=None):
def _retrieve_discovery_doc(url, http, cache_discovery,
cache=None, developerKey=None, num_retries=1):
"""Retrieves the discovery_doc from cache or the internet.
Args:
@@ -276,13 +281,16 @@ def _retrieve_discovery_doc(url, http, cache_discovery, cache=None, developerKey
cache_discovery: Boolean, whether or not to cache the discovery doc.
cache: googleapiclient.discovery_cache.base.Cache, an optional cache
object for the discovery documents.
developerKey: string, Key for controlling API usage, generated
from the API Console.
num_retries: Integer, number of times to retry discovery with
randomized exponential backoff in case of intermittent/connection issues.
Returns:
A unicode string representation of the discovery document.
"""
if cache_discovery:
from . import discovery_cache
from .discovery_cache import base

if cache is None:
cache = discovery_cache.autodetect()
@@ -302,10 +310,10 @@ def _retrieve_discovery_doc(url, http, cache_discovery, cache=None, developerKey
actual_url = _add_query_parameter(url, "key", developerKey)
logger.debug("URL being requested: GET %s", actual_url)

resp, content = http.request(actual_url)

if resp.status >= 400:
raise HttpError(resp, content, uri=actual_url)
# Execute this request with retries built into HttpRequest
# Note that it will already raise an error if we don't get a 2xx response
req = HttpRequest(http, HttpRequest.null_postproc, actual_url)
resp, content = req.execute(num_retries=num_retries)

try:
content = content.decode("utf-8")
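
The net effect of the discovery.py change above: instead of issuing a bare http.request and failing on the first non-2xx response, the discovery document is now fetched through an HttpRequest with a pass-through postprocessor, so it inherits the retry behaviour that HttpRequest.execute already implements. A rough sketch of that backoff idea (illustrative only, not the library's exact code):

```python
import random
import time

def fetch_with_backoff(request_fn, num_retries=1):
    """Illustrative retry loop: on a transient failure, sleep a random
    fraction of an exponentially growing window, then try again."""
    for retry_num in range(num_retries + 1):
        if retry_num > 0:
            # Randomized exponential backoff: up to 2**retry_num seconds.
            time.sleep(random.random() * 2 ** retry_num)
        resp, content = request_fn()
        if resp.status < 500:  # only transient server/connection failures are retried
            break
    return resp, content
```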
16 changes: 11 additions & 5 deletions googleapiclient/http.py
@@ -1118,6 +1118,10 @@ def from_json(s, http, postproc):
resumable=d["resumable"],
)

@staticmethod
def null_postproc(resp, contents):
return resp, contents
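
For context on why this no-op hook exists: HttpRequest applies its postproc callback to the response before handing it back from execute(), so a caller that wants the raw discovery bytes needs a pass-through, which is what null_postproc provides. For contrast, a hypothetical JSON-decoding postprocessor might look like this:

```python
import json

def json_postproc(resp, contents):
    # A typical postprocessor deserializes the body before returning it.
    return json.loads(contents.decode("utf-8"))

# null_postproc instead returns the (response, contents) pair untouched,
# which is why discovery can unpack it directly:
#     resp, content = req.execute(num_retries=num_retries)
```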


class BatchHttpRequest(object):
"""Batches multiple HttpRequest objects into a single HTTP request.
@@ -1168,7 +1172,7 @@ def __init__(self, callback=None, batch_uri=None):
batch_uri = _LEGACY_BATCH_URI

if batch_uri == _LEGACY_BATCH_URI:
LOGGER.warn(
LOGGER.warning(
"You have constructed a BatchHttpRequest using the legacy batch "
"endpoint %s. This endpoint will be turned down on August 12, 2020. "
"Please provide the API-specific endpoint or use "
@@ -1416,7 +1420,7 @@ def _execute(self, http, order, requests):
http: httplib2.Http, an http object to be used to make the request with.
order: list, list of request ids in the order they were added to the
batch.
request: list, list of request objects to send.
requests: list, list of request objects to send.
Raises:
httplib2.HttpLib2Error if a transport error has occurred.
@@ -1690,9 +1694,8 @@ def __init__(self, filename=None, headers=None):
if headers is None:
headers = {"status": "200"}
if filename:
f = open(filename, "rb")
self.data = f.read()
f.close()
with open(filename, "rb") as f:
self.data = f.read()
else:
self.data = None
self.response_headers = headers
@@ -1749,6 +1752,7 @@ def __init__(self, iterable):
"""
self._iterable = iterable
self.follow_redirects = True
self.request_sequence = list()

def request(
self,
@@ -1759,6 +1763,8 @@ def request(
redirections=1,
connection_type=None,
):
# Remember the request so this mock can be examined after the fact
self.request_sequence.append((uri, method, body, headers))
resp, content = self._iterable.pop(0)
content = six.ensure_binary(content)

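
The new request_sequence attribute records every (uri, method, body, headers) tuple the mock receives, which is what lets the retry tests assert on how many attempts were made. A small example of using it directly (URLs and bodies are placeholders):

```python
from googleapiclient.http import HttpMockSequence

# Two canned responses: a transient failure followed by a success.
http = HttpMockSequence([
    ({"status": "500"}, "server error"),
    ({"status": "200"}, "{}"),
])

http.request("https://example.com/api", method="GET")
http.request("https://example.com/api", method="GET")

# After the fact, every request made against the mock can be inspected.
assert len(http.request_sequence) == 2
assert http.request_sequence[0][1] == "GET"
```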
13 changes: 13 additions & 0 deletions tests/data/500.json
@@ -0,0 +1,13 @@
{
"error": {
"errors": [
{
"domain": "global",
"reason": "internalError",
"message": "We encountered an internal error. Please try again using truncated exponential backoff."
}
],
"code": 500,
"message": "Internal Server Error"
}
}
13 changes: 13 additions & 0 deletions tests/data/503.json
@@ -0,0 +1,13 @@
{
"error": {
"errors": [
{
"domain": "global",
"reason": "backendError",
"message": "We encountered an internal error. Please try again using truncated exponential backoff."
}
],
"code": 503,
"message": "Service Unavailable"
}
}
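
These fixtures give the discovery tests canned 5xx bodies to feed through HttpMockSequence. A hedged sketch of how such a retry test might look (the read_datafile helper, the plus.json fixture, and the test name are assumptions, not necessarily the repository's exact test code):

```python
import os

from googleapiclient.discovery import build
from googleapiclient.http import HttpMockSequence

DATA_DIR = os.path.join(os.path.dirname(__file__), "data")

def read_datafile(filename):
    with open(os.path.join(DATA_DIR, filename)) as f:
        return f.read()

def test_discovery_retries_transient_503():
    http = HttpMockSequence([
        ({"status": "503"}, read_datafile("503.json")),
        ({"status": "200"}, read_datafile("plus.json")),  # any valid discovery doc
    ])
    # The initial 503 is retried once with backoff and then succeeds.
    service = build("plus", "v1", http=http, cache_discovery=False, num_retries=1)
    assert service is not None
    assert len(http.request_sequence) == 2
```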
