Skip to content

Instantly share code, notes, and snippets.

@kgriffs
Last active November 27, 2023 17:26
Show Gist options
  • Save kgriffs/e717b8669b9b099b82ac40e11ed25e1a to your computer and use it in GitHub Desktop.

Revisions

  1. kgriffs revised this gist Nov 27, 2023. 1 changed file with 38 additions and 18 deletions.
    56 changes: 38 additions & 18 deletions datadog_fetch_logs.py
    Original file line number Diff line number Diff line change
    @@ -2,10 +2,9 @@
    import time


    class DataDogLogFetcher:
    class DatadogLogIterator:
    _MAX_LIMIT = 1_000
    _FETCH_LIMIT = _MAX_LIMIT
    _SLEEP_SEC = 2

    def __init__(self, query, start, end, api_key, app_key):
    self._cursor = None
    @@ -16,37 +15,58 @@ def __init__(self, query, start, end, api_key, app_key):
    self._start_timestamp = start.isoformat()
    self._end_timestamp = end.isoformat()

    def __iter__(self):
    return self

    def _next(self):
    # Make the API request to get log events
    url = f'https://api.datadoghq.com/api/v2/logs/events/search'
    headers = {'Content-Type': 'application/json', 'DD-API-KEY': self._api_key, 'DD-APPLICATION-KEY': self._app_key}
    def __next__(self):
    url = f"https://api.datadoghq.com/api/v2/logs/events/search"
    headers = {"Content-Type": "application/json", "DD-API-KEY": self._api_key, "DD-APPLICATION-KEY": self._app_key}
    params = {
    'filter': {'query': self._query, 'from': self._start_timestamp, 'to': self._end_timestamp},
    'page': {'limit': self._FETCH_LIMIT},
    "filter": {"query": self._query, "from": self._start_timestamp, "to": self._end_timestamp},
    "page": {"limit": self._FETCH_LIMIT},
    }

    if self._cursor:
    params['page']['cursor'] = self._cursor
    params["page"]["cursor"] = self._cursor

    response = requests.post(url, headers=headers, json=params)
    response.raise_for_status()
    response = requests.post(url, headers=headers, json=params)

    if response.status_code != 200:
    print(f"ERROR: Datadog API returned {response.status_code}")
    return None

    result = response.json()

    try:
    self._cursor = result['meta']['page']['after']
    self._cursor = result["meta"]["page"]["after"]
    except KeyError:
    return None
    raise StopIteration

    return result['data'] or None
    event_batch = result["data"]

    def fetch(self, limit):
    all_events = []
    if not event_batch:
    raise StopIteration

    return event_batch


    class DatadogLogFetcher:
    _SLEEP_SEC = 3

    while len(all_events) < limit and (batch := self._next()):
    def __init__(self, api_key, app_key):
    self._api_key = api_key
    self._app_key = app_key

    def fetch(self, query, start, end, limit):
    log_iterator = DatadogLogIterator(query, start, end, self._api_key, self._app_key)

    all_events = []
    for batch in log_iterator:
    all_events += batch
    print(f"Fetched a batch of {len(batch)} log events; {len(all_events)} total fetched so far...")
    print(f"Fetched a batch of {len(batch)} log events; {len(all_events):>7} total fetched so far...")
    time.sleep(self._SLEEP_SEC)

    if len(all_events) >= limit:
    break

    return all_events
  2. kgriffs revised this gist Nov 21, 2023. 1 changed file with 9 additions and 1 deletion.
    10 changes: 9 additions & 1 deletion datadog_fetch_logs.py
    Original file line number Diff line number Diff line change
    @@ -1,3 +1,7 @@
    import requests
    import time


    class DataDogLogFetcher:
    _MAX_LIMIT = 1_000
    _FETCH_LIMIT = _MAX_LIMIT
    @@ -30,7 +34,11 @@ def _next(self):

    result = response.json()

    self._cursor = result['meta']['page']['after']
    try:
    self._cursor = result['meta']['page']['after']
    except KeyError:
    return None

    return result['data'] or None

    def fetch(self, limit):
  3. kgriffs revised this gist Nov 21, 2023. 1 changed file with 4 additions and 9 deletions.
    13 changes: 4 additions & 9 deletions datadog_fetch_logs.py
    Original file line number Diff line number Diff line change
    @@ -1,12 +1,7 @@
    import json
    import requests
    import time


    class DatadogLogFetcher:
    class DataDogLogFetcher:
    _MAX_LIMIT = 1_000
    _FETCH_LIMIT = 500
    _SLEEP_SEC = 1
    _FETCH_LIMIT = _MAX_LIMIT
    _SLEEP_SEC = 2

    def __init__(self, query, start, end, api_key, app_key):
    self._cursor = None
    @@ -23,7 +18,7 @@ def _next(self):
    url = f'https://api.datadoghq.com/api/v2/logs/events/search'
    headers = {'Content-Type': 'application/json', 'DD-API-KEY': self._api_key, 'DD-APPLICATION-KEY': self._app_key}
    params = {
    'filter': {'query': query, 'from': self._start_timestamp, 'to': self._end_timestamp},
    'filter': {'query': self._query, 'from': self._start_timestamp, 'to': self._end_timestamp},
    'page': {'limit': self._FETCH_LIMIT},
    }

  4. kgriffs revised this gist Nov 21, 2023. 1 changed file with 1 addition and 1 deletion.
    2 changes: 1 addition & 1 deletion datadog_fetch_logs.py
    Original file line number Diff line number Diff line change
    @@ -3,7 +3,7 @@
    import time


    class DataDogLogFetcher:
    class DatadogLogFetcher:
    _MAX_LIMIT = 1_000
    _FETCH_LIMIT = 500
    _SLEEP_SEC = 1
  5. kgriffs created this gist Nov 21, 2023.
    49 changes: 49 additions & 0 deletions datadog_fetch_logs.py
    Original file line number Diff line number Diff line change
    @@ -0,0 +1,49 @@
    import json
    import requests
    import time


    class DataDogLogFetcher:
        """Fetch log events from the Datadog Logs Search API, paging with a cursor.

        Uses POST /api/v2/logs/events/search and follows ``meta.page.after``
        cursors until the requested number of events has been collected or
        the results are exhausted.
        """

        _MAX_LIMIT = 1_000   # hard per-request cap imposed by the Datadog API
        _FETCH_LIMIT = 500   # page size requested per API call
        _SLEEP_SEC = 1       # pause between pages to stay under rate limits

        def __init__(self, query, start, end, api_key, app_key):
            """
            Args:
                query: Datadog log search query string.
                start: Start of the search window; ``isoformat()`` is called on it,
                    so a ``datetime``/``date`` is expected.
                end: End of the search window (same expectation as ``start``).
                api_key: Datadog API key (sent as the DD-API-KEY header).
                app_key: Datadog application key (sent as DD-APPLICATION-KEY).
            """
            # Pagination cursor returned by the previous page; None on first call.
            self._cursor = None

            self._query = query
            self._api_key = api_key
            self._app_key = app_key
            self._start_timestamp = start.isoformat()
            self._end_timestamp = end.isoformat()

        def _next(self):
            """Fetch one page of log events.

            Returns:
                A non-empty list of event dicts, or None when no more results
                are available.

            Raises:
                requests.HTTPError: If the API responds with an error status.
            """
            url = 'https://api.datadoghq.com/api/v2/logs/events/search'
            headers = {
                'Content-Type': 'application/json',
                'DD-API-KEY': self._api_key,
                'DD-APPLICATION-KEY': self._app_key,
            }
            params = {
                # Fix: the original referenced the bare name `query` here, which
                # is a NameError at runtime — the instance attribute is intended.
                'filter': {
                    'query': self._query,
                    'from': self._start_timestamp,
                    'to': self._end_timestamp,
                },
                'page': {'limit': self._FETCH_LIMIT},
            }

            if self._cursor:
                params['page']['cursor'] = self._cursor

            response = requests.post(url, headers=headers, json=params)
            response.raise_for_status()

            result = response.json()

            # Fix: the final page omits meta.page.after; the original indexed it
            # unconditionally and raised KeyError. Treat a missing cursor as
            # end-of-results.
            try:
                self._cursor = result['meta']['page']['after']
            except KeyError:
                return None

            # An empty data list also signals exhaustion; normalize to None.
            return result['data'] or None

        def fetch(self, limit):
            """Fetch up to (roughly) `limit` log events.

            Pages are fetched whole, so the returned list may exceed `limit`
            by up to one page. Sleeps `_SLEEP_SEC` between pages.

            Returns:
                A list of event dicts.
            """
            all_events = []

            while len(all_events) < limit and (batch := self._next()):
                all_events += batch
                print(f"Fetched a batch of {len(batch)} log events; {len(all_events)} total fetched so far...")
                time.sleep(self._SLEEP_SEC)

            return all_events