Skip to content

Commit

Permalink
Create a Prometheus client
Browse files Browse the repository at this point in the history
Part of the refactoring to organize the codebase. This moves the methods that
talk to Prometheus into a class in its own module.

The unit tests have also been moved to a separate file.

closes #69
  • Loading branch information
naved001 committed Sep 24, 2024
1 parent be997dc commit 49ba5d6
Show file tree
Hide file tree
Showing 6 changed files with 87 additions and 70 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/unit-tests.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -22,4 +22,4 @@ jobs:
- name: Run unit tests
run: |
python -m unittest openshift_metrics/tests/test_utils.py
python -m unittest openshift_metrics/tests/test_*
14 changes: 8 additions & 6 deletions openshift_metrics/openshift_prometheus_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@

import argparse
from datetime import datetime, timedelta
from prometheus_client import PrometheusClient
import os
import sys
import json
Expand Down Expand Up @@ -77,24 +78,25 @@ def main():


token = os.environ.get("OPENSHIFT_TOKEN")
prom_client = PrometheusClient(openshift_url, token)

metrics_dict = {}
metrics_dict["start_date"] = report_start_date
metrics_dict["end_date"] = report_end_date

cpu_request_metrics = utils.query_metric(
openshift_url, token, CPU_REQUEST, report_start_date, report_end_date
cpu_request_metrics = prom_client.query_metric(
CPU_REQUEST, report_start_date, report_end_date
)
memory_request_metrics = utils.query_metric(
openshift_url, token, MEMORY_REQUEST, report_start_date, report_end_date
memory_request_metrics = prom_client.query_metric(
MEMORY_REQUEST, report_start_date, report_end_date
)
metrics_dict["cpu_metrics"] = cpu_request_metrics
metrics_dict["memory_metrics"] = memory_request_metrics

# because if nobody requests a GPU then we will get an empty set
try:
gpu_request_metrics = utils.query_metric(
openshift_url, token, GPU_REQUEST, report_start_date, report_end_date
gpu_request_metrics = prom_client.query_metric(
GPU_REQUEST, report_start_date, report_end_date
)
metrics_dict["gpu_metrics"] = gpu_request_metrics
except utils.EmptyResultError:
Expand Down
41 changes: 41 additions & 0 deletions openshift_metrics/prometheus_client.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
import requests
import time

from urllib3.util.retry import Retry
from requests.adapters import HTTPAdapter
from openshift_metrics.utils import EmptyResultError

class PrometheusClient:
    """Thin client for running range queries against a Prometheus (or Thanos) API.

    Attributes:
        prometheus_url: base URL of the Prometheus server.
        token: bearer token used for authentication.
        step_min: query resolution step, in minutes.
    """

    def __init__(self, prometheus_url: str, token: str, step_min: int = 15):
        self.prometheus_url = prometheus_url
        self.token = token
        self.step_min = step_min

    def _build_query_url(self, metric: str, start_date: str, end_date: str) -> str:
        """Return the query_range URL covering whole days from start_date to end_date."""
        # Dates are expected as YYYY-MM-DD strings; the range spans full days in UTC.
        day_url_vars = f"start={start_date}T00:00:00Z&end={end_date}T23:59:59Z"
        return (
            f"{self.prometheus_url}/api/v1/query_range?"
            f"query={metric}&{day_url_vars}&step={self.step_min}m"
        )

    def query_metric(self, metric, start_date, end_date):
        """Queries metric from the provided prometheus_url.

        Makes up to 3 attempts, sleeping 3 seconds between them, when the
        response is non-200 or the result set is empty.

        Raises:
            EmptyResultError: if no data could be retrieved after all attempts.
        """
        data = None
        headers = {"Authorization": f"Bearer {self.token}"}
        url = self._build_query_url(metric, start_date, end_date)

        # Transport-level retries for transient HTTP errors, on top of the
        # application-level retry loop below.
        retries = Retry(total=3, backoff_factor=1, status_forcelist=[429, 500, 502, 503, 504])

        print(f"Retrieving metric: {metric}")

        # Context-manage the session so its connection pool is closed when we
        # are done (the previous version leaked the session).
        with requests.Session() as session:
            session.mount("https://", HTTPAdapter(max_retries=retries))

            for attempt in range(3):
                response = session.get(url, headers=headers, verify=True)

                if response.status_code != 200:
                    print(f"{response.status_code} Response: {response.reason}")
                else:
                    data = response.json()["data"]["result"]
                    if data:
                        break
                    print("Empty result set")
                # Only sleep between attempts, not after the final one.
                if attempt < 2:
                    time.sleep(3)

        if not data:
            raise EmptyResultError(f"Error retrieving metric: {metric}")
        return data
37 changes: 37 additions & 0 deletions openshift_metrics/tests/test_prometheus_client.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
from requests.exceptions import ConnectionError
from unittest import TestCase, mock

from openshift_metrics.prometheus_client import PrometheusClient

class TestQueryMetric(TestCase):
    """Tests for PrometheusClient.query_metric with the HTTP layer mocked out."""

    @staticmethod
    def _make_client():
        """Build a client pointed at a dummy endpoint."""
        return PrometheusClient('https://fake-url', 'fake-token')

    @mock.patch('requests.Session.get')
    @mock.patch('time.sleep')
    def test_query_metric(self, mock_sleep, mock_get):
        # A 200 response whose payload holds a non-empty result set is
        # returned on the first attempt, with no retries.
        fake_response = mock.Mock(status_code=200)
        fake_response.json.return_value = {"data": {
            "result": "this is data"
        }}
        mock_get.return_value = fake_response

        result = self._make_client().query_metric('fake-metric', '2022-03-14', '2022-03-14')

        self.assertEqual(result, "this is data")
        self.assertEqual(mock_get.call_count, 1)

    @mock.patch('requests.Session.get')
    @mock.patch('time.sleep')
    def test_query_metric_exception(self, mock_sleep, mock_get):
        # Every attempt yields a 404, so the client exhausts its 3 tries
        # and then raises.
        mock_get.return_value = mock.Mock(status_code=404)

        with self.assertRaises(Exception):
            self._make_client().query_metric('fake-metric', '2022-03-14', '2022-03-14')
        self.assertEqual(mock_get.call_count, 3)

    @mock.patch('requests.Session.get')
    @mock.patch('time.sleep')
    def test_query_metric_connection_error(self, mock_sleep, mock_get):
        # A transport-level failure propagates immediately, without retries.
        mock_get.side_effect = ConnectionError
        with self.assertRaises(ConnectionError):
            self._make_client().query_metric('fake-metric', '2022-03-14', '2022-03-14')
        self.assertEqual(mock_get.call_count, 1)
33 changes: 0 additions & 33 deletions openshift_metrics/tests/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,39 +17,6 @@
from openshift_metrics import utils
import os

class TestQueryMetric(TestCase):
    """Legacy tests for utils.query_metric (this deletion hunk removes them;
    they were moved to tests/test_prometheus_client.py)."""

    @mock.patch('requests.Session.get')
    @mock.patch('time.sleep')
    def test_query_metric(self, mock_sleep, mock_get):
        # Successful 200 response with a non-empty payload: data is
        # returned on the first attempt.
        mock_response = mock.Mock(status_code=200)
        mock_response.json.return_value = {"data": {
            "result": "this is data"
        }}
        mock_get.return_value = mock_response

        metrics = utils.query_metric('https://fake-url', 'fake-token', 'fake-metric', '2022-03-14', '2022-03-14')
        self.assertEqual(metrics, "this is data")
        self.assertEqual(mock_get.call_count, 1)

    @mock.patch('requests.Session.get')
    @mock.patch('time.sleep')
    def test_query_metric_exception(self, mock_sleep, mock_get):
        # Persistent 404s exhaust all 3 attempts, then an exception is raised.
        mock_get.return_value = mock.Mock(status_code=404)

        self.assertRaises(Exception, utils.query_metric, 'https://fake-url', 'fake-token',
                          'fake-metric', '2022-03-14', '2022-03-14')
        self.assertEqual(mock_get.call_count, 3)

    @mock.patch('requests.Session.get')
    @mock.patch('time.sleep')
    def test_query_metric_connection_error(self, mock_sleep, mock_get):
        # A transport-level ConnectionError propagates on the first call.
        mock_get.side_effect = [ConnectionError]
        self.assertRaises(ConnectionError, utils.query_metric, 'https://fake-url', 'fake-token',
                          'fake-metric', '2022-03-14', '2022-03-14')
        self.assertEqual(mock_get.call_count, 1)


class TestGetNamespaceAnnotations(TestCase):

@mock.patch('openshift_metrics.utils.requests.post')
Expand Down
30 changes: 0 additions & 30 deletions openshift_metrics/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,36 +111,6 @@ def upload_to_s3(file, bucket, location):
response = s3.upload_file(file, Bucket=bucket, Key=location)


def query_metric(openshift_url, token, metric, report_start_date, report_end_date):
    """Queries metric from prometheus/thanos for the provided openshift_url

    NOTE(review): this is the legacy helper being deleted in this diff; it was
    moved into PrometheusClient.query_metric in prometheus_client.py.
    """
    # Holds the "result" payload; stays None until a non-empty set arrives.
    data = None
    headers = {"Authorization": f"Bearer {token}"}
    # Dates are YYYY-MM-DD strings; the range covers whole days in UTC.
    day_url_vars = f"start={report_start_date}T00:00:00Z&end={report_end_date}T23:59:59Z"
    url = f"{openshift_url}/api/v1/query_range?query={metric}&{day_url_vars}&step={STEP_MIN}m"

    # Transport-level retries for transient HTTP errors (429/5xx).
    retries = Retry(total=3, backoff_factor=1, status_forcelist=[429, 500, 502, 503, 504])
    session = requests.Session()
    session.mount("https://", HTTPAdapter(max_retries=retries))

    print(f"Retrieving metric: {metric}")

    # Application-level retries: up to 3 attempts for non-200 or empty results.
    for _ in range(3):
        response = session.get(url, headers=headers, verify=True)

        if response.status_code != 200:
            print(f"{response.status_code} Response: {response.reason}")
        else:
            data = response.json()["data"]["result"]
            if data:
                break
            print("Empty result set")
        time.sleep(3)

    if not data:
        raise EmptyResultError(f"Error retrieving metric: {metric}")
    return data


def get_namespace_attributes():
"""
Returns allocation attributes from coldfront associated
Expand Down

0 comments on commit 49ba5d6

Please sign in to comment.