From d57e0a4fe7c8b682f98b139689cd708a335e99c6 Mon Sep 17 00:00:00 2001 From: nk-hystax <128669932+nk-hystax@users.noreply.github.com> Date: Wed, 23 Oct 2024 12:11:15 +0300 Subject: [PATCH 1/3] OS-7828. Allow manual reimport for cloud accounts --- diworker/diworker/importers/azure.py | 23 ++-- diworker/diworker/importers/gcp.py | 21 +++- .../controllers/cloud_account.py | 11 +- rest_api/rest_api_server/exceptions.py | 10 ++ .../handlers/v2/cloud_account.py | 97 ++++++++++++---- .../tests/unittests/test_cloud_accounts.py | 109 +++++++++++++++--- 6 files changed, 214 insertions(+), 57 deletions(-) diff --git a/diworker/diworker/importers/azure.py b/diworker/diworker/importers/azure.py index 3804e222d..392cf1cb6 100644 --- a/diworker/diworker/importers/azure.py +++ b/diworker/diworker/importers/azure.py @@ -127,15 +127,20 @@ def str_from_datetime(self, date_obj): '%Y-%m-%dT%H:%M:%S.%fZ') def detect_period_start(self): - # When choosing period_start for Azure, prioritize last expense date - # over date of the last import run. That is because for Azure the latest - # expenses are not available immediately and we need to load these - # expenses again on the next run. - last_import_at = self.get_last_import_date(self.cloud_acc_id) - if last_import_at: - self.period_start = last_import_at.replace( - hour=0, minute=0, second=0, microsecond=0) - timedelta(days=1) - else: + ca_last_import_at = self.cloud_acc.get('last_import_at') + if (ca_last_import_at and datetime.utcfromtimestamp( + ca_last_import_at).month == datetime.now( + tz=timezone.utc).month): + # When choosing period_start for Azure, prioritize last expense + # date over date of the last import run. That is because for Azure + # the latest expenses are not available immediately and we need to + # load these expenses again on the next run. + last_exp_date = self.get_last_import_date(self.cloud_acc_id) + if last_exp_date: + self.period_start = last_exp_date.replace( + hour=0, minute=0, second=0, microsecond=0) - timedelta( + days=1) + if not self.period_start: super().detect_period_start() @retry_backoff(AzureConsumptionException, diff --git a/diworker/diworker/importers/gcp.py b/diworker/diworker/importers/gcp.py index 71fe3298d..435ddf945 100644 --- a/diworker/diworker/importers/gcp.py +++ b/diworker/diworker/importers/gcp.py @@ -1,7 +1,7 @@ from collections import defaultdict import hashlib import logging -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from diworker.diworker.importers.base import BaseReportImporter LOG = logging.getLogger(__name__) @@ -15,11 +15,20 @@ class GcpReportImporter(BaseReportImporter): def detect_period_start(self): - last_import_at = self.get_last_import_date(self.cloud_acc_id) - if last_import_at: - self.period_start = last_import_at.replace( - hour=0, minute=0, second=0, microsecond=0) - timedelta(days=1) - else: + ca_last_import_at = self.cloud_acc.get('last_import_at') + if (ca_last_import_at and datetime.utcfromtimestamp( + ca_last_import_at).month == datetime.now( + tz=timezone.utc).month): + # When choosing period_start for GCP, prioritize last expense + # date over date of the last import run. That is because for GCP + # the latest expenses are not available immediately and we need to + # load these expenses again on the next run. 
+ last_exp_date = self.get_last_import_date(self.cloud_acc_id) + if last_exp_date: + self.period_start = last_exp_date.replace( + hour=0, minute=0, second=0, microsecond=0) - timedelta( + days=1) + if not self.period_start: super().detect_period_start() def get_unique_field_list(self): diff --git a/rest_api/rest_api_server/controllers/cloud_account.py b/rest_api/rest_api_server/controllers/cloud_account.py index 672839d05..3665a039f 100644 --- a/rest_api/rest_api_server/controllers/cloud_account.py +++ b/rest_api/rest_api_server/controllers/cloud_account.py @@ -5,7 +5,7 @@ from optscale_client.herald_client.client_v2 import Client as HeraldClient -from sqlalchemy import Enum, true, or_ +from sqlalchemy import Enum, true from sqlalchemy.exc import IntegrityError from sqlalchemy.sql import and_, exists from tools.cloud_adapter.exceptions import ( @@ -48,7 +48,8 @@ CloudAccount, DiscoveryInfo, Organization, Pool) from rest_api.rest_api_server.models.enums import CloudTypes, ConditionTypes from rest_api.rest_api_server.controllers.base import BaseController -from rest_api.rest_api_server.controllers.base_async import BaseAsyncControllerWrapper +from rest_api.rest_api_server.controllers.base_async import ( + BaseAsyncControllerWrapper) from rest_api.rest_api_server.utils import ( check_bool_attribute, check_dict_attribute, check_float_attribute, check_int_attribute, check_string, check_string_attribute, @@ -90,7 +91,8 @@ def _check_organization(self, organization_id): organization = OrganizationController( self.session, self._config, self.token).get(organization_id) if organization is None: - raise NotFoundException(Err.OE0005, [Organization.__name__, organization_id]) + raise NotFoundException( + Err.OE0005, [Organization.__name__, organization_id]) def _validate(self, cloud_acc, is_new=True, **kwargs): org_id = kwargs.get('organization_id') @@ -518,7 +520,8 @@ def edit(self, item_id, **kwargs): self._publish_validation_warnings_activities(updated_cloud_account, warnings) for import_f in ['last_import_at', 'last_import_modified_at', - 'last_import_attempt_at', 'last_import_attempt_error']: + 'last_import_attempt_at', + 'last_import_attempt_error']: kwargs.pop(import_f, None) else: updated_cloud_account = cloud_acc_obj diff --git a/rest_api/rest_api_server/exceptions.py b/rest_api/rest_api_server/exceptions.py index c82839319..c515d58c3 100644 --- a/rest_api/rest_api_server/exceptions.py +++ b/rest_api/rest_api_server/exceptions.py @@ -1018,3 +1018,13 @@ class Err(enum.Enum): ['name', 'resource_type'], ['dev-1', 'instance'] ] + OE0559 = [ + "\"%s\" date should be between a month and a year ago", + ['last_import_at'], + [] + ] + OE0560 = [ + "Changing import dates is not supported for \"%s\" cloud account type", + ['environment'], + [] + ] diff --git a/rest_api/rest_api_server/handlers/v2/cloud_account.py b/rest_api/rest_api_server/handlers/v2/cloud_account.py index 39a340c3d..f57eb314a 100644 --- a/rest_api/rest_api_server/handlers/v2/cloud_account.py +++ b/rest_api/rest_api_server/handlers/v2/cloud_account.py @@ -1,16 +1,19 @@ import json - +from datetime import datetime, timezone from tools.optscale_exceptions.common_exc import (NotFoundException, ForbiddenException) +from tools.optscale_exceptions.common_exc import WrongArgumentsException from tools.optscale_exceptions.http_exc import OptHTTPError from rest_api.rest_api_server.models.enums import CloudTypes -from rest_api.rest_api_server.controllers.cloud_account import CloudAccountAsyncController +from 
rest_api.rest_api_server.controllers.cloud_account import ( + CloudAccountAsyncController) from rest_api.rest_api_server.exceptions import Err from rest_api.rest_api_server.handlers.v1.base_async import ( BaseAsyncCollectionHandler, BaseAsyncItemHandler) from rest_api.rest_api_server.handlers.v2.base import BaseHandler from rest_api.rest_api_server.handlers.v1.base import BaseAuthHandler -from rest_api.rest_api_server.utils import run_task, ModelEncoder +from rest_api.rest_api_server.utils import ( + check_int_attribute, check_string_attribute, run_task, ModelEncoder) class CloudAccountAsyncCollectionHandler(BaseAsyncCollectionHandler, @@ -446,6 +449,48 @@ async def get(self, id, **kwargs): result['details'] = res self.write(json.dumps(result, cls=ModelEncoder)) + @staticmethod + def _validate_params(cloud_acc, **kwargs): + secret = kwargs.get('secret') + validate_map = { + 'last_import_attempt_at': check_int_attribute, + 'last_import_attempt_error': check_string_attribute + } + for param, func in validate_map.items(): + if not secret and param in kwargs: + raise OptHTTPError( + 400, Err.OE0449, [param, 'cloud account']) + value = kwargs.get(param) + if value: + try: + func(param, value) + except WrongArgumentsException as exc: + raise OptHTTPError.from_opt_exception(400, exc) + + for param in ['last_import_at', 'last_import_modified_at']: + value = kwargs.get(param) + if value: + if not secret and cloud_acc.type in [CloudTypes.ENVIRONMENT, + CloudTypes.KUBERNETES_CNR]: + raise OptHTTPError( + 400, Err.OE0560, [cloud_acc.type.value]) + + try: + check_int_attribute(param, value) + except WrongArgumentsException as exc: + raise OptHTTPError.from_opt_exception(400, exc) + if not secret: + # dates should be less than a year ago and not a date in + # the current month if updated by a token + now = datetime.now(tz=timezone.utc) + min_date = int( + now.replace(year=now.year - 1).timestamp()) + max_date = int(now.replace( + day=1, hour=0, minute=0, second=0, + microsecond=0).timestamp()) - 1 + if value < min_date or value > max_date: + raise OptHTTPError(400, Err.OE0559, [param]) + async def patch(self, id, **kwargs): """ --- @@ -469,30 +514,38 @@ async def patch(self, id, **kwargs): properties: last_import_at: type: integer - description: Attention! This field is for internal use, it is undesirable to change it! - UTC timestamp of last successful data import + description: | + timestamp of last successful data import last_import_attempt_at: type: integer - description: Attention! This field is for internal use, it is undesirable to change it! - UTC timestamp of last data import attempt + description: | + Attention! This field is for internal use, it is + undesirable to change it! UTC timestamp of last + data import attempt last_import_attempt_error: type: string - description: Attention! This field is for internal use, it is undesirable to change it! - Error message of last data import attempt, null if no error + description: | + Attention! This field is for internal use, it is + undesirable to change it! Error message of last + data import attempt, null if no error last_import_modified_at: type: integer - description: Attention! This field is for internal use, it is undesirable to change it! - Last imported report modification time in timestamp format + description: | + Last imported report modification time in + timestamp format cleaned_at: type: integer - description: Attention! This field is for internal use, it is undesirable to change it! 
- UTC timestamp of date when cloud account was cleaned up + description: | + Attention! This field is for internal use, it is + undesirable to change it! UTC timestamp of date + when cloud account was cleaned up name: type: string description: Cloud account name process_recommendations: type: boolean - description: Is recommendations enabled? Default is True + description: | + Is recommendations enabled? Default is True config: type: object description: | @@ -519,6 +572,8 @@ async def patch(self, id, **kwargs): - OE0371: Unable to configure billing report - OE0437: Can’t connect the cloud subscription - OE0449: Parameter of cloud account can\'t be changed + - OE0559: Parameter date should be between a month and a year ago + - OE0560: Changing import dates is not supported for cloud account type 401: description: | Unauthorized: @@ -545,16 +600,16 @@ async def patch(self, id, **kwargs): - token: [] - secret: [] """ + data = self._request_body() + secret = True if not self.check_cluster_secret(raises=False): - data = self._request_body() - not_changeable_param_list = ['last_import_at', 'last_import_modified_at', - 'last_import_attempt_at', 'last_import_attempt_error'] - for param in not_changeable_param_list: - if data.get(param): - raise OptHTTPError(400, Err.OE0449, [param, 'cloud account']) await self.check_permissions('MANAGE_CLOUD_CREDENTIALS', 'cloud_account', id) - await super().patch(id, **kwargs) + secret = False + item = await self._get_item(id) + self._validate_params(item, **data, secret=secret) + res = await run_task(self.controller.edit, id, **data) + self.write(res.to_json()) async def delete(self, id, **kwargs): """ diff --git a/rest_api/rest_api_server/tests/unittests/test_cloud_accounts.py b/rest_api/rest_api_server/tests/unittests/test_cloud_accounts.py index 0af9f5a1e..13d679620 100644 --- a/rest_api/rest_api_server/tests/unittests/test_cloud_accounts.py +++ b/rest_api/rest_api_server/tests/unittests/test_cloud_accounts.py @@ -11,7 +11,8 @@ from rest_api.rest_api_server.models.db_base import BaseDB from rest_api.rest_api_server.models.db_factory import DBType, DBFactory from rest_api.rest_api_server.utils import decode_config -from rest_api.rest_api_server.controllers.cloud_account import CloudAccountController +from rest_api.rest_api_server.controllers.cloud_account import ( + CloudAccountController) from tools.cloud_adapter.exceptions import ReportConfigurationException from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase from rest_api.rest_api_server.models.enums import CloudTypes @@ -411,25 +412,25 @@ def test_update_not_changed_params(self): code, cloud_acc = self.create_cloud_account( self.org_id, self.valid_aws_cloud_acc) self.assertEqual(code, 201) - + now = int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp()) code, resp = self.client.cloud_account_update( - cloud_acc['id'], {'last_import_at': 123, - 'last_import_modified_at': 321}) + cloud_acc['id'], {'last_import_at': now, + 'last_import_modified_at': now}) self.assertEqual(code, 200) - self.assertEqual(resp['last_import_at'], 123) - self.assertEqual(resp['last_import_modified_at'], 321) + self.assertEqual(resp['last_import_at'], now) + self.assertEqual(resp['last_import_modified_at'], now) patch('rest_api.rest_api_server.handlers.v1.base.BaseAuthHandler.' 
'check_cluster_secret', return_value=False).start() code, resp = self.client.cloud_account_update( cloud_acc['id'], {'last_import_at': 456}) self.assertEqual(code, 400) - self.assertEqual(resp['error']['error_code'], 'OE0449') + self.assertEqual(resp['error']['error_code'], 'OE0559') code, resp = self.client.cloud_account_update( cloud_acc['id'], {'last_import_modified_at': 789}) self.assertEqual(code, 400) - self.assertEqual(resp['error']['error_code'], 'OE0449') + self.assertEqual(resp['error']['error_code'], 'OE0559') code, resp = self.client.cloud_account_update( cloud_acc['id'], {'last_import_attempt_at': 987}) @@ -895,13 +896,83 @@ def test_patch_cloud_acc_when_import_enabled(self): self.assertEqual(code, 200) self.assertEqual(self.p_configure_aws.call_count, 2) - def test_update_cloud_acc_events(self): + def test_patch_k8s_with_last_import(self): + code, cloud_acc = self.create_cloud_account( + self.org_id, self.valid_kubernetes_cloud_acc) + self.assertEqual(code, 201) + params = {'last_import_at': 1} + patch('rest_api.rest_api_server.handlers.v1.base.BaseAuthHandler.' + 'check_cluster_secret', return_value=True).start() + code, resp = self.client.cloud_account_update( + cloud_acc['id'], params) + self.assertEqual(code, 200) + + patch('rest_api.rest_api_server.handlers.v1.base.BaseAuthHandler.' + 'check_cluster_secret', return_value=False).start() + code, resp = self.client.cloud_account_update( + cloud_acc['id'], params) + self.assertEqual(code, 400) + self.assertEqual(resp['error']['error_code'], 'OE0560') + + def test_patch_last_import(self): code, cloud_acc = self.create_cloud_account( self.org_id, self.valid_aws_cloud_acc) self.assertEqual(code, 201) + now = int(datetime.datetime.now( + tz=datetime.timezone.utc).timestamp()) - 31 * 24 * 60 * 60 + for param in ['last_import_at', 'last_import_modified_at']: + params = {param: 1} + patch('rest_api.rest_api_server.handlers.v1.base.BaseAuthHandler.' + 'check_cluster_secret', return_value=True).start() + code, resp = self.client.cloud_account_update( + cloud_acc['id'], params) + self.assertEqual(code, 200) + + patch('rest_api.rest_api_server.handlers.v1.base.BaseAuthHandler.' + 'check_cluster_secret', return_value=False).start() + code, resp = self.client.cloud_account_update( + cloud_acc['id'], params) + self.assertEqual(code, 400) + self.assertEqual(resp['error']['error_code'], 'OE0559') + + params = {param: now} + code, resp = self.client.cloud_account_update( + cloud_acc['id'], params) + self.assertEqual(code, 200) + + patch('rest_api.rest_api_server.handlers.v1.base.BaseAuthHandler.' + 'check_cluster_secret', return_value=True).start() + code, resp = self.client.cloud_account_update( + cloud_acc['id'], {'last_import_attempt_at': now}) + self.assertEqual(code, 200) + + patch('rest_api.rest_api_server.handlers.v1.base.BaseAuthHandler.' + 'check_cluster_secret', return_value=False).start() + code, resp = self.client.cloud_account_update( + cloud_acc['id'], {'last_import_attempt_at': now}) + self.assertEqual(code, 400) + self.assertEqual(resp['error']['error_code'], 'OE0449') + patch('rest_api.rest_api_server.handlers.v1.base.BaseAuthHandler.' + 'check_cluster_secret', return_value=True).start() + code, resp = self.client.cloud_account_update( + cloud_acc['id'], {'last_import_attempt_error': 'test'}) + self.assertEqual(code, 200) + + patch('rest_api.rest_api_server.handlers.v1.base.BaseAuthHandler.' 
+ 'check_cluster_secret', return_value=False).start() + code, resp = self.client.cloud_account_update( + cloud_acc['id'], {'last_import_attempt_at': 'test'}) + self.assertEqual(code, 400) + self.assertEqual(resp['error']['error_code'], 'OE0449') + + def test_update_cloud_acc_events(self): + code, cloud_acc = self.create_cloud_account( + self.org_id, self.valid_aws_cloud_acc) + self.assertEqual(code, 201) patch('tools.cloud_adapter.clouds.aws.Aws.validate_credentials', - return_value={'account_id': 'another_acc_id', 'warnings': []}).start() + return_value={'account_id': 'another_acc_id', 'warnings': []} + ).start() params = {'config': { 'access_key_id': 'new_key', 'secret_access_key': 'new_secret', @@ -912,7 +983,8 @@ def test_update_cloud_acc_events(self): 'rest_api.rest_api_server.controllers.base.BaseController.' 'publish_activities_task' ).start() - code, cloud_acc = self.client.cloud_account_update(cloud_acc['id'], params) + code, cloud_acc = self.client.cloud_account_update( + cloud_acc['id'], params) self.assertEqual(code, 200) activity_param_tuples = self.get_publish_activity_tuple( self.org_id, cloud_acc['id'], 'cloud_account', @@ -924,6 +996,7 @@ def test_update_cloud_acc_events(self): *activity_param_tuples, add_token=True ) + now = int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp()) p_publish_activities1 = patch( 'rest_api.rest_api_server.controllers.base.BaseController.' 'publish_activities_task' @@ -932,9 +1005,9 @@ def test_update_cloud_acc_events(self): 'access_key_id': 'new_key', 'secret_access_key': 'new_secret', 'config_scheme': 'create_report'}, - 'last_import_at': 1, - 'last_import_modified_at': 1, - 'last_import_attempt_at': 1, + 'last_import_at': now, + 'last_import_modified_at': now, + 'last_import_attempt_at': now, 'last_import_attempt_error': 'error' } code, cloud_acc = self.client.cloud_account_update(cloud_acc['id'], @@ -955,7 +1028,7 @@ def test_update_cloud_acc_events(self): 'publish_activities_task' ).start() code, cloud_acc = self.client.cloud_account_update( - cloud_acc['id'], {'last_import_at': 1}) + cloud_acc['id'], {'last_import_at': now}) self.assertEqual(code, 200) p_publish_activities2.assert_not_called() @@ -1355,12 +1428,14 @@ def test_patch_kubernetes_import_time(self): kubernetes_cost_model = self.default_kubernetes_cost_model.copy() kubernetes_cost_model['cpu_hourly_cost'] = 0.123 valid_kubernetes_cloud_acc = self.valid_kubernetes_cloud_acc.copy() - valid_kubernetes_cloud_acc['config']['cost_model'] = kubernetes_cost_model + valid_kubernetes_cloud_acc['config'][ + 'cost_model'] = kubernetes_cost_model code, cloud_acc = self.create_cloud_account( self.org_id, valid_kubernetes_cloud_acc) self.assertEqual(code, 201) - patch('tools.cloud_adapter.clouds.kubernetes.Kubernetes.validate_credentials', + patch('tools.cloud_adapter.clouds.kubernetes.Kubernetes.' + 'validate_credentials', return_value={'account_id': cloud_acc['account_id'], 'warnings': []}).start() ts = int(datetime.datetime.utcnow().timestamp()) From 91e1f591366f069cb253d8571b666b23101b80c7 Mon Sep 17 00:00:00 2001 From: nk-hystax <128669932+nk-hystax@users.noreply.github.com> Date: Wed, 23 Oct 2024 12:13:08 +0300 Subject: [PATCH 2/3] OS-7922. 
Added missing space to error text --- tools/cloud_adapter/clouds/azure.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/cloud_adapter/clouds/azure.py b/tools/cloud_adapter/clouds/azure.py index 74e103fa3..4cbbd1d7f 100644 --- a/tools/cloud_adapter/clouds/azure.py +++ b/tools/cloud_adapter/clouds/azure.py @@ -472,7 +472,7 @@ def _get_billing_info(self): consumption_api_supported = False elif is_empty: warnings.append( - 'Subscription %s (%s) doesn\'t have usage data yet or is' + 'Subscription %s (%s) doesn\'t have usage data yet or is ' 'not supported' % ( self._subscription_id, subscription_type)) elif is_timeout_error: From bb2006d4aef172b6646f01fddedc42b71f84a647 Mon Sep 17 00:00:00 2001 From: nk-hystax <128669932+nk-hystax@users.noreply.github.com> Date: Wed, 23 Oct 2024 12:14:14 +0300 Subject: [PATCH 3/3] OS-2553. Changed error message for not found synchronization --- .../handlers/v2/calendar_synchronizations.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/rest_api/rest_api_server/handlers/v2/calendar_synchronizations.py b/rest_api/rest_api_server/handlers/v2/calendar_synchronizations.py index 5645395e1..09c41f92b 100644 --- a/rest_api/rest_api_server/handlers/v2/calendar_synchronizations.py +++ b/rest_api/rest_api_server/handlers/v2/calendar_synchronizations.py @@ -181,6 +181,13 @@ class CalendarSynchronizationAsyncItemHandler(BaseAsyncItemHandler, def _get_controller_class(self): return CalendarSynchronizationAsyncController + async def _get_item(self, item_id, **kwargs): + res = await run_task(self.controller.get, item_id, **kwargs) + if res is None: + raise OptHTTPError(404, Err.OE0002, + ['Calendar synchronization', item_id]) + return res + async def patch(self, id, **kwargs): """ ---
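
Note (illustrative, not part of the patches above): patch 1 adds a date-window check in _validate_params for token-authenticated PATCH requests, raising OE0559 unless a last_import_at / last_import_modified_at value falls between one year ago and the last second of the previous month (cluster-secret callers are exempt). The sketch below mirrors that window in standalone Python for reference; the helper names import_date_window and is_allowed_import_date are assumptions for illustration only and do not exist in the patch.

# Standalone sketch of the OE0559 date window from
# rest_api/rest_api_server/handlers/v2/cloud_account.py (_validate_params).
from datetime import datetime, timezone


def import_date_window():
    # Allowed range: from one year ago up to the last second before the
    # current month started (i.e. "between a month and a year ago").
    now = datetime.now(tz=timezone.utc)
    min_date = int(now.replace(year=now.year - 1).timestamp())
    max_date = int(now.replace(
        day=1, hour=0, minute=0, second=0, microsecond=0).timestamp()) - 1
    return min_date, max_date


def is_allowed_import_date(value: int) -> bool:
    # Mirrors the handler check: values outside [min_date, max_date]
    # would be rejected with OE0559 when no cluster secret is provided.
    min_date, max_date = import_date_window()
    return min_date <= value <= max_date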