Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Pull request update/241025 #440

Merged
merged 3 commits into from
Oct 25, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 14 additions & 9 deletions diworker/diworker/importers/azure.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,15 +127,20 @@ def str_from_datetime(self, date_obj):
'%Y-%m-%dT%H:%M:%S.%fZ')

def detect_period_start(self):
    """Choose the start of the import period for Azure expenses.

    If the cloud account's last import run happened in the current
    month, prefer the date of the last imported expense over the run
    date: Azure publishes the latest expenses with a delay, so the most
    recent days must be re-imported on the next run. Falls back to the
    base implementation when no period start could be derived.
    """
    ca_last_import_at = self.cloud_acc.get('last_import_at')
    # datetime.utcfromtimestamp is deprecated since Python 3.12 and
    # returns a naive datetime; build an aware UTC datetime instead so
    # both sides of the month comparison are in the same timezone.
    if (ca_last_import_at and datetime.fromtimestamp(
            ca_last_import_at, tz=timezone.utc).month == datetime.now(
            tz=timezone.utc).month):
        # When choosing period_start for Azure, prioritize last expense
        # date over date of the last import run. That is because for Azure
        # the latest expenses are not available immediately and we need to
        # load these expenses again on the next run.
        last_exp_date = self.get_last_import_date(self.cloud_acc_id)
        if last_exp_date:
            # Re-import starting from the day before the last expense.
            self.period_start = last_exp_date.replace(
                hour=0, minute=0, second=0, microsecond=0) - timedelta(
                days=1)
    if not self.period_start:
        super().detect_period_start()

@retry_backoff(AzureConsumptionException,
Expand Down
21 changes: 15 additions & 6 deletions diworker/diworker/importers/gcp.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from collections import defaultdict
import hashlib
import logging
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from diworker.diworker.importers.base import BaseReportImporter

LOG = logging.getLogger(__name__)
Expand All @@ -15,11 +15,20 @@
class GcpReportImporter(BaseReportImporter):

def detect_period_start(self):
    """Choose the start of the import period for GCP expenses.

    If the cloud account's last import run happened in the current
    month, prefer the date of the last imported expense over the run
    date: GCP publishes the latest expenses with a delay, so the most
    recent days must be re-imported on the next run. Falls back to the
    base implementation when no period start could be derived.
    """
    ca_last_import_at = self.cloud_acc.get('last_import_at')
    # datetime.utcfromtimestamp is deprecated since Python 3.12 and
    # returns a naive datetime; build an aware UTC datetime instead so
    # both sides of the month comparison are in the same timezone.
    if (ca_last_import_at and datetime.fromtimestamp(
            ca_last_import_at, tz=timezone.utc).month == datetime.now(
            tz=timezone.utc).month):
        # When choosing period_start for GCP, prioritize last expense
        # date over date of the last import run. That is because for GCP
        # the latest expenses are not available immediately and we need to
        # load these expenses again on the next run.
        last_exp_date = self.get_last_import_date(self.cloud_acc_id)
        if last_exp_date:
            # Re-import starting from the day before the last expense.
            self.period_start = last_exp_date.replace(
                hour=0, minute=0, second=0, microsecond=0) - timedelta(
                days=1)
    if not self.period_start:
        super().detect_period_start()

def get_unique_field_list(self):
Expand Down
11 changes: 7 additions & 4 deletions rest_api/rest_api_server/controllers/cloud_account.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

from optscale_client.herald_client.client_v2 import Client as HeraldClient

from sqlalchemy import Enum, true, or_
from sqlalchemy import Enum, true
from sqlalchemy.exc import IntegrityError
from sqlalchemy.sql import and_, exists
from tools.cloud_adapter.exceptions import (
Expand Down Expand Up @@ -48,7 +48,8 @@
CloudAccount, DiscoveryInfo, Organization, Pool)
from rest_api.rest_api_server.models.enums import CloudTypes, ConditionTypes
from rest_api.rest_api_server.controllers.base import BaseController
from rest_api.rest_api_server.controllers.base_async import BaseAsyncControllerWrapper
from rest_api.rest_api_server.controllers.base_async import (
BaseAsyncControllerWrapper)
from rest_api.rest_api_server.utils import (
check_bool_attribute, check_dict_attribute, check_float_attribute,
check_int_attribute, check_string, check_string_attribute,
Expand Down Expand Up @@ -90,7 +91,8 @@ def _check_organization(self, organization_id):
organization = OrganizationController(
self.session, self._config, self.token).get(organization_id)
if organization is None:
raise NotFoundException(Err.OE0005, [Organization.__name__, organization_id])
raise NotFoundException(
Err.OE0005, [Organization.__name__, organization_id])

def _validate(self, cloud_acc, is_new=True, **kwargs):
org_id = kwargs.get('organization_id')
Expand Down Expand Up @@ -518,7 +520,8 @@ def edit(self, item_id, **kwargs):
self._publish_validation_warnings_activities(updated_cloud_account,
warnings)
for import_f in ['last_import_at', 'last_import_modified_at',
'last_import_attempt_at', 'last_import_attempt_error']:
'last_import_attempt_at',
'last_import_attempt_error']:
kwargs.pop(import_f, None)
else:
updated_cloud_account = cloud_acc_obj
Expand Down
10 changes: 10 additions & 0 deletions rest_api/rest_api_server/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -1018,3 +1018,13 @@ class Err(enum.Enum):
['name', 'resource_type'],
['dev-1', 'instance']
]
OE0559 = [
"\"%s\" date should be between a month and a year ago",
['last_import_at'],
[]
]
OE0560 = [
"Changing import dates is not supported for \"%s\" cloud account type",
['environment'],
[]
]
Original file line number Diff line number Diff line change
Expand Up @@ -181,6 +181,13 @@ class CalendarSynchronizationAsyncItemHandler(BaseAsyncItemHandler,
def _get_controller_class(self):
return CalendarSynchronizationAsyncController

async def _get_item(self, item_id, **kwargs):
    """Fetch a calendar synchronization by id.

    Raises OptHTTPError 404 (OE0002) when no such item exists.
    """
    item = await run_task(self.controller.get, item_id, **kwargs)
    if item is None:
        raise OptHTTPError(404, Err.OE0002,
                           ['Calendar synchronization', item_id])
    return item

async def patch(self, id, **kwargs):
"""
---
Expand Down
97 changes: 76 additions & 21 deletions rest_api/rest_api_server/handlers/v2/cloud_account.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,19 @@
import json

from datetime import datetime, timezone
from tools.optscale_exceptions.common_exc import (NotFoundException,
ForbiddenException)
from tools.optscale_exceptions.common_exc import WrongArgumentsException
from tools.optscale_exceptions.http_exc import OptHTTPError
from rest_api.rest_api_server.models.enums import CloudTypes
from rest_api.rest_api_server.controllers.cloud_account import CloudAccountAsyncController
from rest_api.rest_api_server.controllers.cloud_account import (
CloudAccountAsyncController)
from rest_api.rest_api_server.exceptions import Err
from rest_api.rest_api_server.handlers.v1.base_async import (
BaseAsyncCollectionHandler, BaseAsyncItemHandler)
from rest_api.rest_api_server.handlers.v2.base import BaseHandler
from rest_api.rest_api_server.handlers.v1.base import BaseAuthHandler
from rest_api.rest_api_server.utils import run_task, ModelEncoder
from rest_api.rest_api_server.utils import (
check_int_attribute, check_string_attribute, run_task, ModelEncoder)


class CloudAccountAsyncCollectionHandler(BaseAsyncCollectionHandler,
Expand Down Expand Up @@ -446,6 +449,48 @@ async def get(self, id, **kwargs):
result['details'] = res
self.write(json.dumps(result, cls=ModelEncoder))

@staticmethod
def _validate_params(cloud_acc, **kwargs):
    """Validate import-date parameters of a cloud account PATCH.

    cloud_acc: the cloud account model being edited.
    kwargs: request body parameters plus a 'secret' flag that is True
        when the request is authorized with a cluster secret
        (internal call) rather than a user token.
    Raises OptHTTPError (400) on any violation.
    """
    secret = kwargs.get('secret')
    # Internal-only attempt fields: forbid for token-authorized
    # requests, type-check otherwise.
    validate_map = {
        'last_import_attempt_at': check_int_attribute,
        'last_import_attempt_error': check_string_attribute
    }
    for param, func in validate_map.items():
        if not secret and param in kwargs:
            raise OptHTTPError(
                400, Err.OE0449, [param, 'cloud account'])
        value = kwargs.get(param)
        if value:
            try:
                func(param, value)
            except WrongArgumentsException as exc:
                raise OptHTTPError.from_opt_exception(400, exc)

    for param in ['last_import_at', 'last_import_modified_at']:
        value = kwargs.get(param)
        if value:
            # Environment and Kubernetes accounts have no importable
            # cloud expenses, so import dates cannot be changed by a
            # token-authorized request.
            if not secret and cloud_acc.type in [CloudTypes.ENVIRONMENT,
                                                 CloudTypes.KUBERNETES_CNR]:
                raise OptHTTPError(
                    400, Err.OE0560, [cloud_acc.type.value])

            try:
                check_int_attribute(param, value)
            except WrongArgumentsException as exc:
                raise OptHTTPError.from_opt_exception(400, exc)
            if not secret:
                # dates should be less than a year ago and not a date in
                # the current month if updated by a token
                now = datetime.now(tz=timezone.utc)
                try:
                    min_date = int(
                        now.replace(year=now.year - 1).timestamp())
                except ValueError:
                    # now is Feb 29 and the previous year has no
                    # Feb 29 - fall back to Feb 28 of that year.
                    min_date = int(now.replace(
                        year=now.year - 1, day=28).timestamp())
                max_date = int(now.replace(
                    day=1, hour=0, minute=0, second=0,
                    microsecond=0).timestamp()) - 1
                if value < min_date or value > max_date:
                    raise OptHTTPError(400, Err.OE0559, [param])

async def patch(self, id, **kwargs):
"""
---
Expand All @@ -469,30 +514,38 @@ async def patch(self, id, **kwargs):
properties:
last_import_at:
type: integer
description: Attention! This field is for internal use, it is undesirable to change it!
UTC timestamp of last successful data import
description: |
timestamp of last successful data import
last_import_attempt_at:
type: integer
description: Attention! This field is for internal use, it is undesirable to change it!
UTC timestamp of last data import attempt
description: |
Attention! This field is for internal use, it is
undesirable to change it! UTC timestamp of last
data import attempt
last_import_attempt_error:
type: string
description: Attention! This field is for internal use, it is undesirable to change it!
Error message of last data import attempt, null if no error
description: |
Attention! This field is for internal use, it is
undesirable to change it! Error message of last
data import attempt, null if no error
last_import_modified_at:
type: integer
description: Attention! This field is for internal use, it is undesirable to change it!
Last imported report modification time in timestamp format
description: |
Last imported report modification time in
timestamp format
cleaned_at:
type: integer
description: Attention! This field is for internal use, it is undesirable to change it!
UTC timestamp of date when cloud account was cleaned up
description: |
Attention! This field is for internal use, it is
undesirable to change it! UTC timestamp of date
when cloud account was cleaned up
name:
type: string
description: Cloud account name
process_recommendations:
type: boolean
description: Is recommendations enabled? Default is True
description: |
Is recommendations enabled? Default is True
config:
type: object
description: |
Expand All @@ -519,6 +572,8 @@ async def patch(self, id, **kwargs):
- OE0371: Unable to configure billing report
- OE0437: Can’t connect the cloud subscription
- OE0449: Parameter of cloud account can\'t be changed
- OE0559: Parameter date should be between a month and a year ago
- OE0560: Changing import dates is not supported for cloud account type
401:
description: |
Unauthorized:
Expand All @@ -545,16 +600,16 @@ async def patch(self, id, **kwargs):
- token: []
- secret: []
"""
data = self._request_body()
secret = True
if not self.check_cluster_secret(raises=False):
data = self._request_body()
not_changeable_param_list = ['last_import_at', 'last_import_modified_at',
'last_import_attempt_at', 'last_import_attempt_error']
for param in not_changeable_param_list:
if data.get(param):
raise OptHTTPError(400, Err.OE0449, [param, 'cloud account'])
await self.check_permissions('MANAGE_CLOUD_CREDENTIALS',
'cloud_account', id)
await super().patch(id, **kwargs)
secret = False
item = await self._get_item(id)
self._validate_params(item, **data, secret=secret)
res = await run_task(self.controller.edit, id, **data)
self.write(res.to_json())

async def delete(self, id, **kwargs):
"""
Expand Down
Loading
Loading