diff --git a/arcee/Dockerfile b/arcee/Dockerfile index ca17f2568..f633035e4 100644 --- a/arcee/Dockerfile +++ b/arcee/Dockerfile @@ -6,6 +6,7 @@ ENV PYTHONPATH /usr/src/app/ COPY arcee/arcee_receiver/requirements.txt arcee/arcee_receiver/requirements.txt COPY optscale_client/aconfig_cl optscale_client/aconfig_cl +COPY tools/optscale_time tools/optscale_time RUN pip install --no-cache-dir -r /usr/src/app/arcee/arcee_receiver/requirements.txt diff --git a/arcee/arcee_receiver/requirements.txt b/arcee/arcee_receiver/requirements.txt index 090ba3f86..7523af5c3 100644 --- a/arcee/arcee_receiver/requirements.txt +++ b/arcee/arcee_receiver/requirements.txt @@ -8,3 +8,4 @@ mongodb-migrations==1.2.1 pydantic==2.4.2 # OptScale packages -e optscale_client/aconfig_cl +-e tools/optscale_time diff --git a/arcee/arcee_receiver/server.py b/arcee/arcee_receiver/server.py index 12652ef53..a86cf2605 100644 --- a/arcee/arcee_receiver/server.py +++ b/arcee/arcee_receiver/server.py @@ -33,6 +33,7 @@ get_metrics as _get_task_metrics) from optscale_client.aconfig_cl.aconfig_cl import AConfigCl +import tools.optscale_time as opttime app = Sanic("arcee") @@ -648,7 +649,7 @@ async def update_run(request, body: RunPatchIn, run_id: str): d = body.model_dump(exclude_unset=True, exclude={'finish'}) # TODO: remove "finish" from PATCH payload. Set ts based on "state" if body.finish: - d.update({"finish": int(datetime.utcnow().timestamp())}) + d.update({"finish": opttime.utcnow_timestamp()}) hyperparameters = d.get("hyperparameters", {}) if hyperparameters: existing_hyperparams = r.get("hyperparameters", {}) @@ -683,7 +684,7 @@ async def create_run_milestone(request, run_id: str): d = { "_id": str(uuid.uuid4()), "run_id": run_id, - "timestamp": int(datetime.utcnow().timestamp()), + "timestamp": opttime.utcnow_timestamp(), "milestone": milestone, } await db.milestone.insert_one( @@ -1170,7 +1171,7 @@ async def create_token(request): d = { "_id": str(uuid.uuid4()), "token": token, - "created": int(datetime.utcnow().timestamp()), + "created": opttime.utcnow_timestamp(), "deleted_at": 0, } await db.token.insert_one( @@ -1201,8 +1202,7 @@ async def delete_token(request, token: str): await db.token.update_one( {"_id": token_id}, { '$set': { - "deleted_at": int( - datetime.utcnow().timestamp()), + "deleted_at": int(opttime.utcnow_timestamp()), } }) return json({"deleted": True, "id": token_id}) @@ -1256,7 +1256,7 @@ async def create_stage(request, run_id: str): d = { "_id": str(uuid.uuid4()), "run_id": run_id, - "timestamp": int(datetime.utcnow().timestamp()), + "timestamp": opttime.utcnow_timestamp(), "name": stage_name, } await db.stage.insert_one( @@ -1313,7 +1313,7 @@ async def create_proc_data(request, run_id: str): d = { "_id": str(uuid.uuid4()), "run_id": run_id, - "timestamp": int(datetime.utcnow().timestamp()), + "timestamp": opttime.utcnow_timestamp(), 'instance_id': instance, "proc_stats": proc_stats, } @@ -1619,7 +1619,7 @@ async def delete_leaderboard(request, id_: str): if not o: raise SanicException("Leaderboard not found", status_code=404) await db.leaderboard.update_one({"_id": id_}, {'$set': { - "deleted_at": int(datetime.utcnow().timestamp()) + "deleted_at": opttime.utcnow_timestamp() }}) return json('', status=204) @@ -1950,7 +1950,7 @@ async def delete_dataset(request, id_: str): if await _dataset_used_in_leaderboard(db, id_): raise SanicException("Dataset used in leaderboard", status_code=409) await db.dataset.update_one({"_id": id_}, {'$set': { - "deleted_at": int(datetime.utcnow().timestamp()) + 
"deleted_at": opttime.utcnow_timestamp() }}) return json('', status=204) diff --git a/auth/Dockerfile b/auth/Dockerfile index 2c326a13a..36150e43d 100644 --- a/auth/Dockerfile +++ b/auth/Dockerfile @@ -10,6 +10,7 @@ RUN apt-get update && apt-get install -y libsodium-dev \ && rm -rf /var/lib/apt/lists/* COPY tools/optscale_exceptions tools/optscale_exceptions +COPY tools/optscale_time tools/optscale_time COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/rest_api_client optscale_client/rest_api_client COPY auth/zoho_integrator auth/zoho_integrator diff --git a/auth/auth_server/alembic/versions/d7d1cf2f5182_initial.py b/auth/auth_server/alembic/versions/d7d1cf2f5182_initial.py index 14e2b58e0..2dcc4f3dd 100644 --- a/auth/auth_server/alembic/versions/d7d1cf2f5182_initial.py +++ b/auth/auth_server/alembic/versions/d7d1cf2f5182_initial.py @@ -11,7 +11,7 @@ import uuid import string import random -from datetime import datetime +from datetime import datetime, timezone from alembic import op import sqlalchemy as sa from sqlalchemy import (Column, String, Integer, ForeignKey, Table, @@ -40,7 +40,7 @@ def gen_salt(): def get_current_timestamp(): - return int(datetime.utcnow().timestamp()) + return int(datetime.now(tz=timezone.utc).timestamp()) def create_tables(): diff --git a/auth/auth_server/auth_token/token_store.py b/auth/auth_server/auth_token/token_store.py index 9cf4d080c..0b42d82b9 100644 --- a/auth/auth_server/auth_token/token_store.py +++ b/auth/auth_server/auth_token/token_store.py @@ -1,5 +1,4 @@ import logging -import datetime from sqlalchemy import and_, or_ from auth.auth_server.auth_token.macaroon import MacaroonToken from auth.auth_server.exceptions import Err @@ -8,6 +7,7 @@ from auth.auth_server.utils import get_context_values, get_digest from tools.optscale_exceptions.common_exc import (UnauthorizedException, ForbiddenException) +import tools.optscale_time as opttime LOG = logging.getLogger(__name__) @@ -25,7 +25,7 @@ def session(self): def check_token_valid(self, token_str): token = self.session.query(Token).filter( - Token.valid_until >= datetime.datetime.utcnow(), + Token.valid_until >= opttime.utcnow(), Token.digest == get_digest(token_str), ).all() if not token: diff --git a/auth/auth_server/controllers/assignment.py b/auth/auth_server/controllers/assignment.py index 17454bb10..411dae05a 100644 --- a/auth/auth_server/controllers/assignment.py +++ b/auth/auth_server/controllers/assignment.py @@ -1,5 +1,4 @@ import logging -import datetime from sqlalchemy import and_ from auth.auth_server.controllers.base import BaseController @@ -11,7 +10,7 @@ from tools.optscale_exceptions.common_exc import (ForbiddenException, NotFoundException, WrongArgumentsException) - +import tools.optscale_time as opttime LOG = logging.getLogger(__name__) @@ -106,7 +105,7 @@ def delete(self, item_id, **kwargs): token = kwargs.get('token') user = self.get_user(token) self._check_assign_ability(token, user, item) - item.deleted_at = datetime.datetime.utcnow().timestamp() + item.deleted_at = opttime.utcnow_timestamp() self.session.add(item) self.session.commit() diff --git a/auth/auth_server/controllers/base.py b/auth/auth_server/controllers/base.py index 90a2a3475..bcddae146 100644 --- a/auth/auth_server/controllers/base.py +++ b/auth/auth_server/controllers/base.py @@ -1,6 +1,5 @@ import time import logging -from datetime import datetime import requests from ordered_set import OrderedSet from sqlalchemy import and_ @@ -18,6 +17,7 @@ ForbiddenException) from 
tools.optscale_exceptions.http_exc import handle503 from optscale_client.rest_api_client.client_v2 import Client as RestApiClient +import tools.optscale_time as opttime LOG = logging.getLogger(__name__) @@ -106,7 +106,7 @@ def _get_model_type(self): def get_user(self, token): token = self.session.query(Token).get(get_digest(token)) - if not token or not token.valid_until > datetime.utcnow(): + if not token or not token.valid_until > opttime.utcnow(): raise UnauthorizedException(Err.OA0023, []) return token.user @@ -311,7 +311,7 @@ def nested_dict_iter(nested): def use_verification_code(self, email, code): if not email or not code: return - now = datetime.utcnow() + now = opttime.utcnow() return self.session.query(VerificationCode).filter( and_( VerificationCode.email == email, diff --git a/auth/auth_server/controllers/token.py b/auth/auth_server/controllers/token.py index 0e016f2a9..ed5130f8b 100644 --- a/auth/auth_server/controllers/token.py +++ b/auth/auth_server/controllers/token.py @@ -15,6 +15,7 @@ from tools.optscale_exceptions.common_exc import (WrongArgumentsException, ForbiddenException, NotFoundException) +from tools.optscale_time import utcnow from optscale_client.config_client.client import etcd LOG = logging.getLogger(__name__) @@ -98,7 +99,7 @@ def create_token_by_user_id(self, **kwargs): def create_user_token(self, user, **kwargs): model_type = self._get_model_type() LOG.info("Creating %s with parameters %s", model_type.__name__, kwargs) - now = datetime.datetime.utcnow() + now = utcnow() macaroon_token = MacaroonToken(user.salt, user.id).create( xstr(kwargs.get('register', False)), xstr(kwargs.get('provider', 'optscale')) diff --git a/auth/auth_server/controllers/user.py b/auth/auth_server/controllers/user.py index a62e1a06f..ce14b9407 100644 --- a/auth/auth_server/controllers/user.py +++ b/auth/auth_server/controllers/user.py @@ -1,4 +1,3 @@ -import datetime import logging import re from sqlalchemy.sql import func @@ -15,6 +14,7 @@ check_action, hash_password, is_email_format, get_input, check_string_attribute, check_bool_attribute, is_hystax_email, is_demo_email) +from tools.optscale_time import utcnow from tools.optscale_exceptions.common_exc import ( WrongArgumentsException, ForbiddenException, NotFoundException, ConflictException) @@ -185,7 +185,7 @@ def delete(self, item_id, **kwargs): 'DELETE_USER', item.type.name, item.scope_id ) or self._is_self_edit(user, item_id)): raise ForbiddenException(Err.OA0012, []) - item.deleted_at = datetime.datetime.utcnow().timestamp() + item.deleted_at = utcnow().timestamp() self.session.add(item) self.session.commit() diff --git a/auth/auth_server/controllers/verification_code.py b/auth/auth_server/controllers/verification_code.py index 79d17e554..b26f0722b 100644 --- a/auth/auth_server/controllers/verification_code.py +++ b/auth/auth_server/controllers/verification_code.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime, timedelta +from datetime import timedelta from sqlalchemy import and_, exists from sqlalchemy.exc import IntegrityError from auth.auth_server.controllers.base import BaseController @@ -10,6 +10,7 @@ from auth.auth_server.utils import get_digest from tools.optscale_exceptions.common_exc import (WrongArgumentsException, ForbiddenException) +from tools.optscale_time import utcnow LOG = logging.getLogger(__name__) VERIFICATION_CODE_LIFETIME_HRS = 1 @@ -43,7 +44,7 @@ def _check_input(self, **input_): def _check_generation_timeout(self, email): model = self._get_model_type() - timeout = datetime.utcnow() - 
timedelta( + timeout = utcnow() - timedelta( minutes=GENERATION_THRESHOLD_MIN) code_exists = self.session.query(exists().where(and_( model.email == email, @@ -69,7 +70,7 @@ def _invalidate_verification_codes(self, email, deleted_at): def create_verification_code(self, email, code): model_type = self._get_model_type() LOG.info("Creating %s for %s", model_type.__name__, email) - now = datetime.utcnow() + now = utcnow() now_ts = int(now.timestamp()) params = { 'email': email, diff --git a/auth/auth_server/models/models.py b/auth/auth_server/models/models.py index 35f6db979..c6de1b7a0 100644 --- a/auth/auth_server/models/models.py +++ b/auth/auth_server/models/models.py @@ -3,7 +3,6 @@ import uuid import string import random -from datetime import datetime from sqlalchemy import Enum, UniqueConstraint from sqlalchemy.ext.declarative.base import _declarative_constructor from sqlalchemy.ext.declarative import declarative_base, declared_attr @@ -14,6 +13,7 @@ from auth.auth_server.utils import as_dict, ModelEncoder from auth.auth_server.models.exceptions import InvalidTreeException +from tools.optscale_time import utcnow, utcnow_timestamp def gen_id(): @@ -26,7 +26,7 @@ def gen_salt(): def get_current_timestamp(): - return int(datetime.utcnow().timestamp()) + return utcnow_timestamp() class PermissionKeys(Enum): @@ -230,7 +230,7 @@ class Token(Base): digest = Column(String(32), primary_key=True, nullable=False) user_id = Column(String(36), ForeignKey('user.id')) - created_at = Column(TIMESTAMP, nullable=False, default=datetime.utcnow) + created_at = Column(TIMESTAMP, nullable=False, default=utcnow) valid_until = Column(TIMESTAMP, nullable=False, index=True) ip = Column(String(39), nullable=False) user = relationship("User", backref="tokens") diff --git a/auth/auth_server/tests/unittests/test_api_role.py b/auth/auth_server/tests/unittests/test_api_role.py index 126be47df..6c0abd5b3 100644 --- a/auth/auth_server/tests/unittests/test_api_role.py +++ b/auth/auth_server/tests/unittests/test_api_role.py @@ -1,5 +1,4 @@ # pylint: disable=C0302 -import datetime import random import string from unittest.mock import patch @@ -8,6 +7,7 @@ Assignment, ActionGroup) from auth.auth_server.models.models import gen_salt from auth.auth_server.utils import hash_password +from tools.optscale_time import utcnow_timestamp RES_INFO_URL = "auth.auth_server.controllers.base." 
\ @@ -233,8 +233,7 @@ def test_get_roles_deleted_agrp(self, p_get_hierarchy, p_get_context, self.assertIsNotNone(response['actions'].get( self.roles_action_group.name)) session = self.db_session - self.roles_action_group.deleted_at = int( - datetime.datetime.utcnow().timestamp()) + self.roles_action_group.deleted_at = utcnow_timestamp() session.add(self.roles_action_group) session.commit() _, response = self.client.role_get(test_role1.id) @@ -261,8 +260,7 @@ def test_get_roles_deleted_action(self, p_get_hierarchy, p_get_context, self.cloud_sites_action_group.name].get( self.list_css_action.name)) session = self.db_session - self.list_css_action.deleted_at = int( - datetime.datetime.utcnow().timestamp()) + self.list_css_action.deleted_at = utcnow_timestamp() session.add(self.list_css_action) session.commit() _, response = self.client.role_get(test_role1.id) diff --git a/auth/auth_server/tests/unittests/test_api_token.py b/auth/auth_server/tests/unittests/test_api_token.py index 5d9dcd281..fa8550092 100644 --- a/auth/auth_server/tests/unittests/test_api_token.py +++ b/auth/auth_server/tests/unittests/test_api_token.py @@ -1,10 +1,11 @@ -from datetime import datetime, timedelta +from datetime import timedelta import uuid from auth.auth_server.tests.unittests.test_api_base import TestAuthBase from auth.auth_server.models.models import (Type, User, gen_salt, VerificationCode) from auth.auth_server.utils import hash_password, get_digest from auth.auth_server.tests.unittests.utils import extract_caveats +from tools.optscale_time import utcnow class TestTokenApi(TestAuthBase): @@ -125,7 +126,7 @@ def test_token_by_verification_code(self): session.add(partner_user) code_1, code_3, code_4 = 123456, 234567, 345678 - now = datetime.utcnow() + now = utcnow() vc_1 = VerificationCode( email='wrong@email.com', valid_until=now + timedelta(hours=1), code=get_digest(str(code_1))) diff --git a/auth/auth_server/tests/unittests/test_api_verification_code.py b/auth/auth_server/tests/unittests/test_api_verification_code.py index d5d03b1ce..4f564a489 100644 --- a/auth/auth_server/tests/unittests/test_api_verification_code.py +++ b/auth/auth_server/tests/unittests/test_api_verification_code.py @@ -3,6 +3,7 @@ from auth.auth_server.tests.unittests.test_api_base import TestAuthBase from auth.auth_server.models.models import Type, VerificationCode from auth.auth_server.utils import get_digest +from tools.optscale_time import utcnow class TestVerificationCodeApi(TestAuthBase): @@ -51,7 +52,7 @@ def test_invalid_parameters(self): def test_invalidation(self): code_1 = 1 - dt = datetime.utcnow() + dt = utcnow() with freeze_time(dt): _, verification_code_1 = self.client.verification_code_create( self.admin_user.email, code_1) @@ -94,7 +95,7 @@ def test_create_immutable(self): self.assertEqual(resp['error']['params'], [field]) def test_generation_time_threshold(self): - dt = datetime.utcnow() + dt = utcnow() with freeze_time(dt): code, _ = self.client.verification_code_create( self.admin_user.email, 1) diff --git a/auth/requirements.txt b/auth/requirements.txt index 87e481499..2c6ebfe73 100644 --- a/auth/requirements.txt +++ b/auth/requirements.txt @@ -13,5 +13,6 @@ pyyaml==6.0.1 zcrmsdk==3.1.0 # OptScale packages -e tools/optscale_exceptions +-e tools/optscale_time -e optscale_client/config_client -e optscale_client/rest_api_client diff --git a/bi_exporter/Dockerfile b/bi_exporter/Dockerfile index ad6e59fdf..068b38602 100644 --- a/bi_exporter/Dockerfile +++ b/bi_exporter/Dockerfile @@ -8,6 +8,7 @@ WORKDIR /usr/src/app/ 
COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/rest_api_client optscale_client/rest_api_client +COPY tools/optscale_time tools/optscale_time COPY bi_exporter/requirements.txt bi_exporter/requirements.txt diff --git a/bi_exporter/bumblebi/exporter/main.py b/bi_exporter/bumblebi/exporter/main.py index ede1bb910..f1f3b6090 100644 --- a/bi_exporter/bumblebi/exporter/main.py +++ b/bi_exporter/bumblebi/exporter/main.py @@ -1,6 +1,5 @@ import argparse import os -from datetime import datetime from typing import Optional from kombu import Exchange, Queue, Connection @@ -12,6 +11,7 @@ from optscale_client.config_client.client import Client as ConfigClient from bi_exporter.bumblebi.exporter.exporter_factory import ExporterFactory +from tools.optscale_time import utcnow_timestamp EXCHANGE_NAME = 'bi-exporter' QUEUE_NAME = 'bi-exporter' @@ -45,7 +45,7 @@ def get_consumers(self, Consumer, channel): @staticmethod def _now_ts(): - return int(datetime.utcnow().timestamp()) + return utcnow_timestamp() @property def valid_states_for_export(self): diff --git a/bi_exporter/bumblebi/tests/test_worker.py b/bi_exporter/bumblebi/tests/test_worker.py index 7e751855d..8d1b5bc9c 100644 --- a/bi_exporter/bumblebi/tests/test_worker.py +++ b/bi_exporter/bumblebi/tests/test_worker.py @@ -4,6 +4,7 @@ from datetime import datetime, timedelta from unittest.mock import MagicMock, PropertyMock, patch, mock_open, call from optscale_client.config_client.client import Client as ConfigClient +from tools.optscale_time import utcnow, utcnow_timestamp from bi_exporter.bumblebi.exporter.main import Worker @@ -57,7 +58,7 @@ def setUp(self) -> None: self.tag.encode('utf-8')).decode(): self.tag}, 'region': 'region', 'first_seen': 0, - 'last_seen': int(datetime.utcnow().timestamp()), + 'last_seen': utcnow_timestamp(), 'active': True, 'resource_type': 'Instance', 'service_name': 'service_name', @@ -98,7 +99,7 @@ def setUp(self) -> None: 'BaseExporter._get_resources', return_value=[self.mongo_resource]).start() patch('bi_exporter.bumblebi.exporter.exporter.AwsExporter._upload').start() - yesterday = datetime.utcnow() - timedelta(days=1) + yesterday = utcnow() - timedelta(days=1) self.clichouse_expenses = [ (yesterday, self.cloud_acc['id'], self.mongo_resource['_id'], 348.75)] patch('bi_exporter.bumblebi.exporter.exporter.' 
diff --git a/bi_exporter/requirements.txt b/bi_exporter/requirements.txt index 311743824..78dc6a247 100644 --- a/bi_exporter/requirements.txt +++ b/bi_exporter/requirements.txt @@ -7,3 +7,4 @@ pymongo==4.6.3 # OptScale packages -e optscale_client/config_client -e optscale_client/rest_api_client +-e tools/optscale_time diff --git a/bulldozer/bulldozer_api/Dockerfile b/bulldozer/bulldozer_api/Dockerfile index 57698f971..4ce0525bc 100644 --- a/bulldozer/bulldozer_api/Dockerfile +++ b/bulldozer/bulldozer_api/Dockerfile @@ -10,6 +10,7 @@ COPY bulldozer/bulldozer_api/migrations ./migrations COPY optscale_client/aconfig_cl optscale_client/aconfig_cl COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/insider_client optscale_client/insider_client +COPY tools/optscale_time tools/optscale_time RUN pip install --no-cache-dir -r /usr/src/app/bulldozer/requirements.txt diff --git a/bulldozer/bulldozer_api/cost_calc.py b/bulldozer/bulldozer_api/cost_calc.py index 75231120e..8ae804f80 100644 --- a/bulldozer/bulldozer_api/cost_calc.py +++ b/bulldozer/bulldozer_api/cost_calc.py @@ -10,6 +10,7 @@ from optscale_client.config_client.client import Client as ConfigClient from optscale_client.insider_client.client import Client as InsiderClient +from tools.optscale_time import utcnow_timestamp SECS_IN_HR = 3600 # seconds in hour @@ -66,7 +67,7 @@ def calc_runner_cost( ) if hourly_price: started_at = runner.get('started_at') - now = int(datetime.datetime.utcnow().timestamp()) + now = utcnow_timestamp() if started_at: destroyed_at = runner.get('destroyed_at') or now cost = self.get_cost(hourly_price, destroyed_at - started_at) diff --git a/bulldozer/bulldozer_api/requirements.txt b/bulldozer/bulldozer_api/requirements.txt index 1feeacc9e..0a14cbc5e 100644 --- a/bulldozer/bulldozer_api/requirements.txt +++ b/bulldozer/bulldozer_api/requirements.txt @@ -9,3 +9,4 @@ mongodb-migrations==1.2.1 -e optscale_client/config_client -e optscale_client/insider_client -e optscale_client/aconfig_cl +-e tools/optscale_time diff --git a/bulldozer/bulldozer_api/server.py b/bulldozer/bulldozer_api/server.py index 6f2d64e69..3a14ad994 100644 --- a/bulldozer/bulldozer_api/server.py +++ b/bulldozer/bulldozer_api/server.py @@ -18,7 +18,7 @@ from bulldozer.bulldozer_api.utils import permutation from optscale_client.aconfig_cl.aconfig_cl import AConfigCl - +from tools.optscale_time import utcnow_timestamp app = Sanic("bulldozer") @@ -152,7 +152,7 @@ async def create_token(request): d = { "_id": str(uuid.uuid4()), "token": token, - "created": int(datetime.datetime.utcnow().timestamp()), + "created": utcnow_timestamp(), "deleted_at": 0, } await db.token.insert_one( @@ -183,8 +183,7 @@ async def delete_token(request, token: str): await db.token.update_one( {"_id": token_id}, { '$set': { - "deleted_at": int( - datetime.datetime.utcnow().timestamp()), + "deleted_at": utcnow_timestamp(), } }) return json( @@ -246,7 +245,7 @@ async def create_template(request): "token": token, "tags": tags, "hyperparameters": hyperparameters, - "created_at": int(datetime.datetime.utcnow().timestamp()), + "created_at": utcnow_timestamp(), "deleted_at": 0 } await db.template.insert_one(d) @@ -401,7 +400,7 @@ async def delete_template(request, id_: str): status_code=409) await db.template.update_one({"_id": id_}, {'$set': { - "deleted_at": int(datetime.datetime.utcnow().timestamp()) + "deleted_at": utcnow_timestamp() }}) return json( '', @@ -458,7 +457,7 @@ async def submit_tasks(runners, state): "state": state, "runner_id": 
runner, "try": 0, - "updated": int(datetime.datetime.utcnow().timestamp()), + "updated": utcnow_timestamp(), "reason": "", "infra_try": 0 } @@ -502,7 +501,7 @@ async def create_runset(request, template_id: str): open_ingress = doc.get("open_ingress", False) runset_id = str(uuid.uuid4()) runset_cnt = await db.runset.count_documents({"template_id": template_id}) - created_at = int(datetime.datetime.utcnow().timestamp()) + created_at = utcnow_timestamp() d = { "_id": runset_id, "name": NameGenerator.get_random_name(), diff --git a/bulldozer/bulldozer_worker/Dockerfile b/bulldozer/bulldozer_worker/Dockerfile index c1934fc18..e61aeb042 100644 --- a/bulldozer/bulldozer_worker/Dockerfile +++ b/bulldozer/bulldozer_worker/Dockerfile @@ -18,6 +18,7 @@ COPY optscale_client/arcee_client optscale_client/arcee_client COPY optscale_client/bulldozer_client optscale_client/bulldozer_client COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/rest_api_client optscale_client/rest_api_client +COPY tools/optscale_time tools/optscale_time RUN pip install --no-cache-dir -r bulldozer/bulldozer_worker/requirements.txt COPY bulldozer/bulldozer_worker/*.py bulldozer/bulldozer_worker/ diff --git a/bulldozer/bulldozer_worker/requirements.txt b/bulldozer/bulldozer_worker/requirements.txt index cf36bfb41..9c04483f5 100644 --- a/bulldozer/bulldozer_worker/requirements.txt +++ b/bulldozer/bulldozer_worker/requirements.txt @@ -7,4 +7,4 @@ pymustache==0.3 -e optscale_client/config_client -e optscale_client/rest_api_client -e optscale_client/bulldozer_client - +-e tools/optscale_time diff --git a/bulldozer/bulldozer_worker/tasks.py b/bulldozer/bulldozer_worker/tasks.py index 0e3bdeacd..f56de76d2 100644 --- a/bulldozer/bulldozer_worker/tasks.py +++ b/bulldozer/bulldozer_worker/tasks.py @@ -3,6 +3,7 @@ from bulldozer.bulldozer_worker.infra import Infra, InfraException from bulldozer.bulldozer_worker.name_generator import NameGenerator +from tools.optscale_time import utcnow_timestamp LOG = logging.getLogger(__name__) @@ -258,7 +259,7 @@ def check_destroy_conditions(self): started_at = runner["started_at"] if started_at: threshold = started_at + max_duration - now = datetime.datetime.utcnow().timestamp() + now = utcnow_timestamp() LOG.info("runner id %s, current time: %d, threshold: %d", runner_id, now, threshold) if now > threshold: @@ -374,7 +375,7 @@ def _exec(self): self.bulldozer_cl.update_runner( runner_id, state=TaskState.ERROR, - destroyed_at=int(datetime.datetime.utcnow().timestamp())) + destroyed_at=utcnow_timestamp()) self.update_reason() self.message.ack() @@ -458,12 +459,12 @@ def _exec(self): self.bulldozer_cl.update_runner( runner_id, state=TaskState.WAITING_ARCEE, - started_at=int(datetime.datetime.utcnow().timestamp()), + started_at=utcnow_timestamp(), instance_id=id_, ip_addr=ip_addr, return_code=0 ) - self.body["updated"] = int(datetime.datetime.utcnow().timestamp()) + self.body["updated"] = utcnow_timestamp() self.update_task_state() super()._exec() @@ -490,7 +491,7 @@ def _exec(self): if not run_id: # check timeout last_updated = int(self.body.get("updated")) - current_time = int(datetime.datetime.utcnow().timestamp()) + current_time = utcnow_timestamp() wait_time = last_updated + ARCEE_WAIT_TIMEOUT_SEC LOG.info("runs not found. 
current time: %d, wait time: %s", current_time, wait_time) @@ -504,7 +505,7 @@ def _exec(self): run_id=run_id, state=TaskState.STARTED, ) - self.body["updated"] = int(datetime.datetime.utcnow().timestamp()) + self.body["updated"] = utcnow_timestamp() self.update_task_state() super()._exec() @@ -551,9 +552,9 @@ def _exec(self): self.bulldozer_cl.update_runner( runner_id, state=TaskState.DESTROYED, - destroyed_at=int(datetime.datetime.utcnow().timestamp()) + destroyed_at=utcnow_timestamp() ) - self.body["updated"] = int(datetime.datetime.utcnow().timestamp()) + self.body["updated"] = utcnow_timestamp() self.update_task_state() super()._exec() diff --git a/bumischeduler/Dockerfile b/bumischeduler/Dockerfile index 7c9bb18b2..62b0f8704 100644 --- a/bumischeduler/Dockerfile +++ b/bumischeduler/Dockerfile @@ -6,6 +6,7 @@ ENV PYTHONPATH /usr/src/app/ COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/rest_api_client optscale_client/rest_api_client +COPY tools/optscale_time tools/optscale_time COPY bumischeduler/requirements.txt ./bumischeduler/requirements.txt RUN pip install --no-cache-dir -r bumischeduler/requirements.txt diff --git a/bumischeduler/bumischeduler/controllers/schedule.py b/bumischeduler/bumischeduler/controllers/schedule.py index 99244916f..88f462a6a 100644 --- a/bumischeduler/bumischeduler/controllers/schedule.py +++ b/bumischeduler/bumischeduler/controllers/schedule.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import timedelta import logging import etcd @@ -7,6 +7,7 @@ from kombu.pools import producers from optscale_client.rest_api_client.client_v2 import Client as RestClient +from tools.optscale_time import utcnow, utcnow_timestamp LOG = logging.getLogger(__name__) @@ -69,7 +70,7 @@ def get_checklists(self): return res['checklists'] def generate_tasks(self): - now = datetime.utcnow() + now = utcnow() checklists = self.get_checklists() bumi_worker_params = self.get_bumi_worker_params() scheduled = [] @@ -96,8 +97,7 @@ def generate_tasks(self): checklist['id'], {'next_run': next_run, 'last_run': int(now.timestamp())}) tasks.append({ - 'last_update': int( - datetime.utcnow().timestamp()), + 'last_update': utcnow_timestamp(), 'tries_count': 0, 'organization_id': checklist['organization_id'], 'checklist_id': checklist['id'], diff --git a/bumischeduler/bumischeduler/tests/unittests/test_scheduler.py b/bumischeduler/bumischeduler/tests/unittests/test_scheduler.py index f5ef63c58..7db679fb8 100644 --- a/bumischeduler/bumischeduler/tests/unittests/test_scheduler.py +++ b/bumischeduler/bumischeduler/tests/unittests/test_scheduler.py @@ -1,10 +1,9 @@ import unittest - -from datetime import datetime from unittest.mock import patch, call, MagicMock from bumischeduler.bumischeduler.controllers.schedule import ( ScheduleController, RESCHEDULE_TIMEOUT) +from tools.optscale_time import utcnow GET_CHECKLISTS = ("bumischeduler.bumischeduler.controllers.schedule." 
"ScheduleController.get_checklists") @@ -43,7 +42,7 @@ def test_no_schedules(self, p_create_tasks, p_get_checklists): @patch(GET_CHECKLISTS) @patch(CREATE_TASKS) def test_initial(self, p_create_tasks, p_get_checklists): - now = datetime.utcnow() + now = utcnow() checklist = { 'id': 'aabe9d07-2eca-42de-9d2d-ad3984c4fb0f', 'last_run': 0, @@ -70,7 +69,7 @@ def test_initial(self, p_create_tasks, p_get_checklists): @patch(GET_CHECKLISTS) @patch(CREATE_TASKS) def test_completed(self, p_create_tasks, p_get_checklists): - now = datetime.utcnow() + now = utcnow() checklist = { 'id': 'aabe9d07-2eca-42de-9d2d-ad3984c4fb0f', 'last_run': int(now.timestamp()) - 10, @@ -86,7 +85,7 @@ def test_completed(self, p_create_tasks, p_get_checklists): @patch(GET_CHECKLISTS) @patch(CREATE_TASKS) def test_in_progress(self, p_create_tasks, p_get_checklists): - now = datetime.utcnow() + now = utcnow() checklist = { 'id': 'aabe9d07-2eca-42de-9d2d-ad3984c4fb0f', 'last_run': int(now.timestamp()) - 10, @@ -102,7 +101,7 @@ def test_in_progress(self, p_create_tasks, p_get_checklists): @patch(GET_CHECKLISTS) @patch(CREATE_TASKS) def test_reschedule(self, p_create_tasks, p_get_checklists): - now = datetime.utcnow() + now = utcnow() checklist = { 'id': 'aabe9d07-2eca-42de-9d2d-ad3984c4fb0f', 'last_run': int(now.timestamp()) - RESCHEDULE_TIMEOUT, diff --git a/bumischeduler/requirements.txt b/bumischeduler/requirements.txt index 08889b735..bd21371ab 100644 --- a/bumischeduler/requirements.txt +++ b/bumischeduler/requirements.txt @@ -3,3 +3,4 @@ kombu==5.3.4 # OptScale packages -e optscale_client/config_client -e optscale_client/rest_api_client +-e tools/optscale_time diff --git a/bumiworker/Dockerfile b/bumiworker/Dockerfile index 89828fd59..637295315 100644 --- a/bumiworker/Dockerfile +++ b/bumiworker/Dockerfile @@ -10,6 +10,7 @@ COPY optscale_client/metroculus_client optscale_client/metroculus_client COPY optscale_client/insider_client optscale_client/insider_client COPY optscale_client/rest_api_client optscale_client/rest_api_client COPY tools/cloud_adapter tools/cloud_adapter +COPY tools/optscale_time tools/optscale_time COPY bumiworker/requirements.txt ./bumiworker/requirements.txt RUN pip install --no-cache-dir -r bumiworker/requirements.txt diff --git a/bumiworker/bumiworker/modules/abandoned_base.py b/bumiworker/bumiworker/modules/abandoned_base.py index 4a7ade7b6..b96ae7c40 100644 --- a/bumiworker/bumiworker/modules/abandoned_base.py +++ b/bumiworker/bumiworker/modules/abandoned_base.py @@ -3,6 +3,7 @@ from bumiworker.bumiworker.modules.base import ( ArchiveBase, ArchiveReason, ModuleBase, DAYS_IN_MONTH ) +from tools.optscale_time import utcnow, startday class AbandonedBase(ModuleBase): @@ -26,10 +27,8 @@ def get_active_resources(self, cloud_account_ids, start_date, return resources_by_account_map def get_avg_daily_expenses(self, resource_ids, start_date): - today = datetime.utcnow().replace( - hour=0, minute=0, second=0, microsecond=0) - start_date = start_date.replace( - hour=0, minute=0, second=0, microsecond=0) + today = startday(utcnow()) + start_date = startday(start_date) external_table = [{'id': r_id} for r_id in resource_ids] query = """ SELECT resource_id, sum(cost * sign), min(date) @@ -103,7 +102,7 @@ def excluded_pools(self): return self.get_options().get('excluded_pools') def _get(self): - now = datetime.utcnow() + now = utcnow() start_date = now - timedelta(days=self.days_threshold) cloud_accounts = self.get_cloud_accounts(self.SUPPORTED_CLOUD_TYPES, diff --git 
a/bumiworker/bumiworker/modules/archive/abandoned_images.py b/bumiworker/bumiworker/modules/archive/abandoned_images.py index be0ef698e..0513b68d4 100644 --- a/bumiworker/bumiworker/modules/archive/abandoned_images.py +++ b/bumiworker/bumiworker/modules/archive/abandoned_images.py @@ -1,5 +1,5 @@ from collections import defaultdict -from datetime import datetime, timedelta +from datetime import timedelta from bumiworker.bumiworker.consts import ArchiveReason from bumiworker.bumiworker.modules.base import ArchiveBase @@ -7,6 +7,7 @@ AbandonedImages as AbandonedImagesRecommendation, SUPPORTED_CLOUD_TYPES ) +from tools.optscale_time import utcnow class AbandonedImages(ArchiveBase, AbandonedImagesRecommendation): @@ -22,7 +23,7 @@ def supported_cloud_types(self): def _get(self, previous_options, optimizations, cloud_accounts_map, **kwargs): days_threshold = previous_options['days_threshold'] - now = datetime.utcnow() + now = utcnow() start_date = now - timedelta(days=days_threshold) account_optimizations_map = defaultdict(list) diff --git a/bumiworker/bumiworker/modules/archive/abandoned_instances.py b/bumiworker/bumiworker/modules/archive/abandoned_instances.py index e02370a62..da51e08a0 100644 --- a/bumiworker/bumiworker/modules/archive/abandoned_instances.py +++ b/bumiworker/bumiworker/modules/archive/abandoned_instances.py @@ -7,7 +7,7 @@ from bumiworker.bumiworker.modules.recommendations.abandoned_instances import ( AbandonedInstances as AbandonedInstancesRecommendation, SUPPORTED_CLOUD_TYPES) - +from tools.optscale_time import utcnow LOG = logging.getLogger(__name__) @@ -28,7 +28,7 @@ def _get(self, previous_options, optimizations, cloud_accounts_map, cpu_percent_threshold = previous_options['cpu_percent_threshold'] network_bps_threshold = previous_options['network_bps_threshold'] - now = datetime.utcnow() + now = utcnow() start_date = now - timedelta(days=days_threshold) cloud_acc_instances_map = defaultdict(dict) for cloud_acc_id, instances in self.get_active_resources( diff --git a/bumiworker/bumiworker/modules/base.py b/bumiworker/bumiworker/modules/base.py index b75b845ed..79a7fc7ad 100644 --- a/bumiworker/bumiworker/modules/base.py +++ b/bumiworker/bumiworker/modules/base.py @@ -10,6 +10,8 @@ from pymongo import MongoClient, UpdateOne from optscale_client.rest_api_client.client_v2 import Client as RestClient +from tools.optscale_time import (utcfromtimestamp, utcnow, startday, + utcnow_timestamp) from bumiworker.bumiworker.consts import ArchiveReason @@ -28,11 +30,11 @@ def __init__(self, module, module_type, organization_id): self._start = None def __enter__(self): - self._start = datetime.utcnow().timestamp() + self._start = utcnow_timestamp() return self def __exit__(self, exc_type, exc, exc_tb): - total = datetime.utcnow().timestamp() - self._start + total = utcnow_timestamp() - self._start LOG.info( '%s module %s (organization_id %s) completed in %0.2f seconds', self.module_type.capitalize(), self.module, self.organization_id, total) @@ -161,7 +163,7 @@ def unique_record_keys(self): @staticmethod def timestamp_to_day_start(timestamp) -> datetime: - return datetime.utcfromtimestamp(timestamp).replace( + return utcfromtimestamp(timestamp).replace( hour=0, minute=0, second=0, microsecond=0) def get_organization_currency(self): @@ -231,10 +233,9 @@ def get_resources_stuck_in_state(self, resource_type, status_field_name, _, response = self.rest_client.cloud_resources_discover( self.organization_id, resource_type) starting_point = int( - (datetime.utcnow() - 
timedelta(days=delta_days)).timestamp() + (utcnow() - timedelta(days=delta_days)).timestamp() ) - today = datetime.utcnow().replace(hour=0, minute=0, second=0, - microsecond=0) + today = startday(utcnow()) month_ago_timestamp = (today - timedelta(days=1) - timedelta( days=DAYS_IN_MONTH)) resources = response['data'] @@ -250,10 +251,10 @@ def get_by_cost_saving_timestamp(resource, is_saving=True): if is_saving: return (month_ago_timestamp if int(month_ago_timestamp.timestamp()) > res_dt else - datetime.utcfromtimestamp(res_dt).replace( + utcfromtimestamp(res_dt).replace( hour=0, minute=0, second=0, microsecond=0)) else: - return datetime.utcfromtimestamp(res_dt).replace( + return utcfromtimestamp(res_dt).replace( hour=0, minute=0, second=0, microsecond=0) def get_cost_saving(is_saving=True): diff --git a/bumiworker/bumiworker/modules/obsolete_snapshots_base.py b/bumiworker/bumiworker/modules/obsolete_snapshots_base.py index f5ee8c933..60e232219 100644 --- a/bumiworker/bumiworker/modules/obsolete_snapshots_base.py +++ b/bumiworker/bumiworker/modules/obsolete_snapshots_base.py @@ -8,6 +8,7 @@ from bumiworker.bumiworker.consts import ArchiveReason from bumiworker.bumiworker.modules.base import ArchiveBase, ModuleBase +from tools.optscale_time import utcnow, startday BULK_SIZE = 2000 DAYS_IN_MONTH = 30 @@ -230,7 +231,7 @@ def get_used_resources(self, now, cloud_account_id, cloud_config, def _get(self, previous_options, optimizations, cloud_accounts_map, **kwargs): - now = datetime.utcnow() + now = utcnow() days_threshold = previous_options['days_threshold'] obsolete_threshold = timedelta(days_threshold) diff --git a/bumiworker/bumiworker/modules/recommendations/abandoned_images.py b/bumiworker/bumiworker/modules/recommendations/abandoned_images.py index b45ac622b..a0f0eb25d 100644 --- a/bumiworker/bumiworker/modules/recommendations/abandoned_images.py +++ b/bumiworker/bumiworker/modules/recommendations/abandoned_images.py @@ -1,7 +1,8 @@ import logging from collections import OrderedDict -from datetime import datetime, timedelta +from datetime import timedelta from bumiworker.bumiworker.modules.abandoned_base import AbandonedBase +from tools.optscale_time import utcnow LOG = logging.getLogger(__name__) @@ -49,7 +50,7 @@ def _get(self): SUPPORTED_CLOUD_TYPES, skip_cloud_accounts) cloud_accounts = list(cloud_account_map.values()) cloud_accounts_ids = list(cloud_account_map.keys()) - starting_point = datetime.utcnow() - timedelta(days=days_threshold) + starting_point = utcnow() - timedelta(days=days_threshold) employees = self.get_employees() pools = self.get_pools() diff --git a/bumiworker/bumiworker/modules/recommendations/abandoned_instances.py b/bumiworker/bumiworker/modules/recommendations/abandoned_instances.py index f67669b47..e65a64850 100644 --- a/bumiworker/bumiworker/modules/recommendations/abandoned_instances.py +++ b/bumiworker/bumiworker/modules/recommendations/abandoned_instances.py @@ -4,6 +4,7 @@ from optscale_client.metroculus_client.client import Client as MetroculusClient from bumiworker.bumiworker.modules.abandoned_base import AbandonedBase +from tools.optscale_time import utcnow SUPPORTED_CLOUD_TYPES = [ 'aws_cnr', @@ -90,7 +91,7 @@ def _get(self): (days_threshold, cpu_percent_threshold, network_bps_threshold, excluded_pools, skip_cloud_accounts) = self.get_options_values() - now = datetime.utcnow() + now = utcnow() start_date = now - timedelta(days=days_threshold) cloud_accounts = self.get_cloud_accounts(SUPPORTED_CLOUD_TYPES, diff --git 
a/bumiworker/bumiworker/modules/recommendations/instance_migration.py b/bumiworker/bumiworker/modules/recommendations/instance_migration.py index 39688e0b0..498a7e323 100644 --- a/bumiworker/bumiworker/modules/recommendations/instance_migration.py +++ b/bumiworker/bumiworker/modules/recommendations/instance_migration.py @@ -5,7 +5,7 @@ from tools.cloud_adapter.clouds.aws import Aws from tools.cloud_adapter.clouds.alibaba import Alibaba - +from tools.optscale_time import utcnow from bumiworker.bumiworker.modules.base import ModuleBase @@ -113,7 +113,7 @@ def get_skus_from_cloud(sku): similar_skus = self.aws.get_similar_sku_prices(sku) updates = [] for sku in similar_skus: - sku['updated_at'] = datetime.utcnow() + sku['updated_at'] = utcnow() updates.append(UpdateOne( filter={'sku': sku['sku']}, update={'$set': sku}, @@ -124,7 +124,7 @@ def get_skus_from_cloud(sku): sku_dict = list(self.aws_prices.find({ 'sku': sku, - 'updated_at': {'$gte': datetime.utcnow() - timedelta(days=60)} + 'updated_at': {'$gte': utcnow() - timedelta(days=60)} })) if sku_dict: LOG.info('Found SKU %s for instance %s in DB', sku, resource_id) @@ -154,7 +154,7 @@ def get_aws_recommendations(self, instance_map, cloud_account_map, {'cloud_account_id': { '$in': list(cloud_account_map.keys())}}, {'start_date': { - '$gte': datetime.utcnow() - timedelta(days=10)}}, + '$gte': utcnow() - timedelta(days=10)}}, {'cost': {'$ne': 0}}, ] }}, diff --git a/bumiworker/bumiworker/modules/recommendations/instance_subscription.py b/bumiworker/bumiworker/modules/recommendations/instance_subscription.py index 540d80346..9d049af02 100644 --- a/bumiworker/bumiworker/modules/recommendations/instance_subscription.py +++ b/bumiworker/bumiworker/modules/recommendations/instance_subscription.py @@ -2,9 +2,9 @@ from collections import defaultdict, OrderedDict from concurrent.futures.thread import ThreadPoolExecutor -from datetime import datetime, timedelta +from datetime import timedelta from requests import HTTPError - +from tools.optscale_time import utcnow from optscale_client.insider_client.client import Client as InsiderClient from bumiworker.bumiworker.modules.base import ModuleBase @@ -180,7 +180,7 @@ def _get(self): cloud_account_map = self.get_cloud_accounts( SUPPORTED_CLOUD_TYPES, skip_cloud_accounts) cloud_account_ids = list(cloud_account_map.keys()) - now = datetime.utcnow() + now = utcnow() range_start_ts = int( (now - timedelta(days=days_threshold)).timestamp()) cloud_acc_instance_map = self.get_cloud_acc_instances_map( diff --git a/bumiworker/bumiworker/modules/recommendations/nebius_migration.py b/bumiworker/bumiworker/modules/recommendations/nebius_migration.py index 22835481e..8cd91e209 100644 --- a/bumiworker/bumiworker/modules/recommendations/nebius_migration.py +++ b/bumiworker/bumiworker/modules/recommendations/nebius_migration.py @@ -2,11 +2,12 @@ import re import json from collections import OrderedDict, defaultdict -from datetime import datetime, timedelta +from datetime import timedelta from optscale_client.insider_client.client import Client as InsiderClient from tools.cloud_adapter.clouds.nebius import Nebius, PLATFORMS from concurrent.futures.thread import ThreadPoolExecutor from bumiworker.bumiworker.modules.base import ModuleBase +from tools.optscale_time import utcnow LOG = logging.getLogger(__name__) DEFAULT_DAYS_THRESHOLD = 30 @@ -219,7 +220,7 @@ def _get(self): supported_cloud_types=list(cloud_func_map.keys()), skip_cloud_accounts=skip_cloud_accounts) cloud_account_ids = list(cloud_account_map.keys()) - dt = 
datetime.utcnow() - timedelta(seconds=days_threshold * DAY_IN_SEC) + dt = utcnow() - timedelta(seconds=days_threshold * DAY_IN_SEC) month_multiplier = DAYS_IN_MONTH / days_threshold last_seen = int(dt.timestamp()) instances = self.mongo_client.restapi.resources.find({ diff --git a/bumiworker/bumiworker/modules/recommendations/obsolete_images.py b/bumiworker/bumiworker/modules/recommendations/obsolete_images.py index 50a6b9b8d..a023dca98 100644 --- a/bumiworker/bumiworker/modules/recommendations/obsolete_images.py +++ b/bumiworker/bumiworker/modules/recommendations/obsolete_images.py @@ -5,6 +5,7 @@ from bumiworker.bumiworker.modules.base import ModuleBase from tools.cloud_adapter.cloud import Cloud as CloudAdapter +from tools.optscale_time import utcnow DEFAULT_DAYS_THRESHOLD = 7 BULK_SIZE = 1000 @@ -179,7 +180,7 @@ def _get(self): cloud_accounts = list(cloud_account_map.values()) account_id_type_map = {x['id']: x['type'] for x in cloud_accounts} - starting_point = datetime.utcnow() - timedelta(days=days_threshold) + starting_point = utcnow() - timedelta(days=days_threshold) images_map = self._get_images_map(cloud_accounts, starting_point) image_ids = list(images_map.keys()) @@ -228,7 +229,7 @@ def _get(self): snapshot_info_map = self.get_snapshot_info_map( account_id_type_map, snapshot_image_map, starting_point) - today = datetime.utcnow() + today = utcnow() _, days_in_month = monthrange(today.year, today.month) for snapshot_id, image_ids in snapshot_image_map.items(): for image_id in image_ids: diff --git a/bumiworker/bumiworker/modules/recommendations/short_living_instances.py b/bumiworker/bumiworker/modules/recommendations/short_living_instances.py index c3f98b7d0..0c56aba58 100644 --- a/bumiworker/bumiworker/modules/recommendations/short_living_instances.py +++ b/bumiworker/bumiworker/modules/recommendations/short_living_instances.py @@ -3,6 +3,7 @@ from datetime import datetime, timedelta from bumiworker.bumiworker.modules.base import ModuleBase +from tools.optscale_time import utcfromtimestamp, utcnow DAYS_RANGE = 3 LIVE_HRS_THRESHOLD = 6 @@ -73,8 +74,8 @@ def _get(self): cloud_account_map = self.get_cloud_accounts( supported_cloud_types=SUPPORTED_CLOUD_TYPES, skip_cloud_accounts=skip_cloud_accounts) - now = datetime.utcnow() - start_datetime = datetime.utcfromtimestamp(0) + now = utcnow() + start_datetime = utcfromtimestamp(0) start_date = now - timedelta(days=days_threshold) first_seen = start_date.replace(hour=0, minute=0, second=0, microsecond=0).timestamp() diff --git a/bumiworker/bumiworker/modules/reserved_instances_base.py b/bumiworker/bumiworker/modules/reserved_instances_base.py index 93e71574e..e816a55d7 100644 --- a/bumiworker/bumiworker/modules/reserved_instances_base.py +++ b/bumiworker/bumiworker/modules/reserved_instances_base.py @@ -6,6 +6,7 @@ from bumiworker.bumiworker.consts import ArchiveReason from bumiworker.bumiworker.modules.base import ArchiveBase, ModuleBase +from tools.optscale_time import utcnow DEFAULT_DAYS_THRESHOLD = 90 LOG = logging.getLogger(__name__) @@ -90,7 +91,7 @@ def _get(self): cloud_account_map = self.get_cloud_accounts( self.SUPPORTED_CLOUD_TYPES, skip_cloud_accounts) cloud_account_ids = list(cloud_account_map.keys()) - now = datetime.utcnow() + now = utcnow() range_start_ts = int((now - timedelta(days=days_threshold)).timestamp()) instance_map = self.get_instances_map(cloud_account_ids, range_start_ts) cloud_resource_ids = list(instance_map.keys()) diff --git a/bumiworker/bumiworker/modules/rightsizing_base.py 
b/bumiworker/bumiworker/modules/rightsizing_base.py index 60df4e84f..7bbb30096 100644 --- a/bumiworker/bumiworker/modules/rightsizing_base.py +++ b/bumiworker/bumiworker/modules/rightsizing_base.py @@ -2,7 +2,7 @@ import logging from collections import defaultdict -from datetime import datetime, timedelta +from datetime import timedelta from math import ceil import re @@ -10,7 +10,7 @@ from optscale_client.insider_client.client import Client as InsiderClient from optscale_client.metroculus_client.client import Client as MetroculusClient - +from tools.optscale_time import utcnow from bumiworker.bumiworker.modules.base import ( ModuleBase, ArchiveBase, ArchiveReason ) @@ -100,7 +100,7 @@ def get_recommended_cpu(self, flavor_cpu, instance_metrics, @staticmethod def get_common_match_pipeline(resource_ids, cloud_account_ids): - now = datetime.utcnow() + now = utcnow() return { '$match': { '$and': [ @@ -369,7 +369,7 @@ def _get(self): self.cloud_account_map = self.get_cloud_accounts( supported_types, skip_cloud_accounts) - min_dt = datetime.utcnow() - timedelta(days=days_threshold) + min_dt = utcnow() - timedelta(days=days_threshold) instances = self._get_instances(list(self.cloud_account_map.keys()), int(min_dt.timestamp())) @@ -418,7 +418,7 @@ def write_stat(msg): def get_base_agr_cpu_metric(self, cloud_account_id, resource_ids, days_threshold): - now = datetime.utcnow() + now = utcnow() start = now - timedelta(days=days_threshold) _, metrics = self.metroculus_cl.get_aggregated_metrics( cloud_account_id, resource_ids, int(start.timestamp()), @@ -668,7 +668,7 @@ def _get(self, previous_options, optimizations, cloud_accounts_map, days_threshold = previous_options['days_threshold'] cloud_acc_instances_map = defaultdict(dict) - min_dt = datetime.utcnow() - timedelta(days=days_threshold) + min_dt = utcnow() - timedelta(days=days_threshold) for cloud_acc_id, instances in self._get_instances( list(cloud_accounts_map.keys()), int(min_dt.timestamp())).items(): diff --git a/bumiworker/bumiworker/modules/stuck_in_state_for_a_long_time_base.py b/bumiworker/bumiworker/modules/stuck_in_state_for_a_long_time_base.py index 1d73e3cf0..005cb03db 100644 --- a/bumiworker/bumiworker/modules/stuck_in_state_for_a_long_time_base.py +++ b/bumiworker/bumiworker/modules/stuck_in_state_for_a_long_time_base.py @@ -1,10 +1,11 @@ import logging from collections import defaultdict -from datetime import datetime, timedelta +from datetime import timedelta from bumiworker.bumiworker.consts import ArchiveReason from bumiworker.bumiworker.modules.base import ArchiveBase +from tools.optscale_time import utcnow LOG = logging.getLogger(__name__) @@ -30,7 +31,7 @@ def is_state_changed(self, resource, start_date): def _get(self, previous_options, optimizations, cloud_accounts_map, **kwargs): - now = datetime.utcnow() + now = utcnow() days_threshold = previous_options['days_threshold'] start_date = int((now - timedelta(days=days_threshold)).timestamp()) account_optimizations_map = defaultdict(list) diff --git a/bumiworker/bumiworker/tasks.py b/bumiworker/bumiworker/tasks.py index 28e11f0ff..854ad745e 100644 --- a/bumiworker/bumiworker/tasks.py +++ b/bumiworker/bumiworker/tasks.py @@ -1,4 +1,3 @@ -import datetime import json import os import uuid @@ -15,6 +14,7 @@ from optscale_client.herald_client.client_v2 import Client as HeraldClient from optscale_client.rest_api_client.client_v2 import Client as RestClient +from tools.optscale_time import utcnow_timestamp LOG = get_logger(__name__) @@ -143,15 +143,14 @@ def _execute(self): class 
UpdateTimeout(Continue): def _execute(self): - self.body['last_update'] = int( - datetime.datetime.utcnow().timestamp()) + self.body['last_update'] = utcnow_timestamp() super()._execute() class CheckTimeoutThreshold(UpdateTimeout): def check_timeout(self): # MAX_UPDATE_THRESHOLD from last step execution exceeded - if (int(datetime.datetime.utcnow().timestamp()) - + if (utcnow_timestamp() - self.body['last_update'] > self.body['task_timeout']): raise BumiTaskTimeoutError( 'Timeout error while process task %s, step %s' % ( @@ -161,14 +160,13 @@ def check_timeout(self): class CheckWaitThreshold(Continue): def _handle_exception(self, exc): # further waiters should be able to count from scratch - self.body['last_update'] = int( - datetime.datetime.utcnow().timestamp()) + self.body['last_update'] = utcnow_timestamp() super()._handle_exception(exc) def check_timeout(self): # MAX_WAIT_THRESHOLD from previous step execution exceeded - if (int(datetime.datetime.utcnow().timestamp()) - - self.body['last_update'] > self.body['wait_timeout']): + if (utcnow_timestamp() - self.body['last_update'] > + self.body['wait_timeout']): raise BumiTaskWaitError( 'Wait error while process task %s, step %s' % ( task_str(self.body), self.step)) @@ -181,8 +179,8 @@ def is_delayed(self): def _handle_exception(self, exc): # put msg to queue and hope that it'll gracefully finish - if (int(datetime.datetime.utcnow().timestamp()) - - self.body['last_update'] <= self.body['wait_timeout']): + if (utcnow_timestamp() - self.body['last_update'] <= + self.body['wait_timeout']): self.on_continue_cb(self.body, self.is_delayed) else: LOG.error('Aborting task %s, step %s since graceful fail seems impossible', @@ -276,8 +274,7 @@ def update_task_state(self): def _get_child_task(self, module): return { - 'last_update': int( - datetime.datetime.utcnow().timestamp()), + 'last_update': utcnow_timestamp(), 'tries_count': 0, 'created_at': self.body['created_at'], 'checklist_id': self.body['checklist_id'], @@ -337,8 +334,7 @@ def _execute(self): s3_objects = self.s3_client.list_objects_v2( Bucket=BUCKET_NAME, Prefix=prefix) if s3_objects['KeyCount'] == self.body['children_count']: - self.body['last_update'] = int( - datetime.datetime.utcnow().timestamp()) + self.body['last_update'] = utcnow_timestamp() self.update_task_state() super()._execute() diff --git a/bumiworker/requirements.txt b/bumiworker/requirements.txt index 6e582aa0f..1438be1d8 100644 --- a/bumiworker/requirements.txt +++ b/bumiworker/requirements.txt @@ -4,6 +4,7 @@ boto3==1.34.7 clickhouse-driver==0.2.6 # OptScale packages -e tools/cloud_adapter +-e tools/optscale_time -e optscale_client/config_client -e optscale_client/herald_client -e optscale_client/insider_client diff --git a/diworker/Dockerfile b/diworker/Dockerfile index 8ca17c31e..98b40aef5 100644 --- a/diworker/Dockerfile +++ b/diworker/Dockerfile @@ -7,6 +7,7 @@ COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/insider_client optscale_client/insider_client COPY optscale_client/rest_api_client optscale_client/rest_api_client COPY tools/cloud_adapter tools/cloud_adapter +COPY tools/optscale_time tools/optscale_time COPY diworker/*.py diworker/ COPY diworker/requirements.txt diworker/requirements.txt diff --git a/diworker/diworker/importers/alibaba.py b/diworker/diworker/importers/alibaba.py index 7d1880484..3629102dc 100644 --- a/diworker/diworker/importers/alibaba.py +++ b/diworker/diworker/importers/alibaba.py @@ -8,6 +8,7 @@ from diworker.diworker.importers.base import 
BaseReportImporter from diworker.diworker.utils import bytes_to_gb, retry_mongo_upsert from optscale_client.herald_client.client_v2 import Client as HeraldClient +import tools.optscale_time as opttime LOG = logging.getLogger(__name__) CHUNK_SIZE = 200 @@ -147,7 +148,7 @@ def _load_billing_items(self, current_day): def load_raw_data(self): chunk = [] - now = datetime.utcnow() + now = opttime.utcnow() current_day = self.period_start.replace( hour=0, minute=0, second=0, microsecond=0) while current_day <= now: @@ -260,8 +261,8 @@ def _get_resource_type(self, expense): return product_detail def get_resource_info_from_expenses(self, expenses): - first_seen = datetime.utcnow() - last_seen = datetime.utcfromtimestamp(0).replace() + first_seen = opttime.utcnow() + last_seen = opttime.utcfromtimestamp(0).replace() for e in expenses: start_date = e['start_date'] if start_date and start_date < first_seen: @@ -339,7 +340,7 @@ def _merge_same_billing_items(self, items): def get_full_months_in_period(self): full_month_dates = [] start = self.period_start - end = datetime.utcnow() + end = opttime.utcnow() month_start = datetime( year=start.year, month=start.month, day=1, hour=0, minute=0, second=0) @@ -551,7 +552,7 @@ def generate_clean_records(self, regeneration=False): super().generate_clean_records(regeneration=regeneration) if self.cloud_acc['last_import_at'] != 0: self.fix_snapshot_chain_expenses() - now = datetime.utcnow() + now = opttime.utcnow() if (self.period_start.month != now.month or self.period_start.year != now.year): self.check_exp_for_previous_month(now) diff --git a/diworker/diworker/importers/aws.py b/diworker/diworker/importers/aws.py index 1e310d6b6..8cedd6095 100644 --- a/diworker/diworker/importers/aws.py +++ b/diworker/diworker/importers/aws.py @@ -15,7 +15,7 @@ from functools import cached_property from diworker.diworker.importers.base import CSVBaseReportImporter - +import tools.optscale_time as opttime import pyarrow.parquet as pq LOG = logging.getLogger(__name__) @@ -78,7 +78,7 @@ def __init__(self, *args, **kwargs): 'Savings Plan': [], 'Reserved Instances': [] } - self.import_start_ts = int(datetime.utcnow().timestamp()) + self.import_start_ts = int(opttime.utcnow().timestamp()) self.current_billing_period = None @cached_property @@ -382,7 +382,7 @@ def _convert_to_legacy_csv_columns(self, columns, dict_format=False): def load_csv_report(self, report_path, account_id_ca_id_map, billing_period, skipped_accounts): - date_start = datetime.utcnow() + date_start = opttime.utcnow() with open(report_path, newline='') as csvfile: reader = csv.DictReader(csvfile) reader.fieldnames = self._convert_to_legacy_csv_columns( @@ -399,7 +399,7 @@ def load_csv_report(self, report_path, account_id_ca_id_map, if len(chunk) == CHUNK_SIZE: self.update_raw_records(chunk) chunk = [] - now = datetime.utcnow() + now = opttime.utcnow() if (now - date_start).total_seconds() > 60: LOG.info('report %s: processed %s rows', report_path, record_number) @@ -447,7 +447,7 @@ def load_csv_report(self, report_path, account_id_ca_id_map, def load_parquet_report(self, report_path, account_id_ca_id_map, billing_period, skipped_accounts): - date_start = datetime.utcnow() + date_start = opttime.utcnow() dataframe = pq.read_pandas(report_path).to_pandas() new_columns = self._convert_to_legacy_csv_columns( dataframe.columns, dict_format=True) @@ -514,7 +514,7 @@ def load_parquet_report(self, report_path, account_id_ca_id_map, self._set_resource_id(expense) if expenses: self.update_raw_records(expenses) - now = 
datetime.utcnow() + now = opttime.utcnow() if (now - date_start).total_seconds() > 60: LOG.info('report %s: processed %s rows', report_path, i) date_start = now diff --git a/diworker/diworker/importers/azure.py b/diworker/diworker/importers/azure.py index 392cf1cb6..fc7c8bb86 100644 --- a/diworker/diworker/importers/azure.py +++ b/diworker/diworker/importers/azure.py @@ -2,6 +2,8 @@ import json import logging from datetime import datetime, timezone, timedelta + +import tools.optscale_time as opttime from diworker.diworker.utils import retry_backoff from tools.cloud_adapter.clouds.azure import ( AzureConsumptionException, ExpenseImportScheme, @@ -214,7 +216,7 @@ def _load_raw_usage_data(self, prices): skus_without_prices = set() current_day = self.period_start.replace( hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc) - last_day = datetime.utcnow().replace( + last_day = opttime.utcnow().replace( hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc) while current_day < last_day: LOG.info('Processing raw expenses for %s', current_day) @@ -433,8 +435,8 @@ def get_resource_info_from_expenses(self, expenses): tags = self.extract_tags(expenses[-1].get('tags', {})) service = expenses[-1].get('consumed_service') - first_seen = datetime.utcnow() - last_seen = datetime.utcfromtimestamp(0).replace() + first_seen = opttime.utcnow() + last_seen = opttime.utcfromtimestamp(0).replace() for e in expenses: start_date = e['start_date'] if start_date and start_date < first_seen: diff --git a/diworker/diworker/importers/base.py b/diworker/diworker/importers/base.py index befd6a560..746c56f9c 100644 --- a/diworker/diworker/importers/base.py +++ b/diworker/diworker/importers/base.py @@ -14,6 +14,7 @@ from boto3.session import Config as BotoConfig from tools.cloud_adapter.cloud import Cloud as CloudAdapter from diworker.diworker.utils import retry_mongo_upsert, get_month_start +import tools.optscale_time as opttime LOG = logging.getLogger(__name__) CHUNK_SIZE = 200 @@ -40,7 +41,7 @@ def __init__(self, cloud_account_id, rest_cl, config_cl, mongo_raw, if detect_period_start: self.detect_period_start() self.imported_raw_dates_map = defaultdict(dict) - self.report_identity = datetime.utcnow().timestamp() + self.report_identity = opttime.utcnow().timestamp() @property def cloud_acc(self): @@ -477,12 +478,12 @@ def extract_tag(tag_name, value): def detect_period_start(self): ca_last_import_at = self.cloud_acc.get('last_import_at') if (ca_last_import_at and - datetime.utcfromtimestamp( - ca_last_import_at).month == datetime.utcnow().month): + opttime.utcfromtimestamp( + ca_last_import_at).month == opttime.utcnow().month): last_import_at = self.get_last_import_date(self.cloud_acc_id) # someone cleared expenses collection if not last_import_at: - last_import_at = datetime.utcfromtimestamp( + last_import_at = opttime.utcfromtimestamp( self.cloud_acc['last_import_at']) if last_import_at.day == 1: self.period_start = get_month_start( @@ -490,7 +491,7 @@ def detect_period_start(self): else: self.period_start = last_import_at elif ca_last_import_at: - self.period_start = datetime.utcfromtimestamp( + self.period_start = opttime.utcfromtimestamp( self.cloud_acc['last_import_at']) self.remove_raw_expenses_from_period_start(self.cloud_acc_id) @@ -499,11 +500,11 @@ def detect_period_start(self): def set_period_start(self): if self.need_extend_report_interval: - this_month_start = datetime.utcnow().replace( + this_month_start = opttime.utcnow().replace( day=1, hour=0, minute=0, second=0, microsecond=0) 
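
Note on the helper module: every replacement in this patch funnels through tools/optscale_time, whose source is not part of the hunks shown. The sketch below is inferred purely from the call sites (opttime.utcnow(), utcnow_timestamp(), utcfromtimestamp()) and should be read as an assumption, not the shipped implementation. The apparent intent is naive-UTC helpers built on the timezone-aware API, so call sites keep the exact values datetime.utcnow() and datetime.utcfromtimestamp() used to return while dropping the methods deprecated in Python 3.12.

    # Sketch only: inferred from how this diff calls tools.optscale_time;
    # the real module is not shown in the diff.
    from datetime import datetime, timezone

    def utcnow() -> datetime:
        # Naive UTC "now", value-compatible with datetime.utcnow().
        return datetime.now(tz=timezone.utc).replace(tzinfo=None)

    def utcnow_timestamp() -> int:
        # Epoch seconds computed from an aware datetime, so the result is
        # correct even on hosts whose local timezone is not UTC.
        return int(datetime.now(tz=timezone.utc).timestamp())

    def utcfromtimestamp(timestamp: float) -> datetime:
        # Naive UTC datetime, value-compatible with
        # datetime.utcfromtimestamp().
        return datetime.fromtimestamp(
            timestamp, tz=timezone.utc).replace(tzinfo=None)

Keeping the return values naive means every existing comparison against naive datetimes stored elsewhere (Mongo documents, SQLAlchemy columns) continues to work unchanged.
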
self.period_start = this_month_start - relativedelta(months=+3) else: - self.period_start = get_month_start(datetime.utcnow()) + self.period_start = get_month_start(opttime.utcnow()) def get_last_import_date(self, cloud_account_id, tzinfo=None): max_dt = self.clickhouse_cl.execute( diff --git a/diworker/diworker/importers/databricks.py b/diworker/diworker/importers/databricks.py index 272cf3ef5..0cbe6a71d 100644 --- a/diworker/diworker/importers/databricks.py +++ b/diworker/diworker/importers/databricks.py @@ -6,6 +6,7 @@ from datetime import datetime, timezone, timedelta from tools.cloud_adapter.clouds.databricks import DEFAULT_SKU_PRICES from diworker.diworker.importers.base import BaseReportImporter +import tools.optscale_time as opttime LOG = logging.getLogger(__name__) CHUNK_SIZE = 200 @@ -46,7 +47,7 @@ def detect_period_start(self): def load_raw_data(self): start_date = self.period_start.strftime("%Y-%m") - end_date = datetime.utcnow().strftime("%Y-%m") + end_date = opttime.utcnow().strftime("%Y-%m") usages = self.cloud_adapter.download_usage( start_date, end_date) if not usages: @@ -99,8 +100,8 @@ def _get_cloud_extras(self, info): return res def get_resource_info_from_expenses(self, expenses): - first_seen = datetime.utcnow() - last_seen = datetime.utcfromtimestamp(0) + first_seen = opttime.utcnow() + last_seen = opttime.utcfromtimestamp(0) meta_dict = {} resource_type = None name = None diff --git a/diworker/diworker/importers/environment.py b/diworker/diworker/importers/environment.py index 2d7f0aa49..ec5947b91 100644 --- a/diworker/diworker/importers/environment.py +++ b/diworker/diworker/importers/environment.py @@ -2,6 +2,7 @@ import logging from datetime import datetime, timedelta from diworker.diworker.importers.base import BaseReportImporter +import tools.optscale_time as opttime LOG = logging.getLogger(__name__) CHUNK_SIZE = 200 @@ -40,7 +41,7 @@ def get_unique_field_list(self): ] def load_raw_data(self): - now = datetime.utcnow() + now = opttime.utcnow() org_id = self.cloud_acc['organization_id'] _, resources = self.rest_cl.environment_resource_list(org_id) @@ -91,8 +92,8 @@ def load_raw_data(self): self.update_raw_records(chunk) def get_resource_info_from_expenses(self, expenses): - first_seen = datetime.utcnow() - last_seen = datetime.utcfromtimestamp(0) + first_seen = opttime.utcnow() + last_seen = opttime.utcfromtimestamp(0) resource_type = None for e in expenses: if not resource_type: diff --git a/diworker/diworker/importers/gcp.py b/diworker/diworker/importers/gcp.py index 435ddf945..4898ddddf 100644 --- a/diworker/diworker/importers/gcp.py +++ b/diworker/diworker/importers/gcp.py @@ -1,8 +1,9 @@ from collections import defaultdict import hashlib import logging -from datetime import datetime, timedelta, timezone +from datetime import timedelta from diworker.diworker.importers.base import BaseReportImporter +import tools.optscale_time as opttime LOG = logging.getLogger(__name__) WRITE_CHUNK_SIZE = 200 @@ -16,9 +17,8 @@ class GcpReportImporter(BaseReportImporter): def detect_period_start(self): ca_last_import_at = self.cloud_acc.get('last_import_at') - if (ca_last_import_at and datetime.utcfromtimestamp( - ca_last_import_at).month == datetime.now( - tz=timezone.utc).month): + if (ca_last_import_at and opttime.utcfromtimestamp( + ca_last_import_at).month == opttime.utcnow().month): # When choosing period_start for GCP, prioritize last expense # date over date of the last import run. 
That is because for GCP # the latest expenses are not available immediately and we need to @@ -137,7 +137,7 @@ def _merge_same_billing_items(self, items): def load_raw_data(self): current_day = self.period_start.replace( hour=0, minute=0, second=0, microsecond=0) - now = datetime.utcnow() + now = opttime.utcnow() while current_day <= now: chunk = [] end_date = current_day + timedelta(days=1) @@ -200,8 +200,8 @@ def get_resource_info_from_expenses(self, expenses): expenses[-1]) service = expenses[-1].get('service') region = self.cloud_adapter.fix_region(expenses[-1].get('region')) - first_seen = datetime.utcnow() - last_seen = datetime.utcfromtimestamp(0).replace() + first_seen = opttime.utcnow() + last_seen = opttime.utcfromtimestamp(0).replace() tags = {} system_tags = {} for e in expenses: diff --git a/diworker/diworker/importers/kubernetes.py b/diworker/diworker/importers/kubernetes.py index dcd84f4b8..3e3b24f18 100644 --- a/diworker/diworker/importers/kubernetes.py +++ b/diworker/diworker/importers/kubernetes.py @@ -11,6 +11,7 @@ from tools.cloud_adapter.cloud import Cloud as CloudAdapter from optscale_client.insider_client.client import Client as InsiderClient +import tools.optscale_time as opttime LOG = logging.getLogger(__name__) CHUNK_SIZE = 200 @@ -350,7 +351,7 @@ def recalculate_raw_expenses(self): self.mongo_raw.bulk_write(changes) def load_raw_data(self): - now = datetime.utcnow() + now = opttime.utcnow() dt = now + timedelta(days=1) days = (dt - self.period_start).days LOG.info('Loading metrics for period %s - %s' % ( @@ -394,9 +395,9 @@ def load_raw_data(self): expense = r['metric'].copy() pod_name = expense.get('pod') resource_id = expense.pop('id').split('/pod')[-1] - start_date = datetime.utcfromtimestamp( + start_date = opttime.utcfromtimestamp( dt_timestamp) - timedelta(days=1) - end_date = datetime.utcfromtimestamp(dt_timestamp) + end_date = opttime.utcfromtimestamp(dt_timestamp) if end_date > now: end_date = now worked_hrs = (end_date - start_date @@ -455,9 +456,9 @@ def get_unique_field_list(self): ] def get_resource_info_from_expenses(self, expenses): - first_seen = datetime.utcnow() + first_seen = opttime.utcnow() k8s_node, name, k8s_namespace, k8s_service, job = None, None, None, None, None - last_seen = datetime.utcfromtimestamp(0).replace() + last_seen = opttime.utcfromtimestamp(0).replace() for e in expenses: if not name: name = e.get('pod') diff --git a/diworker/diworker/importers/nebius.py b/diworker/diworker/importers/nebius.py index dc8285d94..fc399bd70 100644 --- a/diworker/diworker/importers/nebius.py +++ b/diworker/diworker/importers/nebius.py @@ -3,6 +3,7 @@ import logging from collections import defaultdict from datetime import datetime, timezone +import tools.optscale_time as opttime from diworker.diworker.importers.base import CSVBaseReportImporter @@ -93,7 +94,7 @@ def load_report(self, report_path, account_id_ca_id_map): def load_csv_report(self, report_path, account_id_ca_id_map, billing_period, skipped_accounts): - date_start = datetime.utcnow() + date_start = opttime.utcnow() with open(report_path, newline='') as csvfile: reader = csv.DictReader(csvfile) chunk = [] @@ -106,7 +107,7 @@ def load_csv_report(self, report_path, account_id_ca_id_map, if len(chunk) == CHUNK_SIZE: self.update_raw_records(chunk) chunk = [] - now = datetime.utcnow() + now = opttime.utcnow() if (now - date_start).total_seconds() > 60: LOG.info('report %s: processed %s rows', report_path, record_number) @@ -189,8 +190,8 @@ def get_resource_type(self, expense): return 
sku_name def get_resource_info_from_expenses(self, expenses): - first_seen = datetime.utcnow() - last_seen = datetime.utcfromtimestamp(0).replace() + first_seen = opttime.utcnow() + last_seen = opttime.utcfromtimestamp(0).replace() tags = {} for e in expenses: start_date = e['start_date'] diff --git a/diworker/diworker/migrations/20200825160000_azure_raw_updates.py b/diworker/diworker/migrations/20200825160000_azure_raw_updates.py index 7aae6735e..d3d45c759 100644 --- a/diworker/diworker/migrations/20200825160000_azure_raw_updates.py +++ b/diworker/diworker/migrations/20200825160000_azure_raw_updates.py @@ -80,7 +80,7 @@ def add_new_fields(self): self.db.raw_expenses.bulk_write(update_requests) def remove_azure_data_from_current_month(self): - month_regex = datetime.utcnow().strftime('%Y-%m') + month_regex = datetime.now(tz=timezone.utc).strftime('%Y-%m') self.db.raw_expenses.delete_many({'usage_start': {'$regex': month_regex}}) def update_resource_id_in_clean_expenses(self): diff --git a/diworker/diworker/migrations/202110200930000_first_last_expenses_in_grouped_collections.py b/diworker/diworker/migrations/202110200930000_first_last_expenses_in_grouped_collections.py index 388f7134e..afbf60e41 100644 --- a/diworker/diworker/migrations/202110200930000_first_last_expenses_in_grouped_collections.py +++ b/diworker/diworker/migrations/202110200930000_first_last_expenses_in_grouped_collections.py @@ -3,7 +3,7 @@ """ import logging from calendar import monthrange -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from diworker.diworker.migrations.base import BaseMigration from pymongo import UpdateOne @@ -27,8 +27,9 @@ def mongo_group_month_ca(self): @staticmethod def get_max_last_expense(): - return (datetime.utcnow().replace( - hour=0, minute=0, second=0, microsecond=0) - timedelta(days=1)) + return (datetime.now(tz=timezone.utc).replace( + hour=0, minute=0, second=0, microsecond=0, tzinfo=None + ) - timedelta(days=1)) @staticmethod def set_first_last_expenses(collection, info, months_list, grouping_field): diff --git a/diworker/diworker/migrations/202112081330000_move_expenses_clickhouse.py b/diworker/diworker/migrations/202112081330000_move_expenses_clickhouse.py index ae14be620..1f898b374 100644 --- a/diworker/diworker/migrations/202112081330000_move_expenses_clickhouse.py +++ b/diworker/diworker/migrations/202112081330000_move_expenses_clickhouse.py @@ -44,7 +44,7 @@ def upgrade(self): org_ids.extend(list(map(lambda x: x['id'], orgs['organizations']))) self.clear_clickhouse_db(clickhouse_client) for i, organization_id in enumerate(org_ids): - start_dt = datetime.utcnow() + start_dt = datetime.now() _, resp = rest_client.cloud_account_list(organization_id) cloud_account_ids = list(map( lambda x: x['id'], resp['cloud_accounts'])) @@ -74,7 +74,7 @@ def upgrade(self): if bulk: total_migrated += len(bulk) self._write_to_clickhouse(clickhouse_client, bulk) - execution_time = (datetime.utcnow() - start_dt).total_seconds() + execution_time = (datetime.now() - start_dt).total_seconds() LOG.info('Expenses for org %s: completed at %s seconds' % ( organization_id, execution_time)) diff --git a/diworker/diworker/migrations/2022061711400000_traffic_expenses_region_fix_aws.py b/diworker/diworker/migrations/2022061711400000_traffic_expenses_region_fix_aws.py index 3e27991fb..9945a4d43 100644 --- a/diworker/diworker/migrations/2022061711400000_traffic_expenses_region_fix_aws.py +++ b/diworker/diworker/migrations/2022061711400000_traffic_expenses_region_fix_aws.py 
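
The get_resource_info_from_expenses() hunks (alibaba and azure above, nebius here, and the gcp/databricks/environment/kubernetes ones elsewhere in this patch) share one scan pattern: seed first_seen with "now" and last_seen with the epoch, then tighten both while iterating the expenses (the last_seen branch varies per importer and is elided in several hunks). A standalone illustration with invented expense rows:

    # Illustration of the recurring first_seen/last_seen scan; the expense
    # rows below are made up for the example.
    from datetime import datetime, timedelta, timezone

    def utcnow():
        return datetime.now(tz=timezone.utc).replace(tzinfo=None)

    expenses = [
        {'start_date': utcnow() - timedelta(days=3)},
        {'start_date': utcnow() - timedelta(days=1)},
    ]
    first_seen = utcnow()             # sentinel: nothing seen earlier yet
    last_seen = datetime(1970, 1, 1)  # sentinel: epoch, nothing seen later
    for e in expenses:
        start_date = e['start_date']
        if start_date and start_date < first_seen:
            first_seen = start_date
        if start_date and start_date > last_seen:
            last_seen = start_date
    print(first_seen, last_seen)

The importers spell the epoch sentinel as opttime.utcfromtimestamp(0), sometimes with a no-op .replace() carried over from the old code; the value is the same naive 1970-01-01.
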
@@ -1,5 +1,5 @@ import logging -from datetime import datetime +from datetime import datetime, timezone from diworker.diworker.migrations.base import BaseMigration from clickhouse_driver import Client as ClickHouseClient from optscale_client.rest_api_client.client_v2 import Client as RestClient @@ -75,7 +75,7 @@ def upgrade(self): """, params={'cloud_accounts': accs_to_fix} ) clickhouse_client.execute('OPTIMIZE TABLE traffic_expenses FINAL') - now = int(datetime.utcnow().timestamp()) + now = int(datetime.now(tz=timezone.utc).timestamp()) for ca in accs_to_fix: LOG.info('Create traffic processing task for %s' % ca) self.rest_cl.traffic_processing_task_create(ca, { diff --git a/diworker/diworker/migrations/2022070516350000_rework_traffic_expenses.py b/diworker/diworker/migrations/2022070516350000_rework_traffic_expenses.py index c5a7107c8..e8075b572 100644 --- a/diworker/diworker/migrations/2022070516350000_rework_traffic_expenses.py +++ b/diworker/diworker/migrations/2022070516350000_rework_traffic_expenses.py @@ -2,7 +2,7 @@ from diworker.diworker.migrations.base import BaseMigration from clickhouse_driver import Client as ClickHouseClient from optscale_client.rest_api_client.client_v2 import Client as RestClient -from datetime import datetime +from datetime import datetime, timezone """ Clickhouse traffic expenses table rework. @@ -31,7 +31,7 @@ def upgrade(self): clickhouse_cl.execute('OPTIMIZE TABLE traffic_expenses FINAL') LOG.info('Creating traffic processing tasks') _, orgs = self.rest_cl.organization_list() - now_ts = int(datetime.utcnow().timestamp()) + now_ts = int(datetime.now(tz=timezone.utc).timestamp()) cnt = 0 for org in orgs['organizations']: _, accs = self.rest_cl.cloud_account_list(org['id']) diff --git a/diworker/diworker/migrations/2022112611000000_created_at_for_aws_expenses.py b/diworker/diworker/migrations/2022112611000000_created_at_for_aws_expenses.py index 4ce681e2e..a052281dd 100644 --- a/diworker/diworker/migrations/2022112611000000_created_at_for_aws_expenses.py +++ b/diworker/diworker/migrations/2022112611000000_created_at_for_aws_expenses.py @@ -67,7 +67,7 @@ def create_index(self, index_fields_list): LOG.info('Index %s already exists' % INDEX_NAME) def upgrade(self): - now = int(datetime.utcnow().timestamp()) + now = int(datetime.now(tz=timezone.utc).timestamp()) cloud_account_ids = self.get_cloud_account_ids() for i, cloud_account_id in enumerate(cloud_account_ids): LOG.info('Started updating raw expenses for ' diff --git a/diworker/diworker/migrations/2023040412300000_sp_ri_expenses.py b/diworker/diworker/migrations/2023040412300000_sp_ri_expenses.py index f4ee1cebb..37e394231 100644 --- a/diworker/diworker/migrations/2023040412300000_sp_ri_expenses.py +++ b/diworker/diworker/migrations/2023040412300000_sp_ri_expenses.py @@ -1,7 +1,7 @@ import logging from clickhouse_driver import Client as ClickHouseClient from collections import defaultdict -from datetime import datetime +from datetime import datetime, timezone from dateutil.relativedelta import relativedelta from diworker.diworker.importers.aws import AWSReportImporter from diworker.diworker.migrations.base import BaseMigration @@ -105,7 +105,7 @@ def delete_clickhouse_expenses(self, cloud_account_id, resource_ids): @staticmethod def get_months(start_date): months_starts = [] - end_date = datetime.utcnow() + end_date = datetime.now(tz=timezone.utc).replace(tzinfo=None) while start_date < end_date: months_starts.append(start_date) start_date = start_date + relativedelta(months=1) @@ -197,7 +197,7 @@ def 
process(self, cloud_account_id, resource_type): old_cloud_res_ids = set() new_cloud_res_ids = set() affected_cloud_res_ids = set() - min_date = datetime.utcnow() + min_date = datetime.now(tz=timezone.utc).replace(tzinfo=None) update_count = 0 for i in range(0, len(ri_sp_expenses), RAW_EXPENSES_CHUNK_SIZE): raw_expenses_ids_chunk = ri_sp_expenses[i:i+RAW_EXPENSES_CHUNK_SIZE] diff --git a/diworker/diworker/migrations/2023112915300000_resource_dates.py b/diworker/diworker/migrations/2023112915300000_resource_dates.py index edcfe387d..1e4e680b2 100644 --- a/diworker/diworker/migrations/2023112915300000_resource_dates.py +++ b/diworker/diworker/migrations/2023112915300000_resource_dates.py @@ -1,7 +1,7 @@ import logging from diworker.diworker.migrations.base import BaseMigration from optscale_client.rest_api_client.client_v2 import Client as RestClient -from datetime import datetime +from datetime import datetime, timezone from pymongo import UpdateOne """ @@ -41,8 +41,8 @@ def resources(self): @staticmethod def to_start_date(timestamp): - return datetime.utcfromtimestamp(timestamp).replace( - hour=0, minute=0, second=0, microsecond=0) + return datetime.fromtimestamp(timestamp, tz=timezone.utc).replace( + hour=0, minute=0, second=0, microsecond=0, tzinfo=None) def add_date_fields(self): for is_demo in [False, True]: diff --git a/diworker/requirements.txt b/diworker/requirements.txt index 05eb8308e..026310613 100644 --- a/diworker/requirements.txt +++ b/diworker/requirements.txt @@ -9,6 +9,7 @@ clickhouse-driver==0.2.6 pyrabbit==1.1.0 # OptScale packages -e tools/cloud_adapter +-e tools/optscale_time -e optscale_client/config_client -e optscale_client/herald_client -e optscale_client/insider_client diff --git a/docker_images/bi_scheduler/scheduler.py b/docker_images/bi_scheduler/scheduler.py index ad9cf8a8e..50b842cd4 100644 --- a/docker_images/bi_scheduler/scheduler.py +++ b/docker_images/bi_scheduler/scheduler.py @@ -1,6 +1,6 @@ import logging import os -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from functools import cached_property from typing import Dict, List @@ -30,7 +30,7 @@ class BIScheduler: def __init__(self, config_cl: ConfigClient) -> None: self.config_cl = config_cl - self.now = datetime.utcnow() + self.now = datetime.now(tz=timezone.utc).replace(tzinfo=None) @cached_property def rest_cl(self) -> RestClient: diff --git a/docker_images/booking_observer/Dockerfile b/docker_images/booking_observer/Dockerfile index f6352eece..ee3bde105 100644 --- a/docker_images/booking_observer/Dockerfile +++ b/docker_images/booking_observer/Dockerfile @@ -5,6 +5,7 @@ ENV PYTHONPATH /usr/src/app/ WORKDIR /usr/src/app/ COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/rest_api_client optscale_client/rest_api_client +COPY tools/optscale_time tools/optscale_time COPY docker_images/booking_observer/requirements.txt . 
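
A pattern worth calling out in the migration and scheduler hunks above: standalone scripts inline datetime.now(tz=timezone.utc) instead of importing the new package (presumably to keep migrations free of the extra dependency), and they almost always append .replace(tzinfo=None). The strip matters because Python refuses to compare aware and naive datetimes, and the surrounding code base stays naive throughout. Self-contained demonstration:

    # Why the migrations strip tzinfo: aware and naive datetimes do not mix.
    from datetime import datetime, timezone

    aware = datetime.fromtimestamp(0, tz=timezone.utc)
    naive = aware.replace(tzinfo=None)

    assert naive == datetime(1970, 1, 1)   # naive UTC epoch
    try:
        _ = aware < datetime(1970, 1, 2)   # aware vs naive comparison
    except TypeError as exc:
        print(exc)  # can't compare offset-naive and offset-aware datetimes
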
RUN pip install --no-cache-dir -r requirements.txt COPY docker_images/booking_observer/scheduler.py docker_images/booking_observer/worker.py docker_images/booking_observer/ diff --git a/docker_images/booking_observer/requirements.txt b/docker_images/booking_observer/requirements.txt index 0b57d93a9..1c138d29d 100644 --- a/docker_images/booking_observer/requirements.txt +++ b/docker_images/booking_observer/requirements.txt @@ -3,3 +3,4 @@ pymongo==4.6.3 # OptScale packages -e optscale_client/config_client -e optscale_client/rest_api_client +-e tools/optscale_time diff --git a/docker_images/booking_observer/scheduler.py b/docker_images/booking_observer/scheduler.py index 922040076..ce3c0032e 100644 --- a/docker_images/booking_observer/scheduler.py +++ b/docker_images/booking_observer/scheduler.py @@ -1,12 +1,12 @@ import logging import os -from datetime import datetime from kombu import Connection as QConnection, Exchange from kombu.pools import producers from optscale_client.config_client.client import Client as ConfigClient from optscale_client.rest_api_client.client_v2 import Client as RestClient +from tools.optscale_time import utcnow_timestamp LOG = logging.getLogger(__name__) @@ -25,7 +25,7 @@ def publish_tasks(org_ids, config_client): producer.publish( { 'organization_id': org_id, - 'observe_time': int(datetime.utcnow().timestamp()) + 'observe_time': utcnow_timestamp() }, serializer='json', exchange=task_exchange, diff --git a/docker_images/booking_observer/worker.py b/docker_images/booking_observer/worker.py index 85731d4e3..5062b1964 100644 --- a/docker_images/booking_observer/worker.py +++ b/docker_images/booking_observer/worker.py @@ -1,7 +1,6 @@ #!/usr/bin/env python import os import time -from datetime import datetime from threading import Thread from pymongo import MongoClient, UpdateOne from kombu.mixins import ConsumerMixin @@ -14,6 +13,7 @@ from optscale_client.config_client.client import Client as ConfigClient from optscale_client.rest_api_client.client_v2 import Client as RestClient +from tools.optscale_time import utcnow_timestamp, utcnow BOOKING_OBSERVER_EXCHANGE_NAME = 'booking-activities' BOOKING_OBSERVER_QUEUE_NAME = 'booking-activity' @@ -65,10 +65,10 @@ def process_booking_activities(self, task): if not org_id or not observe_time: raise Exception('Invalid task received: {}'.format(task)) - start_time = datetime.utcnow() + start_time = utcnow() self._process(org_id, observe_time) LOG.info('Booking observer process for org %s completed in %s seconds', - org_id, (datetime.utcnow() - start_time).total_seconds()) + org_id, (utcnow() - start_time).total_seconds()) def get_start_date(self, organization_id): try: @@ -93,7 +93,7 @@ def _update_observe_time(self, observe_time, org_id): ]) def _process(self, organization_id, observe_time): - end_date = int(datetime.utcnow().timestamp()) + end_date = utcnow_timestamp() start_date = self.get_start_date(organization_id) _, bookings = self.rest_cl.shareable_book_list( organization_id, start_date, end_date) diff --git a/docker_images/cleanmongodb/clean-mongo-db.py b/docker_images/cleanmongodb/clean-mongo-db.py index 289b69724..dd5d0f95d 100644 --- a/docker_images/cleanmongodb/clean-mongo-db.py +++ b/docker_images/cleanmongodb/clean-mongo-db.py @@ -3,7 +3,7 @@ import logging from bson.objectid import ObjectId from optscale_client.config_client.client import Client as ConfigClient -from datetime import datetime +from datetime import datetime, timezone from pymongo import MongoClient from sqlalchemy import create_engine from 
sqlalchemy.orm import Session @@ -248,7 +248,7 @@ def get_deleted_cloud_account(self): def update_cleaned_at(self, cloud_account_id=None, organization_id=None): session = self.get_session() - now = int(datetime.utcnow().timestamp()) + now = int(datetime.now(tz=timezone.utc).timestamp()) if cloud_account_id: LOG.info( f'Updating cleaned_at for cloud account {cloud_account_id}') diff --git a/docker_images/demo_org_cleanup/cleanup.py b/docker_images/demo_org_cleanup/cleanup.py index 7e8d6d466..230da3c51 100644 --- a/docker_images/demo_org_cleanup/cleanup.py +++ b/docker_images/demo_org_cleanup/cleanup.py @@ -1,6 +1,6 @@ import logging import os -from datetime import timedelta, datetime +from datetime import timedelta, datetime, timezone from requests import HTTPError @@ -15,7 +15,8 @@ def main(config_cl): rest_cl.secret = config_cl.cluster_secret() _, response = rest_cl.organization_list({'is_demo': True}) - old_org_ts = int((datetime.utcnow() - timedelta(days=7)).timestamp()) + old_org_ts = int((datetime.now(tz=timezone.utc) - timedelta( + days=7)).timestamp()) for org in response['organizations']: if org['created_at'] > old_org_ts: continue diff --git a/docker_images/failed_imports_dataset_generator/Dockerfile b/docker_images/failed_imports_dataset_generator/Dockerfile index f592cb055..ef7e939d3 100644 --- a/docker_images/failed_imports_dataset_generator/Dockerfile +++ b/docker_images/failed_imports_dataset_generator/Dockerfile @@ -5,6 +5,7 @@ WORKDIR /usr/src/app/ ENV PYTHONPATH /usr/src/app/ COPY optscale_client/config_client optscale_client/config_client +COPY tools/optscale_time tools/optscale_time COPY docker_images/failed_imports_dataset_generator/requirements.txt docker_images/failed_imports_dataset_generator/ RUN pip install --no-cache-dir -r docker_images/failed_imports_dataset_generator/requirements.txt diff --git a/docker_images/failed_imports_dataset_generator/failed_imports_dataset_generator.py b/docker_images/failed_imports_dataset_generator/failed_imports_dataset_generator.py index 8dc5b862d..d62f75626 100644 --- a/docker_images/failed_imports_dataset_generator/failed_imports_dataset_generator.py +++ b/docker_images/failed_imports_dataset_generator/failed_imports_dataset_generator.py @@ -3,11 +3,11 @@ import logging import os from urllib import parse -from datetime import datetime from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker import boto3 from optscale_client.config_client.client import Client as ConfigClient +from tools.optscale_time import utcfromtimestamp, utcnow_timestamp DEFAULT_ETCD_HOST = 'etcd' DEFAULT_ETCD_PORT = 80 @@ -89,13 +89,13 @@ def _dt_to_human_readable(dt): def _timestamp_to_human_readable(ts): try: - return _dt_to_human_readable(datetime.utcfromtimestamp(float(ts))) + return _dt_to_human_readable(utcfromtimestamp(float(ts))) except Exception: return None def _get_failed_cloud_accounts(mydb): - now = datetime.utcnow().timestamp() + now = utcnow_timestamp() import_dt = now - IMPORT_THRESHOLD query = f""" SELECT ca_t.id, ca_t.name, ca_t.type, ca_t.created_at, diff --git a/docker_images/failed_imports_dataset_generator/requirements.txt b/docker_images/failed_imports_dataset_generator/requirements.txt index 2ac35a390..06d17c4b6 100644 --- a/docker_images/failed_imports_dataset_generator/requirements.txt +++ b/docker_images/failed_imports_dataset_generator/requirements.txt @@ -4,3 +4,4 @@ SQLAlchemy==1.3.24 # OptScale packages -e optscale_client/config_client +-e tools/optscale_time diff --git 
a/docker_images/herald_executor/Dockerfile b/docker_images/herald_executor/Dockerfile index 8d7ed4ba9..e58139ac5 100644 --- a/docker_images/herald_executor/Dockerfile +++ b/docker_images/herald_executor/Dockerfile @@ -7,6 +7,7 @@ COPY optscale_client/auth_client optscale_client/auth_client COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/rest_api_client optscale_client/rest_api_client COPY optscale_client/herald_client optscale_client/herald_client +COPY tools/optscale_time tools/optscale_time COPY docker_images/herald_executor/requirements.txt docker_images/herald_executor/ RUN pip install --no-cache-dir -r docker_images/herald_executor/requirements.txt diff --git a/docker_images/herald_executor/requirements.txt b/docker_images/herald_executor/requirements.txt index 3423dd324..5be193782 100644 --- a/docker_images/herald_executor/requirements.txt +++ b/docker_images/herald_executor/requirements.txt @@ -5,3 +5,4 @@ currency-symbols==2.0.3 -e optscale_client/rest_api_client -e optscale_client/herald_client -e optscale_client/auth_client +-e tools/optscale_time diff --git a/docker_images/herald_executor/worker.py b/docker_images/herald_executor/worker.py index f62abf2e9..22131f184 100644 --- a/docker_images/herald_executor/worker.py +++ b/docker_images/herald_executor/worker.py @@ -19,6 +19,7 @@ from optscale_client.herald_client.client_v2 import Client as HeraldClient from optscale_client.auth_client.client_v2 import Client as AuthClient from currency_symbols.currency_symbols import CURRENCY_SYMBOLS_MAP +from tools.optscale_time import utcnow_timestamp, utcfromtimestamp LOG = get_logger(__name__) @@ -178,7 +179,7 @@ def send_environment_changes(self, resource, status_changed_info, return env_properties = OrderedDict(resource.get('env_properties', {})) resource_id = resource.get('_id') or resource.get('id') - now_ts = int(datetime.utcnow().timestamp()) + now_ts = utcnow_timestamp() _, shareable_bookings = self.rest_cl.shareable_book_list( organization_id, 0, int(datetime.max.replace(tzinfo=timezone.utc).timestamp())) @@ -229,9 +230,9 @@ def format_remained_time(start_date, end_date): released_at = booking['released_at'] acquired_by_id = booking.get('acquired_by_id') utc_acquired_since = int( - datetime.utcfromtimestamp(acquired_since).timestamp()) + utcfromtimestamp(acquired_since).timestamp()) utc_released_at = int( - datetime.utcfromtimestamp(released_at).timestamp()) + utcfromtimestamp(released_at).timestamp()) user_name = employee_id_map.get(acquired_by_id, {}).get('name') if not user_name: LOG.error('Could not detect employee name for booking %s', @@ -477,14 +478,14 @@ def _get_org_constraint_link(self, constraint, created_at, filters): if val is None: v[i] = get_nil_uuid() link_filters[f] = v - created = datetime.utcfromtimestamp(created_at) + created = utcfromtimestamp(created_at) if constraint['type'] in ['expense_anomaly', 'resource_count_anomaly']: start_date = datetime.combine(created, created.time().min) - timedelta( days=constraint['definition']['threshold_days']) end_date = datetime.combine(created, created.time().max) + timedelta( days=1) elif constraint['type'] == 'expiring_budget': - start_date = datetime.utcfromtimestamp( + start_date = utcfromtimestamp( constraint['definition']['start_date']) end_date = None elif constraint['type'] == 'recurring_budget': @@ -599,7 +600,7 @@ def execute_organization_constraint_violated(self, constraint_id, 'error code: %s' % (constraint_id, code)) latest_hit = max(hits['organization_limit_hits'], 
key=lambda x: x['created_at']) - hit_date = datetime.utcfromtimestamp( + hit_date = utcfromtimestamp( latest_hit['created_at']).strftime('%m/%d/%Y %I:%M %p UTC') if constraint['type'] not in CONSTRAINT_TYPES: raise Exception('Unknown organization constraint ' @@ -609,7 +610,7 @@ def execute_organization_constraint_violated(self, constraint_id, link = self._get_org_constraint_link( constraint, latest_hit['created_at'], c_filters) if constraint['type'] in ['expiring_budget', 'tagging_policy']: - constraint_data['definition']['start_date'] = datetime.utcfromtimestamp( + constraint_data['definition']['start_date'] = utcfromtimestamp( int(constraint_data['definition']['start_date'])).strftime( '%m/%d/%Y %I:%M %p UTC') managers = self.get_owner_manager_infos(organization_id) diff --git a/docker_images/live_demo_generator/Dockerfile b/docker_images/live_demo_generator/Dockerfile index 62224d796..5a5289f98 100644 --- a/docker_images/live_demo_generator/Dockerfile +++ b/docker_images/live_demo_generator/Dockerfile @@ -5,6 +5,7 @@ ENV PYTHONPATH /usr/src/app/ COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/rest_api_client optscale_client/rest_api_client +COPY tools/optscale_time tools/optscale_time COPY docker_images/live_demo_generator/requirements.txt docker_images/live_demo_generator/ RUN pip install --no-cache-dir -r docker_images/live_demo_generator/requirements.txt diff --git a/docker_images/live_demo_generator/requirements.txt b/docker_images/live_demo_generator/requirements.txt index 55459bc9b..07fa09009 100644 --- a/docker_images/live_demo_generator/requirements.txt +++ b/docker_images/live_demo_generator/requirements.txt @@ -4,3 +4,4 @@ pymongo==4.6.3 # OptScale packages -e optscale_client/config_client -e optscale_client/rest_api_client +-e tools/optscale_time diff --git a/docker_images/live_demo_generator/scheduler.py b/docker_images/live_demo_generator/scheduler.py index 76d6e7463..a229fb774 100644 --- a/docker_images/live_demo_generator/scheduler.py +++ b/docker_images/live_demo_generator/scheduler.py @@ -5,6 +5,7 @@ from kombu.pools import producers from pymongo import MongoClient from optscale_client.config_client.client import Client as ConfigClient +from tools.optscale_time import utcnow LOG = logging.getLogger(__name__) DEMO_LIFETIME_DAYS = 2 @@ -38,7 +39,7 @@ def main(config_client): mongo_conn_string = "mongodb://%s:%s@%s:%s" % mongo_params[:-1] mongo_cl = MongoClient(mongo_conn_string) live_demos_collection = mongo_cl.restapi.live_demos - dt = datetime.utcnow() - timedelta(days=DEMO_LIFETIME_DAYS) + dt = utcnow() - timedelta(days=DEMO_LIFETIME_DAYS) count = DEMO_COUNT - live_demos_collection.count_documents({ 'created_at': {'$gte': int(dt.timestamp())} }) diff --git a/docker_images/live_demo_generator/worker.py b/docker_images/live_demo_generator/worker.py index 5e085eac2..ae42cb04a 100644 --- a/docker_images/live_demo_generator/worker.py +++ b/docker_images/live_demo_generator/worker.py @@ -14,6 +14,7 @@ from optscale_client.config_client.client import Client as ConfigClient from optscale_client.rest_api_client.client_v2 import Client as RestClient +from tools.optscale_time import utcnow EXCHANGE_NAME = 'live-demo-generations' QUEUE_NAME = 'live-demo-generation' @@ -66,12 +67,12 @@ def heartbeat(self): time.sleep(1) def generate_live_demo(self): - d_start = datetime.utcnow() + d_start = utcnow() _, response = self.rest_cl.live_demo_create() response['created_at'] = int(d_start.timestamp()) 
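
For the anomaly-constraint links built in _get_org_constraint_link() above, the window arithmetic is compact enough to miss: the hit's creation time is clamped to the bounds of its calendar day, then padded by threshold_days backwards and one day forwards. With illustrative values (the created date and threshold below are invented):

    # Date-window arithmetic used for expense_anomaly /
    # resource_count_anomaly links; values invented for the example.
    from datetime import datetime, timedelta

    created = datetime(2024, 1, 15, 13, 45)
    threshold_days = 7

    # time().min / time().max are 00:00:00 and 23:59:59.999999
    start_date = datetime.combine(
        created, created.time().min) - timedelta(days=threshold_days)
    end_date = datetime.combine(
        created, created.time().max) + timedelta(days=1)

    print(start_date)  # 2024-01-08 00:00:00
    print(end_date)    # 2024-01-16 23:59:59.999999
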
self.mongo_cl.restapi.live_demos.insert_one(response) LOG.info('Live demo generated in %s seconds', - (datetime.utcnow() - d_start).total_seconds()) + (utcnow() - d_start).total_seconds()) if __name__ == '__main__': diff --git a/docker_images/ohsu/controllers/link.py b/docker_images/ohsu/controllers/link.py index 98ccda7c9..e499ea38e 100644 --- a/docker_images/ohsu/controllers/link.py +++ b/docker_images/ohsu/controllers/link.py @@ -3,7 +3,7 @@ import socket import subprocess import time -from datetime import datetime +from datetime import datetime, timezone from requests.exceptions import HTTPError from docker_images.ohsu.controllers.base import BaseController from docker_images.ohsu.controllers.base_async import BaseAsyncControllerWrapper @@ -65,8 +65,8 @@ def _get_shs_port(organization_id): port_str = "Successfully started service 'HistoryServerUI' on port " # spark history server needs some time to start and save logs - start_ts = int(datetime.utcnow().timestamp()) - while int(datetime.utcnow().timestamp()) < ( + start_ts = int(datetime.now(tz=timezone.utc).timestamp()) + while int(datetime.now(tz=timezone.utc).timestamp()) < ( start_ts + SHS_START_TIMEOUT): if os.path.exists(log_file_path): with open(log_file_path, 'r') as f: diff --git a/docker_images/organization_violations/Dockerfile b/docker_images/organization_violations/Dockerfile index 2caa9b862..b029effdc 100644 --- a/docker_images/organization_violations/Dockerfile +++ b/docker_images/organization_violations/Dockerfile @@ -5,6 +5,7 @@ ENV PYTHONPATH /usr/src/app/ COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/rest_api_client optscale_client/rest_api_client +COPY tools/optscale_time tools/optscale_time COPY docker_images/organization_violations/requirements.txt docker_images/organization_violations/ RUN pip install --no-cache-dir -r docker_images/organization_violations/requirements.txt diff --git a/docker_images/organization_violations/requirements.txt b/docker_images/organization_violations/requirements.txt index 643e6dab9..879b753ab 100644 --- a/docker_images/organization_violations/requirements.txt +++ b/docker_images/organization_violations/requirements.txt @@ -3,3 +3,4 @@ kombu==5.3.4 # OptScale packages -e optscale_client/config_client -e optscale_client/rest_api_client +-e tools/optscale_time diff --git a/docker_images/organization_violations/scheduler.py b/docker_images/organization_violations/scheduler.py index 68d478038..ca3037503 100644 --- a/docker_images/organization_violations/scheduler.py +++ b/docker_images/organization_violations/scheduler.py @@ -1,11 +1,11 @@ import logging import os -from datetime import datetime from kombu import Connection as QConnection, Exchange from kombu.pools import producers from optscale_client.config_client.client import Client as ConfigClient from optscale_client.rest_api_client.client_v2 import Client as RestClient +from tools.optscale_time import utcnow LOG = logging.getLogger(__name__) @@ -14,7 +14,7 @@ def publish_tasks(org_ids, config_cl): - now = int(datetime.utcnow().timestamp()) + now = int(utcnow().timestamp()) queue_conn = QConnection('amqp://{user}:{pass}@{host}:{port}'.format( **config_cl.read_branch('/rabbit')), transport_options=RETRY_POLICY) diff --git a/docker_images/organization_violations/worker.py b/docker_images/organization_violations/worker.py index aced2d257..8947c3067 100644 --- a/docker_images/organization_violations/worker.py +++ b/docker_images/organization_violations/worker.py @@ -17,6 +17,7 @@ from 
optscale_client.config_client.client import Client as ConfigClient from optscale_client.rest_api_client.client_v2 import Client as RestClient +from tools.optscale_time import utcnow LOG = get_logger(__name__) @@ -395,7 +396,7 @@ def process_tagging_policy(self, constraint, organization_id, date, notifications, execution_start_ts): c_id = constraint['id'] start_date = constraint['definition']['start_date'] - if datetime.fromtimestamp(start_date) > datetime.utcnow(): + if datetime.fromtimestamp(start_date) > utcnow(): LOG.info(f'Constraint {c_id} is skipped due to start_date') return [] today_date = datetime.fromtimestamp(execution_start_ts) @@ -501,7 +502,7 @@ def process_organization_constraint( pass def process_organization_constraints(self, task): - start = datetime.utcnow() + start = utcnow() start_ts = int(start.timestamp()) org_id = task.get('organization_id') date_ts = task.get('date') @@ -536,7 +537,7 @@ def process_organization_constraints(self, task): self.publish_activities_tasks(notif_tasks) LOG.info('Organization violation process for organization %s completed' ' in %s seconds' % - (org_id, int(datetime.utcnow().timestamp()) - start_ts)) + (org_id, int(utcnow().timestamp()) - start_ts)) def process_task(self, body, message): try: diff --git a/docker_images/resource_discovery/Dockerfile b/docker_images/resource_discovery/Dockerfile index 83a1df10c..026726f40 100644 --- a/docker_images/resource_discovery/Dockerfile +++ b/docker_images/resource_discovery/Dockerfile @@ -6,6 +6,7 @@ ENV PYTHONPATH /usr/src/app/ COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/rest_api_client optscale_client/rest_api_client COPY tools/cloud_adapter tools/cloud_adapter +COPY tools/optscale_time tools/optscale_time COPY docker_images/resource_discovery/requirements.txt docker_images/resource_discovery/ RUN pip install --no-cache-dir -r docker_images/resource_discovery/requirements.txt diff --git a/docker_images/resource_discovery/requirements.txt b/docker_images/resource_discovery/requirements.txt index b9516bafa..d18334961 100644 --- a/docker_images/resource_discovery/requirements.txt +++ b/docker_images/resource_discovery/requirements.txt @@ -4,3 +4,4 @@ kombu==5.3.4 -e optscale_client/config_client -e optscale_client/rest_api_client -e tools/cloud_adapter +-e tools/optscale_time diff --git a/docker_images/resource_discovery/scheduler.py b/docker_images/resource_discovery/scheduler.py index 174693110..02eb59cf9 100644 --- a/docker_images/resource_discovery/scheduler.py +++ b/docker_images/resource_discovery/scheduler.py @@ -10,6 +10,7 @@ from tools.cloud_adapter.model import RES_MODEL_MAP from optscale_client.config_client.client import Client as ConfigClient from optscale_client.rest_api_client.client_v2 import Client as RestClient +from tools.optscale_time import utcnow, utcnow_timestamp LOG = logging.getLogger(__name__) IGNORED_CLOUD_TYPES = ['environment'] @@ -89,7 +90,7 @@ def process(config_cl): secret=config_cl.cluster_secret()) _, response = rest_cl.organization_list({'with_connected_accounts': True}) tasks_map = defaultdict(list) - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() _, _, _, observe_timeout = config_cl.resource_discovery_params() for organization in response['organizations']: try: @@ -117,8 +118,7 @@ def process(config_cl): if di_info['enabled']: rest_cl.discovery_info_update( di_info['id'], { - 'observe_time': int( - datetime.utcnow().timestamp())}) + 'observe_time': utcnow_timestamp()}) 
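
The resource_discovery scheduler above stamps observe_time with utcnow_timestamp() as it enqueues a cloud account for discovery. The condition deciding when a discovery is due again is elided from this diff; the sketch below shows one plausible shape of that check, reusing the field names visible above but an invented helper, so treat it purely as an illustration. It assumes the repo's tools/optscale_time package is installed, as the requirements.txt entries in this patch arrange.

    # Hypothetical due-check; discovery_due() does not exist in the diff.
    from tools.optscale_time import utcnow_timestamp

    def discovery_due(di_info: dict, observe_timeout: int) -> bool:
        # Due once observe_timeout seconds have passed since the last
        # observation of this resource type.
        if not di_info.get('enabled'):
            return False
        return (utcnow_timestamp() -
                di_info.get('observe_time', 0) >= observe_timeout)
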
tasks_map[organization['id']].append( (ca['id'], di_info['resource_type'])) except requests.exceptions.HTTPError as ex: @@ -130,9 +130,9 @@ def process(config_cl): def main(config_client): - start_time = datetime.utcnow() + start_time = utcnow() tasks_map = process(config_client) - exec_time = (datetime.utcnow() - start_time).total_seconds() + exec_time = (utcnow() - start_time).total_seconds() if tasks_map: publish_tasks(config_client, tasks_map) LOG.info('Published %s tasks (%s seconds) for orgs: %s', diff --git a/docker_images/resource_discovery/worker.py b/docker_images/resource_discovery/worker.py index b3ca7d22c..fdc4cfa27 100644 --- a/docker_images/resource_discovery/worker.py +++ b/docker_images/resource_discovery/worker.py @@ -4,7 +4,6 @@ import traceback from concurrent.futures.thread import ThreadPoolExecutor -from datetime import datetime from threading import Event, Thread import queue from kombu.mixins import ConsumerMixin @@ -19,6 +18,7 @@ from tools.cloud_adapter.model import ResourceTypes, RES_MODEL_MAP from optscale_client.config_client.client import Client as ConfigClient from optscale_client.rest_api_client.client_v2 import Client as RestClient +from tools.optscale_time import utcnow, utcnow_timestamp CHUNK_SIZE = 200 @@ -119,7 +119,7 @@ def build_payload(self, resource, resource_type): obj.pop('resource_id', None) obj.pop('organization_id', None) obj['resource_type'] = getattr(ResourceTypes, resource_type).value - obj['last_seen'] = int(datetime.utcnow().timestamp()) + obj['last_seen'] = utcnow_timestamp() obj['active'] = True return obj @@ -248,7 +248,7 @@ def is_404(exception): def _discover_resources(self, cloud_acc_id, resource_type): LOG.info('Starting %s discovery for cloud_account %s', resource_type, cloud_acc_id) - start_time = datetime.utcnow() + start_time = utcnow() if not self.check_discover_enabled(cloud_acc_id, resource_type): LOG.info('Discover of cloud account id %s for resource type %s is ' 'not enabled. 
Discover will be skipped.', cloud_acc_id, @@ -308,7 +308,7 @@ def _discover_resources(self, cloud_acc_id, resource_type): last_discovery_at=int(start_time.timestamp())) LOG.info('%s discovery for cloud_account %s completed in %s', resource_type, cloud_acc_id, - (datetime.utcnow() - start_time).total_seconds()) + (utcnow() - start_time).total_seconds()) def discover_resources(self, task): cloud_acc_id = task.get('cloud_account_id') @@ -320,7 +320,7 @@ def discover_resources(self, task): except Exception as ex: self._update_discovery_info( cloud_acc_id, resource_type, - last_error_at=int(datetime.utcnow().timestamp()), + last_error_at=utcnow_timestamp(), last_error=str(ex)[:255]) raise diff --git a/docker_images/slacker_executor/Dockerfile b/docker_images/slacker_executor/Dockerfile index 5985dc03b..42ba2dec4 100644 --- a/docker_images/slacker_executor/Dockerfile +++ b/docker_images/slacker_executor/Dockerfile @@ -7,6 +7,7 @@ COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/auth_client optscale_client/auth_client COPY optscale_client/slacker_client optscale_client/slacker_client COPY optscale_client/rest_api_client optscale_client/rest_api_client +COPY tools/optscale_time tools/optscale_time COPY docker_images/slacker_executor/requirements.txt docker_images/slacker_executor/ RUN pip install --no-cache-dir -r docker_images/slacker_executor/requirements.txt diff --git a/docker_images/slacker_executor/requirements.txt b/docker_images/slacker_executor/requirements.txt index 582b50774..ff7e583d3 100644 --- a/docker_images/slacker_executor/requirements.txt +++ b/docker_images/slacker_executor/requirements.txt @@ -5,3 +5,4 @@ kombu==5.3.4 -e optscale_client/config_client -e optscale_client/rest_api_client -e optscale_client/slacker_client +-e tools/optscale_time diff --git a/docker_images/slacker_executor/worker.py b/docker_images/slacker_executor/worker.py index a2334c462..9cb234b5f 100644 --- a/docker_images/slacker_executor/worker.py +++ b/docker_images/slacker_executor/worker.py @@ -15,6 +15,7 @@ from optscale_client.config_client.client import Client as ConfigClient from optscale_client.rest_api_client.client_v2 import Client as RestClient from optscale_client.slacker_client.client import Client as SlackerClient +from tools.optscale_time import utcfromtimestamp, utcnow_timestamp LOG = get_logger(__name__) QUEUE_NAME = 'slacker-task' @@ -104,7 +105,7 @@ def get_consumers(self, consumer, channel): @staticmethod def ts_to_slacker_time_format(timestamp): if timestamp: - date = datetime.utcfromtimestamp(timestamp) + date = utcfromtimestamp(timestamp) return datetime.strftime(date, "%m/%d/%Y %H:%M UTC") else: return 'Not set' @@ -151,7 +152,7 @@ def resource_booking_status(self, current_booking): @staticmethod def get_current_booking(bookings): - now_ts = int(datetime.utcnow().timestamp()) + now_ts = utcnow_timestamp() for booking in bookings: if booking['acquired_since'] <= now_ts and ( booking['released_at'] == 0 or @@ -160,7 +161,7 @@ def get_current_booking(bookings): @staticmethod def get_upcoming_booking(bookings, current_booking=None): - acquired_since = int(datetime.utcnow().timestamp()) + acquired_since = utcnow_timestamp() if current_booking and current_booking.get('released_at'): acquired_since = current_booking['released_at'] future_bookings = [x for x in bookings diff --git a/docker_images/users_dataset_generator/users_dataset_generator.py b/docker_images/users_dataset_generator/users_dataset_generator.py index 609bb0431..d504a1fce 100644 --- 
a/docker_images/users_dataset_generator/users_dataset_generator.py +++ b/docker_images/users_dataset_generator/users_dataset_generator.py @@ -4,7 +4,7 @@ import re import os from collections import defaultdict -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from urllib import parse from sqlalchemy import create_engine @@ -188,7 +188,8 @@ def _get_checklist(mydb, org_id): def _get_expenses_by_clouds(ch_cl, cloud_account_ids): if not cloud_account_ids: return {} - start_date = datetime.utcnow() - timedelta(days=30) + start_date = datetime.now(tz=timezone.utc).replace( + tzinfo=None) - timedelta(days=30) query = """ SELECT cloud_account_id, diff --git a/docker_images/webhook_executor/Dockerfile b/docker_images/webhook_executor/Dockerfile index 3116def17..a59ef4da3 100644 --- a/docker_images/webhook_executor/Dockerfile +++ b/docker_images/webhook_executor/Dockerfile @@ -5,6 +5,7 @@ ENV PYTHONPATH /usr/src/app/ COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/rest_api_client optscale_client/rest_api_client +COPY tools/optscale_time tools/optscale_time COPY docker_images/webhook_executor/requirements.txt docker_images/webhook_executor/ RUN pip install --no-cache-dir -r docker_images/webhook_executor/requirements.txt diff --git a/docker_images/webhook_executor/requirements.txt b/docker_images/webhook_executor/requirements.txt index 55459bc9b..07fa09009 100644 --- a/docker_images/webhook_executor/requirements.txt +++ b/docker_images/webhook_executor/requirements.txt @@ -4,3 +4,4 @@ pymongo==4.6.3 # OptScale packages -e optscale_client/config_client -e optscale_client/rest_api_client +-e tools/optscale_time diff --git a/docker_images/webhook_executor/worker.py b/docker_images/webhook_executor/worker.py index 95346615c..b1634bdff 100644 --- a/docker_images/webhook_executor/worker.py +++ b/docker_images/webhook_executor/worker.py @@ -14,6 +14,7 @@ from kombu import Exchange, Queue, binding from optscale_client.config_client.client import Client as ConfigClient from optscale_client.rest_api_client.client_v2 import Client as RestClient +from tools.optscale_time import utcnow, utcnow_timestamp QUEUE_NAME = 'webhook-task' @@ -141,7 +142,7 @@ def validate_enironment_webhook(self, webhook, meta_info): return True def execute_webhook(self, task): - start_time = datetime.utcnow() + start_time = utcnow() org_id = task.get('organization_id') object_id = task.get('object_id') action = task.get('action') @@ -205,13 +206,13 @@ def execute_webhook(self, task): 'headers': webhook['headers'], 'body': json.dumps(request_body), 'success': success, - 'execution_time': int(datetime.utcnow().timestamp()), + 'execution_time': utcnow_timestamp(), 'execution_result': '%s, %s' % (code, result) } self.mongo_cl.restapi.webhook_logs.insert_one(log_info) LOG.info('Webhook executor for %s (%s) completed in %s seconds', webhook['object_type'], webhook['object_id'], - (datetime.utcnow() - start_time).total_seconds()) + (utcnow() - start_time).total_seconds()) def process_task(self, body, message): try: diff --git a/gemini/gemini_worker/migrator.py b/gemini/gemini_worker/migrator.py index ca5928239..20f6e7efc 100644 --- a/gemini/gemini_worker/migrator.py +++ b/gemini/gemini_worker/migrator.py @@ -2,7 +2,7 @@ import hashlib import importlib import logging -from datetime import datetime +from datetime import datetime, timezone from clickhouse_driver import Client as ClickHouseClient LOG = logging.getLogger(__name__) @@ -115,7 +115,7 @@ def 
update_versions_table(self, filename): "version": self._get_version_from_name(filename), "md5": self._get_script_from_name(filename), "script": self._get_script_from_name(filename), - "created_at": datetime.utcnow(), + "created_at": datetime.now(tz=timezone.utc).replace(tzinfo=None), } ] self.clickhouse_client.execute( diff --git a/herald/herald_server/alembic/versions/000000000002_filter_criterias.py b/herald/herald_server/alembic/versions/000000000002_filter_criterias.py index 7231288df..1aa76c200 100644 --- a/herald/herald_server/alembic/versions/000000000002_filter_criterias.py +++ b/herald/herald_server/alembic/versions/000000000002_filter_criterias.py @@ -5,7 +5,7 @@ Create Date: 2018-03-19 14:36:42.383444 """ -from datetime import datetime +from datetime import datetime, timezone from alembic import op import sqlalchemy as sa @@ -21,7 +21,7 @@ def upgrade(): field_table = op.create_table( 'field', sa.Column('created_at', sa.Integer(), nullable=False, - default=datetime.utcnow().timestamp), + default=datetime.now(tz=timezone.utc).timestamp), sa.Column('deleted_at', sa.Integer(), nullable=False, default=0), sa.Column('id', sa.Integer(), nullable=False, autoincrement=True), sa.Column('name', sa.String(length=256), nullable=True), diff --git a/herald/herald_server/models/models.py b/herald/herald_server/models/models.py index 53137a779..081cb8356 100644 --- a/herald/herald_server/models/models.py +++ b/herald/herald_server/models/models.py @@ -1,6 +1,6 @@ # pylint: disable=abstract-method import json -from datetime import datetime +from datetime import datetime, timezone from sqlalchemy.ext.declarative.base import _declarative_constructor from sqlalchemy.ext.declarative import declarative_base, declared_attr @@ -249,7 +249,7 @@ def validator(self, value): def get_current_timestamp(): - return int(datetime.utcnow().timestamp()) + return int(datetime.now(tz=timezone.utc).timestamp()) class PermissionKeys(Enum): diff --git a/herald/herald_server/processors/main.py b/herald/herald_server/processors/main.py index 992f14dc5..e12772a85 100644 --- a/herald/herald_server/processors/main.py +++ b/herald/herald_server/processors/main.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import datetime, timezone import json import logging import os @@ -90,7 +90,7 @@ def perform_email(self, email_task): def load_task(self, task, cleanup_on_fail=True): bucket, filename = task['download_url'].split('/') - report_path = 'task_%s' % int(datetime.utcnow().timestamp()) + report_path = 'task_%s' % int(datetime.now(tz=timezone.utc).timestamp()) LOG.info('loading %s from %s', bucket, filename) res = None diff --git a/herald/modules/template_generator/template_generator.py b/herald/modules/template_generator/template_generator.py index 0bd5a7871..c2e230c97 100644 --- a/herald/modules/template_generator/template_generator.py +++ b/herald/modules/template_generator/template_generator.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import datetime, timezone # optscale images are also added to cdn, but not used in emails yet # 'background': 'https://cdn.hystax.com/OptScale/email-header-background.png' @@ -75,7 +75,8 @@ def get_default_template(): 'texts': { 'product': 'Hystax OptScale', 'dont_reply': 'Please do not reply to this email', - 'copyright': 'Copyright © 2016-%s' % datetime.utcnow().year, + 'copyright': 'Copyright © 2016-%s' % datetime.now( + tz=timezone.utc).year, 'address': '1250 Borregas Ave, Sunnyvale, CA 94089, USA', 'phone': '+1 628 251-1280' }, diff --git 
a/insider/insider_api/Dockerfile b/insider/insider_api/Dockerfile index f7c8267dc..173d1a030 100644 --- a/insider/insider_api/Dockerfile +++ b/insider/insider_api/Dockerfile @@ -8,6 +8,7 @@ COPY tools/optscale_exceptions tools/optscale_exceptions COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/rest_api_client optscale_client/rest_api_client COPY tools/cloud_adapter tools/cloud_adapter +COPY tools/optscale_time tools/optscale_time COPY insider/insider_api/requirements.txt insider/insider_api/requirements.txt RUN pip install --no-cache-dir -r insider/insider_api/requirements.txt diff --git a/insider/insider_api/controllers/base.py b/insider/insider_api/controllers/base.py index 2e8b22a17..eb0a64a7a 100644 --- a/insider/insider_api/controllers/base.py +++ b/insider/insider_api/controllers/base.py @@ -5,7 +5,7 @@ from pymongo import MongoClient from tornado.ioloop import IOLoop from optscale_client.rest_api_client.client_v2 import Client as RestClient - +from tools.optscale_time import utcnow tp_executor = ThreadPoolExecutor(30) DEFAULT_CACHE_TIME = 60 * 60 * 12 @@ -27,7 +27,7 @@ def cached_cloud_call(self, fn, *args, **kwargs): 'kwargs': kwargs, } cache_filter = deepcopy(query_filter) - now = datetime.utcnow() + now = utcnow() cache_filter['updated_at'] = { '$gte': now - timedelta(seconds=DEFAULT_CACHE_TIME) } diff --git a/insider/insider_api/controllers/flavor_price.py b/insider/insider_api/controllers/flavor_price.py index 6e4efc4eb..d6f617986 100644 --- a/insider/insider_api/controllers/flavor_price.py +++ b/insider/insider_api/controllers/flavor_price.py @@ -14,7 +14,7 @@ from tools.optscale_exceptions.common_exc import WrongArgumentsException from botocore.exceptions import ClientError as AwsClientError from insider.insider_api.utils import handle_credentials_error - +from tools.optscale_time import utcnow LOG = logging.getLogger(__name__) @@ -130,7 +130,7 @@ def _load_flavor_prices(self, region, flavor, os_type, preinstalled=None, software = self.preinstalled_map.get( preinstalled.lower(), 'NA') if preinstalled else 'NA' - now = datetime.utcnow() + now = utcnow() query = { 'instanceType': flavor, 'location': location, @@ -166,7 +166,7 @@ def _load_family_prices(self, instance_family, region, os_type, currency): raise WrongArgumentsException(Err.OI0015, [os_type]) # TODO: Add currency support - now = datetime.utcnow() + now = utcnow() regex = re.compile(f"{instance_family}\\.", re.IGNORECASE) query = { 'instanceType': regex, @@ -277,7 +277,7 @@ def _load_flavor_prices(self, region, flavor, os_type, preinstalled=None, if operating_system not in {'windows', 'linux'}: raise WrongArgumentsException(Err.OI0015, [os_type]) - now = datetime.utcnow() + now = utcnow() product_name_regex = "Windows$" if operating_system == 'windows' else ".*(? 
MAX_UPDATE_THRESHOLD): + if (utcnow_timestamp() - self.body['last_update'] > + MAX_UPDATE_THRESHOLD): raise KataraTaskTimeoutError() super().execute() diff --git a/keeper/report_server/controllers/event.py b/keeper/report_server/controllers/event.py index 3250f7cbf..ba9db3b61 100644 --- a/keeper/report_server/controllers/event.py +++ b/keeper/report_server/controllers/event.py @@ -1,7 +1,7 @@ # pylint: disable=no-member import logging import hashlib -from datetime import datetime +from datetime import datetime, timezone from mongoengine.errors import ValidationError, DoesNotExist from mongoengine.queryset.visitor import Q @@ -215,7 +215,7 @@ def _publish_event(self, **kwargs): def submit(self, **kwargs): # TODO: possible filter kwargs/filter unexpected - kwargs["time"] = int(datetime.utcnow().timestamp()) + kwargs["time"] = int(datetime.now(tz=timezone.utc).timestamp()) event = Event(**kwargs) try: event.save() diff --git a/keeper/report_server/controllers/feedback.py b/keeper/report_server/controllers/feedback.py index 2cf64e70e..c7606671a 100644 --- a/keeper/report_server/controllers/feedback.py +++ b/keeper/report_server/controllers/feedback.py @@ -1,6 +1,6 @@ import logging import json -from datetime import datetime +from datetime import datetime, timezone from mongoengine.queryset.visitor import Q from mongoengine.errors import ValidationError @@ -42,7 +42,7 @@ def submit(self, **kwargs): if token: user_id = self.get_user_id_by_token(token) kwargs.update({"user_id": user_id}) - kwargs["time"] = int(datetime.utcnow().timestamp()) + kwargs["time"] = int(datetime.now(tz=timezone.utc).timestamp()) metadata = kwargs.get("metadata") if metadata: _check_filter_json(metadata, "metadata") diff --git a/metroculus/metroculus_worker/Dockerfile b/metroculus/metroculus_worker/Dockerfile index ce01a4f90..52af38f50 100644 --- a/metroculus/metroculus_worker/Dockerfile +++ b/metroculus/metroculus_worker/Dockerfile @@ -7,6 +7,7 @@ ENV PYTHONPATH=/usr/src/app/ COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/rest_api_client optscale_client/rest_api_client COPY tools/cloud_adapter tools/cloud_adapter +COPY tools/optscale_time tools/optscale_time COPY metroculus/metroculus_worker/requirements.txt metroculus/metroculus_worker/requirements.txt RUN pip install --no-cache-dir -r metroculus/metroculus_worker/requirements.txt diff --git a/metroculus/metroculus_worker/main.py b/metroculus/metroculus_worker/main.py index 3aa6bf061..4280c5589 100644 --- a/metroculus/metroculus_worker/main.py +++ b/metroculus/metroculus_worker/main.py @@ -1,6 +1,5 @@ #!/usr/bin/env python import os -from datetime import datetime from etcd import Lock as EtcdLock from kombu.mixins import ConsumerMixin from kombu.log import get_logger @@ -11,7 +10,7 @@ from metroculus.metroculus_worker.processor import MetricsProcessor from optscale_client.config_client.client import Client as ConfigClient - +from tools.optscale_time import utcnow EXCHANGE_NAME = 'metroculus-tasks' QUEUE_NAME = 'metroculus-task' @@ -30,7 +29,7 @@ def get_consumers(self, consumer, channel): callbacks=[self.process_task], prefetch_count=10)] def _process_task(self, task): - start_process_time = datetime.utcnow() + start_process_time = utcnow() cloud_account_id = task.get('cloud_account_id') processor = MetricsProcessor(self.config_cl, cloud_account_id) try: @@ -39,7 +38,7 @@ def _process_task(self, task): 'Metrics received for cloud_account %s (%s resources). 
' 'The processing took %s seconds' % ( cloud_account_id, len(result), - (datetime.utcnow() - start_process_time).total_seconds())) + (utcnow() - start_process_time).total_seconds())) except Exception as exc: processor.update_getting_metrics_attempt(error=str(exc)) raise diff --git a/metroculus/metroculus_worker/migrator.py b/metroculus/metroculus_worker/migrator.py index a1eec7d2b..45c39fc7c 100644 --- a/metroculus/metroculus_worker/migrator.py +++ b/metroculus/metroculus_worker/migrator.py @@ -2,7 +2,7 @@ import hashlib import importlib import logging -from datetime import datetime +from datetime import datetime, timezone from clickhouse_driver import Client as ClickHouseClient LOG = logging.getLogger(__name__) @@ -100,7 +100,7 @@ def update_versions_table(self, filename): 'version': self._get_version_from_name(filename), 'md5': self._get_script_from_name(filename), 'script': self._get_script_from_name(filename), - 'created_at': datetime.utcnow() + 'created_at': datetime.now(tz=timezone.utc).replace(tzinfo=None) }] self.clickhouse_client.execute( f"INSERT INTO {VERSIONS_TABLE} VALUES", version) diff --git a/metroculus/metroculus_worker/processor.py b/metroculus/metroculus_worker/processor.py index 4a6e974ca..38fd0e087 100644 --- a/metroculus/metroculus_worker/processor.py +++ b/metroculus/metroculus_worker/processor.py @@ -7,6 +7,7 @@ from clickhouse_driver import Client as ClickHouseClient from optscale_client.rest_api_client.client_v2 import Client as RestClient from tools.cloud_adapter.cloud import Cloud as CloudAdapter +from tools.optscale_time import utcfromtimestamp, utcnow LOG = get_logger(__name__) K8S_RESOURCE_TYPE = 'K8s Pod' @@ -177,7 +178,7 @@ def update_getting_metrics_attempt(self, ts=None, error=None): def start(self): LOG.info('Starting getting metrics ' 'for cloud account %s' % self.cloud_account_id) - now = datetime.utcnow() + now = utcnow() _, cloud_account = self.rest_client.cloud_account_get( self.cloud_account_id) start_period = now - timedelta(days=30) @@ -250,7 +251,7 @@ def start(self): )] = cloud_resource_ids else: for r_id, resource in resource_map.items(): - last_seen = datetime.utcfromtimestamp(resource['last_seen']) + last_seen = utcfromtimestamp(resource['last_seen']) last_metric_date = resource_metric_dates_map.get( r_id, datetime.fromtimestamp(0)) start_date = max(last_metric_date, start_period) + timedelta( diff --git a/metroculus/metroculus_worker/requirements.txt b/metroculus/metroculus_worker/requirements.txt index eccbcbb64..087a40af8 100644 --- a/metroculus/metroculus_worker/requirements.txt +++ b/metroculus/metroculus_worker/requirements.txt @@ -6,3 +6,4 @@ pymongo==4.6.3 -e optscale_client/config_client -e optscale_client/rest_api_client -e tools/cloud_adapter +-e tools/optscale_time diff --git a/rest_api/Dockerfile b/rest_api/Dockerfile index 600ad58d1..f6fff07f4 100644 --- a/rest_api/Dockerfile +++ b/rest_api/Dockerfile @@ -6,6 +6,7 @@ ENV PYTHONPATH /usr/src/app/ COPY tools/cloud_adapter tools/cloud_adapter COPY tools/optscale_exceptions tools/optscale_exceptions COPY tools/optscale_types tools/optscale_types +COPY tools/optscale_time tools/optscale_time COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/auth_client optscale_client/auth_client diff --git a/rest_api/requirements.txt b/rest_api/requirements.txt index 784c898cf..75a15094a 100644 --- a/rest_api/requirements.txt +++ b/rest_api/requirements.txt @@ -23,6 +23,7 @@ currency-symbols==2.0.3 -e tools/cloud_adapter -e tools/optscale_exceptions -e 
tools/optscale_types +-e tools/optscale_time -e optscale_client/config_client -e optscale_client/auth_client -e optscale_client/herald_client diff --git a/rest_api/rest_api_server/alembic/versions/a9ee3d861023_databricks_cloud_type.py b/rest_api/rest_api_server/alembic/versions/a9ee3d861023_databricks_cloud_type.py index d5a994b14..9908711a3 100644 --- a/rest_api/rest_api_server/alembic/versions/a9ee3d861023_databricks_cloud_type.py +++ b/rest_api/rest_api_server/alembic/versions/a9ee3d861023_databricks_cloud_type.py @@ -7,8 +7,7 @@ """ from alembic import op import sqlalchemy as sa -from datetime import datetime - +from datetime import datetime, timezone # revision identifiers, used by Alembic. revision = 'a9ee3d861023' @@ -39,7 +38,8 @@ def downgrade(): sa.sql.column('deleted_at', sa.Integer())) op.execute( ct.update().where(ct.c.type.in_(['DATABRICKS'])).values( - type='ENVIRONMENT', deleted_at=int(datetime.utcnow().timestamp()) + type='ENVIRONMENT', deleted_at=int(datetime.now( + tz=timezone.utc).timestamp()) ) ) op.alter_column('cloudaccount', 'type', existing_type=new_cloud_types, @@ -49,7 +49,8 @@ def downgrade(): sa.sql.column('deleted_at', sa.Integer())) op.execute( ct.update().where(ct.c.type.in_(['SKU'])).values( - type='CLOUD_ACCOUNT', deleted_at=int(datetime.utcnow().timestamp()) + type='CLOUD_ACCOUNT', deleted_at=int(datetime.now( + tz=timezone.utc).timestamp()) ) ) op.alter_column('cost_model', 'type', existing_type=new_cost_models_types, diff --git a/rest_api/rest_api_server/controllers/assignment.py b/rest_api/rest_api_server/controllers/assignment.py index f6d379fcd..3dd202876 100644 --- a/rest_api/rest_api_server/controllers/assignment.py +++ b/rest_api/rest_api_server/controllers/assignment.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime +import tools.optscale_time as opttime from requests import HTTPError from sqlalchemy.sql import and_ @@ -93,7 +93,7 @@ def get_active_requests(self, res_ids): return {request.resource_id: request for request in res} def invalidate_requests(self, res_ids): - now_ts = int(datetime.utcnow().timestamp()) + now_ts = opttime.utcnow_timestamp() resource_request_map = self.get_active_requests(res_ids) requests = list(resource_request_map.values()) for request in requests: diff --git a/rest_api/rest_api_server/controllers/assignment_request.py b/rest_api/rest_api_server/controllers/assignment_request.py index 429427d78..9e81b7cbf 100644 --- a/rest_api/rest_api_server/controllers/assignment_request.py +++ b/rest_api/rest_api_server/controllers/assignment_request.py @@ -1,5 +1,6 @@ import logging +import tools.optscale_time as opttime from datetime import datetime from sqlalchemy.sql import and_, or_ @@ -120,7 +121,7 @@ def _get_assignment_request_by_id(self, request_id, user_id): def _finish_request(self, request, target_request_status): self.update(request.id, status=target_request_status, - deleted_at=int(datetime.utcnow().timestamp())) + deleted_at=opttime.utcnow_timestamp()) def list_assignment_requests(self, user_id, organization_id=None, req_type=None): diff --git a/rest_api/rest_api_server/controllers/base.py b/rest_api/rest_api_server/controllers/base.py index d57f65a7b..91f01c883 100644 --- a/rest_api/rest_api_server/controllers/base.py +++ b/rest_api/rest_api_server/controllers/base.py @@ -1,8 +1,8 @@ import hashlib import logging import threading +import tools.optscale_time as opttime from clickhouse_driver import Client as ClickHouseClient -from datetime import datetime from kombu import Connection as QConnection, 
Exchange from kombu.pools import producers from pymongo import MongoClient @@ -452,7 +452,7 @@ def hard_delete(self, item_id): def delete(self, item_id): LOG.info("Deleting %s with id %s", self.model_type.__name__, item_id) return self.update( - item_id, deleted_at=int(datetime.utcnow().timestamp())) + item_id, deleted_at=opttime.utcnow_timestamp()) def update(self, item_id, **kwargs): try: diff --git a/rest_api/rest_api_server/controllers/breakdown_expense.py b/rest_api/rest_api_server/controllers/breakdown_expense.py index 64e0bdf8f..c8c83319b 100644 --- a/rest_api/rest_api_server/controllers/breakdown_expense.py +++ b/rest_api/rest_api_server/controllers/breakdown_expense.py @@ -6,6 +6,7 @@ from rest_api.rest_api_server.exceptions import Err from tools.optscale_exceptions.common_exc import WrongArgumentsException +from tools.optscale_time import utcfromtimestamp LOG = logging.getLogger(__name__) DAY_IN_SECONDS = 86400 @@ -45,10 +46,10 @@ def update_unique_values(self, unique_values, entities): @staticmethod def _get_breakdown_dates(start_date, end_date): - first_breakdown = int(datetime.utcfromtimestamp(start_date).replace( + first_breakdown = int(utcfromtimestamp(start_date).replace( hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc ).timestamp()) - last_breakdown = int(datetime.utcfromtimestamp(end_date).replace( + last_breakdown = int(utcfromtimestamp(end_date).replace( hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc ).timestamp()) return [x for x in range(first_breakdown, last_breakdown + 1, @@ -84,9 +85,9 @@ def __init__(self, *args, **kwargs): def update_params(**params): start_date = params.get('start_date') end_date = params.get('end_date') - start_dt = datetime.utcfromtimestamp(start_date).replace( + start_dt = utcfromtimestamp(start_date).replace( hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc) - end_dt = datetime.utcfromtimestamp(end_date).replace( + end_dt = utcfromtimestamp(end_date).replace( hour=23, minute=59, second=59, microsecond=0, tzinfo=timezone.utc) params.update({ 'start_date': int(start_dt.timestamp()), @@ -160,9 +161,9 @@ def _get_base_result(self, breakdown_by, breakdown_expenses, entities_map): } breakdown = defaultdict(dict) counts = {} - previous_period_dt = datetime.utcfromtimestamp( + previous_period_dt = utcfromtimestamp( self.previous_period_start) - start_dt = datetime.utcfromtimestamp(self.start_date) + start_dt = utcfromtimestamp(self.start_date) for breakdown_date, day_info in breakdown_expenses.items(): for k, cost in day_info.items(): if k not in counts: @@ -236,8 +237,8 @@ def get_breakdown_expenses(self, cloud_account_ids, resources): {'id': k, 'group_field': v} for k, v in resources.items() ] - start_dt = datetime.utcfromtimestamp(self.previous_period_start) - end_dt = datetime.utcfromtimestamp(self.end_date) + start_dt = utcfromtimestamp(self.previous_period_start) + end_dt = utcfromtimestamp(self.end_date) expenses = self.execute_clickhouse( query=""" SELECT diff --git a/rest_api/rest_api_server/controllers/calendar_observer.py b/rest_api/rest_api_server/controllers/calendar_observer.py index 7f542f30b..fb2473013 100644 --- a/rest_api/rest_api_server/controllers/calendar_observer.py +++ b/rest_api/rest_api_server/controllers/calendar_observer.py @@ -1,4 +1,5 @@ import logging +import tools.optscale_time as opttime from datetime import datetime from sqlalchemy import false, and_ @@ -141,7 +142,7 @@ def _observe(self, calendar_sync): creates_list, calendar_sync.calendar_id) except CalendarException as 
ex: raise FailedDependency(Err.OE0489, [str(ex)]) - calendar_sync.last_completed = int(datetime.utcnow().timestamp()) + calendar_sync.last_completed = opttime.utcnow_timestamp() self.session.commit() diff --git a/rest_api/rest_api_server/controllers/calendar_synchronization.py b/rest_api/rest_api_server/controllers/calendar_synchronization.py index 1a5ac889a..ebd2188cb 100644 --- a/rest_api/rest_api_server/controllers/calendar_synchronization.py +++ b/rest_api/rest_api_server/controllers/calendar_synchronization.py @@ -1,5 +1,5 @@ import logging - +import tools.optscale_time as opttime from datetime import datetime, timedelta from sqlalchemy.exc import IntegrityError @@ -180,7 +180,7 @@ def get_service_account(self): def list_calendar_events(self, calendar_id, time_min=None, time_max=None, updated_min=None, reraise=False): if updated_min: - updated_min_limit = datetime.utcnow() - timedelta(days=UPDATED_MIN_DAYS) + updated_min_limit = opttime.utcnow() - timedelta(days=UPDATED_MIN_DAYS) updated_min = max(updated_min, updated_min_limit) try: events = self.google_calendar_cl.list_events( @@ -201,7 +201,7 @@ def get_event_template( shareable_booking.acquired_since).replace( hour=23, minute=59, second=0, microsecond=0) + end_boost if event: - today = datetime.utcnow().replace( + today = opttime.utcnow().replace( hour=23, minute=59, second=0, microsecond=0) end_border = today + timedelta(days=180) if event['end'] < end_border: @@ -222,7 +222,7 @@ def get_event_template( } def _check_calendar_availability(self, organization_id, calendar_id): - now = datetime.utcnow() + now = opttime.utcnow() test_template = { 'calendar_id': calendar_id, 'start': now, diff --git a/rest_api/rest_api_server/controllers/cloud_account.py b/rest_api/rest_api_server/controllers/cloud_account.py index 059adcaa2..06c6debf0 100644 --- a/rest_api/rest_api_server/controllers/cloud_account.py +++ b/rest_api/rest_api_server/controllers/cloud_account.py @@ -1,6 +1,7 @@ import logging import re -from datetime import datetime, timedelta +import tools.optscale_time as opttime +from datetime import timedelta from calendar import monthrange from optscale_client.herald_client.client_v2 import Client as HeraldClient @@ -356,7 +357,7 @@ def create(self, **kwargs): cloud_pool = PoolController(self.session, self._config, self.token).create( organization_id=cloud_account_org_id, parent_id=parent_pool.id, name=pool_name, default_owner_id=default_employee.id) - rule_name = 'Rule for %s_%s' % (ca_obj.name, int(datetime.utcnow().timestamp())) + rule_name = 'Rule for %s_%s' % (ca_obj.name, opttime.utcnow_timestamp()) RuleController(self.session, self._config, self.token).create_rule( auth_user_id, cloud_pool.organization_id, self.token, name=rule_name, owner_id=cloud_pool.default_owner_id, @@ -588,7 +589,7 @@ def delete(self, item_id): self.send_cloud_account_email(cloud_account, action='deleted') def get_details(self, cloud_acc_id): - today = datetime.utcnow() + today = opttime.utcnow() expense_ctrl = ExpenseController(self._config) default = {'cost': 0, 'count': 0, 'types': []} month_expenses = self._get_this_month_expenses( @@ -704,7 +705,7 @@ def list(self, details=False, secure=True, only_linked=None, type=None, cloud_acc_ids = [x.id for x in cloud_accounts] - today = datetime.utcnow() + today = opttime.utcnow() expense_ctrl = ExpenseController(self._config) month_expenses = self._get_this_month_expenses( expense_ctrl, today, cloud_acc_ids diff --git a/rest_api/rest_api_server/controllers/cloud_resource.py 
b/rest_api/rest_api_server/controllers/cloud_resource.py index 8abe950f2..e2fb19782 100644 --- a/rest_api/rest_api_server/controllers/cloud_resource.py +++ b/rest_api/rest_api_server/controllers/cloud_resource.py @@ -1,5 +1,6 @@ import logging import uuid +import tools.optscale_time as opttime from datetime import datetime, timedelta from collections import defaultdict from sqlalchemy.exc import IntegrityError @@ -111,7 +112,7 @@ def delete_bookings(self, item_id): if booking_ids: self.session.query(ShareableBooking).filter( ShareableBooking.id.in_(booking_ids)).update( - {ShareableBooking.deleted_at: int(datetime.utcnow().timestamp())}, + {ShareableBooking.deleted_at: opttime.utcnow_timestamp()}, synchronize_session=False) try: self.session.commit() @@ -138,7 +139,6 @@ def delete_bookings(self, item_id): 'Error deleting event calendar booking: %s', str(exc)) def edit(self, item_id, **kwargs): - now_ts = int(datetime.utcnow().timestamp()) self.check_restrictions(is_new=False, **kwargs) if kwargs.get('employee_id'): self.check_entity_exists(kwargs['employee_id'], Employee) @@ -398,7 +398,7 @@ def get_summary(self, resource, dependent_ids, expense_ctrl): def _get_active_resource_bookings(self, resource): bookings_list = [] - now = datetime.utcnow() + now = opttime.utcnow() bookings = self.session.query(ShareableBooking).filter(and_( ShareableBooking.resource_id == resource['id'], ShareableBooking.deleted_at == 0, or_( @@ -415,7 +415,7 @@ def _get_active_resource_bookings(self, resource): return bookings_list def get_resource_details(self, resource): - now = datetime.utcnow() + now = opttime.utcnow() cloud_account = self.session.query(CloudAccount).filter( CloudAccount.id == resource.get('cloud_account_id') ).one_or_none() @@ -650,7 +650,7 @@ def _set_ip_address_updated_fields(self, resource, db_resource): @staticmethod def _set_resource_date(date_field, meta, resource, status_field_value): if status_field_value: - date_value = int(datetime.utcnow().timestamp()) + date_value = opttime.utcnow_timestamp() else: date_value = meta.get(date_field, 0) if not resource.get('meta'): @@ -803,7 +803,7 @@ def _save_bulk( cloud_account_id, resources, include_deleted, unique=False, unique_field_name=False) self._env_changes_notify(db_resources_map, cloud_account) - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() unique_resources_map = self.get_resources_by_hash_or_id( cloud_account_id, resources, include_deleted, unique=True, unique_field_name=True) @@ -984,13 +984,13 @@ def delete(self, item_id): filter={ '_id': item_id }, - update={'$set': {'deleted_at': int(datetime.utcnow().timestamp())}} + update={'$set': {'deleted_at': opttime.utcnow_timestamp()}} ) if not r.modified_count: raise NotFoundException(Err.OE0002, ['Resource', item_id]) def delete_cloud_resources(self, cloud_account_id): - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() chunk_size = 10000 while True: res = self.resources_collection.find( @@ -1048,7 +1048,7 @@ def update_history_properties( 'id') else resource['_id'] self.property_history_collection.insert_one({ 'resource_id': resource_id, - 'time': int(datetime.utcnow().timestamp()), + 'time': opttime.utcnow_timestamp(), 'changes': changes }) except BulkWriteError as ex: diff --git a/rest_api/rest_api_server/controllers/cluster_type.py b/rest_api/rest_api_server/controllers/cluster_type.py index fb65a7089..4305633e1 100644 --- a/rest_api/rest_api_server/controllers/cluster_type.py +++ 
b/rest_api/rest_api_server/controllers/cluster_type.py @@ -1,7 +1,7 @@ import logging import uuid +import tools.optscale_time as opttime -from datetime import datetime from pymongo import ReturnDocument, UpdateMany, UpdateOne from retrying import retry from sqlalchemy import exists @@ -112,7 +112,7 @@ def delete_clusters(self, cluster_type): }, update={ '$unset': {'active': 1}, - '$set': {'deleted_at': int(datetime.utcnow().timestamp())} + '$set': {'deleted_at': opttime.utcnow_timestamp()} } ) if updated_resources.modified_count > 0: @@ -130,7 +130,7 @@ def delete_clusters(self, cluster_type): @retry(**RETRIES) def delete(self, item_id): - now_ts = int(datetime.utcnow().timestamp()) + now_ts = opttime.utcnow_timestamp() cluster_type = self.get(item_id) all_cluster_types = self._get_all_cluster_types( cluster_type.organization_id) @@ -422,7 +422,7 @@ def delete_resources_constraints(self, organization_id, def delete_shareable_booking(self, clustered_resource_ids, no_commit=True): - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() self.session.query(ShareableBooking).filter(and_( ShareableBooking.deleted.is_(False), ShareableBooking.resource_id.in_( @@ -478,7 +478,7 @@ def reapply_clusters(self, organization_id, user_info): for k in tag_cluster_type_map.keys()] }) - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() total_count = 0 cluster_cid_resources_map = {} cluster_cid_cluster_type_map = {} @@ -614,7 +614,7 @@ def reapply_clusters(self, organization_id, user_info): def delete_empty_clusters(self, organization_id, cloud_account_ids): cluster_ids = self.resources_collection.distinct( 'cluster_id', {'cloud_account_id': {'$in': cloud_account_ids}}) - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() self.resources_collection.update_many( filter={ '_id': {'$nin': cluster_ids}, diff --git a/rest_api/rest_api_server/controllers/constraint_base.py b/rest_api/rest_api_server/controllers/constraint_base.py index 5c782b626..0d49e32b2 100644 --- a/rest_api/rest_api_server/controllers/constraint_base.py +++ b/rest_api/rest_api_server/controllers/constraint_base.py @@ -1,5 +1,6 @@ import json import logging +import tools.optscale_time as opttime from datetime import datetime, time, timedelta from sqlalchemy.sql import and_, exists @@ -105,7 +106,7 @@ def get_violations(self, org_id, resource_data_map, now): def get_resource_hit_value(resource_data, constraint_type, now): limit_value_map = { ConstraintTypes.TTL: (now - int(resource_data.get( - 'mindate', datetime.utcnow()).timestamp())) // 3600, + 'mindate', opttime.utcnow()).timestamp())) // 3600, ConstraintTypes.TOTAL_EXPENSE_LIMIT: resource_data.get( 'total_cost', 0), ConstraintTypes.DAILY_EXPENSE_LIMIT: resource_data.get( diff --git a/rest_api/rest_api_server/controllers/discovery_info_bulk.py b/rest_api/rest_api_server/controllers/discovery_info_bulk.py index 70a6825f8..79e9ed20d 100644 --- a/rest_api/rest_api_server/controllers/discovery_info_bulk.py +++ b/rest_api/rest_api_server/controllers/discovery_info_bulk.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime +import tools.optscale_time as opttime from tools.optscale_exceptions.common_exc import ( ConflictException, WrongArgumentsException) from rest_api.rest_api_server.controllers.discovery_info import DiscoveryInfoController @@ -50,7 +50,7 @@ def create(self, cloud_account_id, **kwargs): def delete(self, cloud_account_id, **kwargs): self.check_cloud_acc_and_org(cloud_account_id) discovery_infos_ids = 
kwargs['discovery_info'] - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() self.session.query(DiscoveryInfo).filter( DiscoveryInfo.id.in_(discovery_infos_ids), DiscoveryInfo.deleted.is_(False) diff --git a/rest_api/rest_api_server/controllers/environment_resource.py b/rest_api/rest_api_server/controllers/environment_resource.py index 6a0cf59e4..af2a8ead0 100644 --- a/rest_api/rest_api_server/controllers/environment_resource.py +++ b/rest_api/rest_api_server/controllers/environment_resource.py @@ -1,6 +1,6 @@ import hashlib import logging -from datetime import datetime +import tools.optscale_time as opttime from sqlalchemy import and_ @@ -204,7 +204,7 @@ def delete(self, item_id): '_id': item_id, 'is_environment': True }, - update={'$set': {'deleted_at': int(datetime.utcnow().timestamp())}} + update={'$set': {'deleted_at': opttime.utcnow_timestamp()}} ) diff --git a/rest_api/rest_api_server/controllers/expense.py b/rest_api/rest_api_server/controllers/expense.py index b0230e1cb..6cb5edab0 100644 --- a/rest_api/rest_api_server/controllers/expense.py +++ b/rest_api/rest_api_server/controllers/expense.py @@ -19,6 +19,7 @@ ResourceFormatMixin) from tools.cloud_adapter.cloud import Cloud as CloudAdapter +from tools.optscale_time import utcfromtimestamp, utcnow LOG = logging.getLogger(__name__) NOT_SET_NAME = '(not set)' @@ -206,7 +207,7 @@ def _get_first_cloud_account_expense(self, cloud_account_ids, date, ) def get_first_expenses_for_forecast(self, field, values): - prev_month_start = (datetime.utcnow().replace(day=1) - timedelta( + prev_month_start = (utcnow().replace(day=1) - timedelta( days=1)).replace(day=1, hour=0, minute=0, second=0, microsecond=0) if field in ['cloud_account_id']: result = self._get_first_cloud_account_expense( @@ -312,10 +313,10 @@ def _update_summary(resources_map, traffic_expenses): total_cost = resource.get('total_cost', 0) res_created = datetime.fromtimestamp(resource.pop('created_at', 0)) mindate_ts = resource.get('first_seen') - mindate = datetime.utcfromtimestamp( + mindate = utcfromtimestamp( mindate_ts) if mindate_ts else res_created maxdate_ts = resource.get('last_seen') - maxdate = datetime.utcfromtimestamp( + maxdate = utcfromtimestamp( maxdate_ts) if maxdate_ts else res_created resource.update({ '_id': {'resource_id': r_id}, diff --git a/rest_api/rest_api_server/controllers/infrastructure/base.py b/rest_api/rest_api_server/controllers/infrastructure/base.py index 4584b2061..e9d505598 100644 --- a/rest_api/rest_api_server/controllers/infrastructure/base.py +++ b/rest_api/rest_api_server/controllers/infrastructure/base.py @@ -17,6 +17,7 @@ from optscale_client.insider_client.client import Client as InsiderClient from tools.optscale_exceptions.common_exc import ( NotFoundException, WrongArgumentsException) +from tools.optscale_time import utcnow_timestamp CLOUD_TYPE_TO_CLOUD_MAP = { 'aws_cnr': 'aws' @@ -109,7 +110,7 @@ def _get_usage( self, runners: list[dict], cloud_account: CloudAccount ) -> tuple[dict[str, float], dict[str, int]]: - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() filters = [] min_dt, max_dt = None, None duration_map = {} diff --git a/rest_api/rest_api_server/controllers/infrastructure/run.py b/rest_api/rest_api_server/controllers/infrastructure/run.py index 98e7da84a..e6c34f546 100644 --- a/rest_api/rest_api_server/controllers/infrastructure/run.py +++ b/rest_api/rest_api_server/controllers/infrastructure/run.py @@ -11,6 +11,7 @@ from rest_api.rest_api_server.models.models import CloudAccount 
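Not shown anywhere in this patch is the `tools/optscale_time` module itself. For orientation, here is a minimal sketch of what its helpers appear to do, with names and behavior inferred from the call sites in this diff; treat the naive-UTC return values as an assumption (suggested by breakdown_expense.py re-attaching `tzinfo=timezone.utc` after calling `utcfromtimestamp`), not as the module's confirmed implementation:

from datetime import datetime, timezone


def utcnow() -> datetime:
    # Drop-in for the deprecated datetime.utcnow(): a naive datetime in UTC.
    return datetime.now(tz=timezone.utc).replace(tzinfo=None)


def utcnow_timestamp() -> int:
    # Replaces the int(datetime.utcnow().timestamp()) idiom used throughout.
    return int(datetime.now(tz=timezone.utc).timestamp())


def utcfromtimestamp(timestamp: int) -> datetime:
    # Drop-in for the deprecated datetime.utcfromtimestamp().
    return datetime.fromtimestamp(timestamp, tz=timezone.utc).replace(
        tzinfo=None)


def startday(date: datetime) -> datetime:
    # Midnight of the given day; used by live_demo.py as opttime.startday().
    return date.replace(hour=0, minute=0, second=0, microsecond=0)

One behavioral nuance of the migration: `utcnow_timestamp()` returns an int, so call sites that previously worked with float timestamps (for example the run duration math in infrastructure/run.py and profiling/run.py) now truncate sub-second precision.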
from tools.optscale_exceptions.common_exc import NotFoundException +from tools.optscale_time import utcnow_timestamp class RunController(BaseInfraController): @@ -54,7 +55,7 @@ def format_run( run['status'] = RunStates(state).name finish = run.get('finish') if not finish and state == RunStates.running: - finish = datetime.utcnow().timestamp() + finish = utcnow_timestamp() duration = finish - run.get('start') run['runset'] = self.format_runset( run.pop('runset_id'), run.pop('runset_name', None)) @@ -80,7 +81,7 @@ def _get_executors_usage( if not executors: # no executors created. Nothing to count return {} - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() started_at = runset['started_at'] destroyed_at = runset.get('destroyed_at') or now started_at = datetime.fromtimestamp(started_at).replace( diff --git a/rest_api/rest_api_server/controllers/infrastructure/runset.py b/rest_api/rest_api_server/controllers/infrastructure/runset.py index f50da649b..94cb37166 100644 --- a/rest_api/rest_api_server/controllers/infrastructure/runset.py +++ b/rest_api/rest_api_server/controllers/infrastructure/runset.py @@ -13,7 +13,7 @@ from tools.optscale_exceptions.common_exc import ( NotFoundException, WrongArgumentsException, ConflictException) - +from tools.optscale_time import utcnow_timestamp MIN_CPU_COUNT = 4 STOP_ACTION = 'stop' @@ -45,7 +45,7 @@ def format_template(template: dict) -> dict: def get_runners_duration(runners: list[dict]) -> int: min_started_at = 0 max_destroyed_at = 0 - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() for runner in runners: if not runner.get('started_at'): # we need start point to calculate something diff --git a/rest_api/rest_api_server/controllers/invite.py b/rest_api/rest_api_server/controllers/invite.py index 3758817cd..ca0d50de1 100644 --- a/rest_api/rest_api_server/controllers/invite.py +++ b/rest_api/rest_api_server/controllers/invite.py @@ -3,7 +3,7 @@ import logging import requests import uuid - +import tools.optscale_time as opttime from etcd import EtcdKeyNotFound from rest_api.rest_api_server.controllers.base import BaseController @@ -127,7 +127,7 @@ def get_highest_role(current, new): owner_id=user_id, meta=json.dumps(meta), ttl=int( - datetime.datetime.utcnow().timestamp() + + opttime.utcnow_timestamp() + datetime.timedelta( days=invite_expiration_days ).total_seconds())) @@ -196,12 +196,12 @@ def decline_invite(self, invite_id, user_info): def delete_invite(self, invite): for invite_assignment in invite.invite_assignments: - invite_assignment.deleted_at = int(datetime.datetime.utcnow().timestamp()) + invite_assignment.deleted_at = opttime.utcnow_timestamp() super().delete(invite.id) def list(self, user_id, user_info): invites = super().list(email=user_info['email']) - now = int(datetime.datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() result = [] for invite in invites: if invite.ttl <= now: @@ -223,7 +223,7 @@ def _get_org_by_scope(scope): def _get_invite_for_email(self, invite_id, email): invite = super().get(item_id=invite_id, email=email) - now = int(datetime.datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() if not invite or invite.ttl <= now: if invite: self.update(invite_id, deleted_at=now) diff --git a/rest_api/rest_api_server/controllers/jira_issue_attachment.py b/rest_api/rest_api_server/controllers/jira_issue_attachment.py index 0bd201b38..84bf6a7a9 100644 --- a/rest_api/rest_api_server/controllers/jira_issue_attachment.py +++ 
b/rest_api/rest_api_server/controllers/jira_issue_attachment.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime +import tools.optscale_time as opttime from rest_api.rest_api_server.controllers.base import BaseController, MongoMixin from rest_api.rest_api_server.controllers.shareable_resource import ( ShareableBookingController) @@ -99,7 +99,7 @@ def delete(self, item_id, user_id, shareable_booking=None): if not shareable_booking: self.validate_shareable_booking( attachment.shareable_booking_id, user_id) - attachment.deleted_at = int(datetime.utcnow().timestamp()) + attachment.deleted_at = opttime.utcnow_timestamp() self.shareable_booking_controller.check_autorelease( attachment.shareable_booking_id) self.session.commit() diff --git a/rest_api/rest_api_server/controllers/limit_hit.py b/rest_api/rest_api_server/controllers/limit_hit.py index c8f195ebb..619e3efb4 100644 --- a/rest_api/rest_api_server/controllers/limit_hit.py +++ b/rest_api/rest_api_server/controllers/limit_hit.py @@ -1,6 +1,6 @@ from datetime import datetime import logging - +import tools.optscale_time as opttime from sqlalchemy import and_ from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.sql import func @@ -323,7 +323,7 @@ def set_limits_in_green_state(self, not_violated_limit_hits, def process_resources(self, organization_id, resources): if not resources: return [] - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() resource_data_map = self.collect_processing_data(resources) resource_ids = list(resource_data_map.keys()) resource_limit_hit_map = { diff --git a/rest_api/rest_api_server/controllers/live_demo.py b/rest_api/rest_api_server/controllers/live_demo.py index 182de8cc1..70da711a7 100644 --- a/rest_api/rest_api_server/controllers/live_demo.py +++ b/rest_api/rest_api_server/controllers/live_demo.py @@ -5,6 +5,7 @@ import os import random import uuid +import tools.optscale_time as opttime from collections import defaultdict from json.decoder import JSONDecodeError from kombu.pools import producers @@ -689,7 +690,7 @@ def build_resource(self, obj, objects_group, now, organization_id, **kwargs): now, obj) if obj.get('active', False): obj['last_seen'] = int(( - datetime.utcnow() + timedelta(days=7)).timestamp()) + opttime.utcnow() + timedelta(days=7)).timestamp()) obj['_last_seen_date'] = timestamp_to_day_start(obj.get('last_seen', 0)) obj['_first_seen_date'] = timestamp_to_day_start( obj.get('first_seen', 0)) @@ -1279,8 +1280,7 @@ def fill_organization( self, organization: Organization, token: ProfilingToken, src_replace_employee, dest_replace_employee, preset ): - now = int(datetime.utcnow().replace( - hour=0, minute=0, second=0).timestamp()) + now = int(opttime.startday(opttime.utcnow()).timestamp()) insertions_map = {} self.setup(src_replace_employee, dest_replace_employee, organization.pool_id, preset) @@ -1469,13 +1469,13 @@ def find_demo_organization(self, auth_user_id): Organization.is_demo.is_(true()), Organization.deleted.is_(False), Organization.created_at >= int( - (datetime.utcnow() - timedelta(days=7)).timestamp()) + (opttime.utcnow() - timedelta(days=7)).timestamp()) )) orgs = demo_organization_q.all() return len(orgs) == 1 def _get_prepared_live_demo(self): - live_demo_threshhold = datetime.utcnow() - timedelta( + live_demo_threshhold = opttime.utcnow() - timedelta( days=PREPARED_DEMO_LIFETIME_DAYS) live_demo = self.mongo_client.restapi.live_demos.find_one_and_delete({ 'created_at': {'$gte': int(live_demo_threshhold.timestamp())} diff --git 
a/rest_api/rest_api_server/controllers/node.py b/rest_api/rest_api_server/controllers/node.py index d11348304..7d09142e5 100644 --- a/rest_api/rest_api_server/controllers/node.py +++ b/rest_api/rest_api_server/controllers/node.py @@ -1,7 +1,7 @@ import json import logging +import tools.optscale_time as opttime -from datetime import datetime from retrying import retry from sqlalchemy.exc import IntegrityError from sqlalchemy.sql import and_ @@ -77,7 +77,7 @@ def bulk_create(self, cloud_account_id, nodes_payload): } res = [] - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() try: for k, existing_node in existing_map.items(): changes = payload_map.pop(k, None) diff --git a/rest_api/rest_api_server/controllers/organization_constraint.py b/rest_api/rest_api_server/controllers/organization_constraint.py index feee12abf..8d9ec2e2a 100644 --- a/rest_api/rest_api_server/controllers/organization_constraint.py +++ b/rest_api/rest_api_server/controllers/organization_constraint.py @@ -1,5 +1,6 @@ import json import logging +import tools.optscale_time as opttime from collections import defaultdict from datetime import datetime, time, timedelta @@ -437,7 +438,7 @@ def create(self, **kwargs): organization = self.get_entity(organization_id) self._check_input(organization_id, organization, **kwargs) - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() filled_filters = self._fill_filters( organization_id, now, kwargs.get('filters', {})) kwargs['filters'] = filled_filters @@ -522,7 +523,7 @@ def list(self, **kwargs): return result def delete_constraint_by_id(self, constraint_id): - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() LOG.info("Deleting %s with id %s", self.get_model_name(), constraint_id) self.session.query(OrganizationLimitHit).filter( @@ -538,7 +539,7 @@ def delete_constraint_by_id(self, constraint_id): self.session.commit() def delete_constraints_with_hits(self, organization_id, filters=None): - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() constraints_to_delete = [] all_constraints = self.session.query(OrganizationConstraint).filter( OrganizationConstraint.organization_id == organization_id, diff --git a/rest_api/rest_api_server/controllers/pool.py b/rest_api/rest_api_server/controllers/pool.py index 15b4442ed..55c9713d8 100644 --- a/rest_api/rest_api_server/controllers/pool.py +++ b/rest_api/rest_api_server/controllers/pool.py @@ -1,6 +1,6 @@ import datetime import logging - +import tools.optscale_time as opttime from collections import defaultdict from sqlalchemy import exists, and_ from sqlalchemy.exc import IntegrityError @@ -248,7 +248,7 @@ def delete_exports_for_pool(self, item_id): PoolExpensesExport.deleted.is_(False) ).one_or_none() if export: - now_ts = int(datetime.datetime.utcnow().timestamp()) + now_ts = opttime.utcnow_timestamp() export.deleted_at = now_ts self.session.add(export) self.session.commit() @@ -352,7 +352,7 @@ def reassign_cleanup(self, pool_id): ) ) for obj in objects: - obj.deleted_at = int(datetime.datetime.utcnow().timestamp()) + obj.deleted_at = opttime.utcnow_timestamp() self.session.commit() def get_sub_pools(self, pool_id, show_details=False): @@ -486,7 +486,7 @@ def calculate_costs(pool_id): def get_pool_expenses(self, pool_ids, period_day=None, end_date=None, forecast=True): expense_ctrl = ExpenseController(self._config) - today = datetime.datetime.utcnow() + today = opttime.utcnow() month_start = today.replace( day=1, hour=0, minute=0, second=0, 
microsecond=0) if not end_date: @@ -808,7 +808,7 @@ def get_overview(self, organization, details=False): 'policies': [policy.to_dict() for policy in pool_policies] }) - now = datetime.datetime.utcnow() + now = opttime.utcnow() pool_ids = [overview['id']] + [b['id'] for b in children] overview['saving'], _ = self.get_pool_savings(organization.id, pool_ids) diff --git a/rest_api/rest_api_server/controllers/pool_alert.py b/rest_api/rest_api_server/controllers/pool_alert.py index 116812783..9813af6f1 100644 --- a/rest_api/rest_api_server/controllers/pool_alert.py +++ b/rest_api/rest_api_server/controllers/pool_alert.py @@ -1,7 +1,7 @@ import logging import uuid import datetime - +import tools.optscale_time as opttime from sqlalchemy.exc import IntegrityError from sqlalchemy.sql import and_ from tools.optscale_exceptions.common_exc import (NotFoundException, @@ -88,7 +88,7 @@ def get_pools_children_ids(pool_id): alert_pool_ids.update(pool_ids) for alert_id, pool_ids in alert_pools.items(): alert = alert_map.get(alert_id) - now = int(datetime.datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() last_shoot = alert.last_shoot_at if last_shoot and last_shoot + NOTIFICATION_INTERVAL > now: continue diff --git a/rest_api/rest_api_server/controllers/pool_expense.py b/rest_api/rest_api_server/controllers/pool_expense.py index 7ed58e8ba..b5b8237b9 100644 --- a/rest_api/rest_api_server/controllers/pool_expense.py +++ b/rest_api/rest_api_server/controllers/pool_expense.py @@ -1,3 +1,4 @@ +import tools.optscale_time as opttime from calendar import monthrange from datetime import datetime, timedelta @@ -31,7 +32,7 @@ def _get_model_type(self): @property def today(self): if self._today is None: - self._today = datetime.utcnow() + self._today = opttime.utcnow() return self._today @property diff --git a/rest_api/rest_api_server/controllers/pool_expenses_export.py b/rest_api/rest_api_server/controllers/pool_expenses_export.py index 66dcf4a84..62541f194 100644 --- a/rest_api/rest_api_server/controllers/pool_expenses_export.py +++ b/rest_api/rest_api_server/controllers/pool_expenses_export.py @@ -1,4 +1,5 @@ import logging +import tools.optscale_time as opttime from datetime import datetime from tools.optscale_exceptions.common_exc import (ConflictException, NotFoundException) @@ -81,7 +82,7 @@ def get_expenses_data(self, export_id, start_date=None, end_date=None): pool_id = self.get_pool_id(export_id) if start_date is None and end_date is None: start_date = 0 - end_date = int(datetime.utcnow().timestamp()) + end_date = opttime.utcnow_timestamp() elif end_date is None: end_date = start_date + 24 * 60 * 60 - 1 pool = self.get_pool(pool_id) diff --git a/rest_api/rest_api_server/controllers/pool_policy.py b/rest_api/rest_api_server/controllers/pool_policy.py index 721ecaa3b..6ed7168ed 100644 --- a/rest_api/rest_api_server/controllers/pool_policy.py +++ b/rest_api/rest_api_server/controllers/pool_policy.py @@ -1,3 +1,4 @@ +import tools.optscale_time as opttime from datetime import datetime from collections import defaultdict @@ -111,7 +112,7 @@ def get_entity(self, item_id): def handle_ttl_hit(self, resource_data, constraint, now): created_at = int(resource_data.get( - 'mindate', datetime.utcnow()).timestamp()) + 'mindate', opttime.utcnow()).timestamp()) ts_limit = created_at + constraint.limit * 3600 if now > ts_limit: return ts_limit, now diff --git a/rest_api/rest_api_server/controllers/profiling/base.py b/rest_api/rest_api_server/controllers/profiling/base.py index aa72ba63f..d108f8c3e 100644 --- 
a/rest_api/rest_api_server/controllers/profiling/base.py +++ b/rest_api/rest_api_server/controllers/profiling/base.py @@ -6,6 +6,7 @@ from rest_api.rest_api_server.models.enums import RunStates from rest_api.rest_api_server.models.models import CloudAccount from rest_api.rest_api_server.utils import handle_http_exc +from tools.optscale_time import utcnow_timestamp DAY_IN_HOURS = 24 HOUR_IN_SEC = 3600 @@ -111,7 +112,7 @@ def _get_run_costs(self, cloud_account_ids: list, runs: list) -> dict: min_dt, max_dt = None, None executors = set() run_executor_duration_map = defaultdict(dict) - now = datetime.utcnow().timestamp() + now = utcnow_timestamp() for r in runs: r_start = r.get('start') or 0 r_finish = r.get('finish') or 0 diff --git a/rest_api/rest_api_server/controllers/profiling/optimization.py b/rest_api/rest_api_server/controllers/profiling/optimization.py index bb9732fe2..f5f196a7e 100644 --- a/rest_api/rest_api_server/controllers/profiling/optimization.py +++ b/rest_api/rest_api_server/controllers/profiling/optimization.py @@ -6,7 +6,7 @@ from tools.optscale_exceptions.common_exc import NotFoundException from rest_api.rest_api_server.exceptions import Err from requests.exceptions import HTTPError -from datetime import datetime +from tools.optscale_time import utcnow_timestamp LAST_RUNS_THRESHOLD = 7 * 86400 # 7 days @@ -52,7 +52,7 @@ def get_last_task_runs(self, task_id, profiling_token): raise NotFoundException( Err.OE0002, ['Task', task_id]) raise - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() return list(filter( lambda x: x['start'] >= now - LAST_RUNS_THRESHOLD, runs)) diff --git a/rest_api/rest_api_server/controllers/profiling/run.py b/rest_api/rest_api_server/controllers/profiling/run.py index dcf9eff34..307b1b607 100644 --- a/rest_api/rest_api_server/controllers/profiling/run.py +++ b/rest_api/rest_api_server/controllers/profiling/run.py @@ -13,6 +13,7 @@ from rest_api.rest_api_server.models.enums import RunStates from tools.optscale_exceptions.common_exc import NotFoundException +from tools.optscale_time import utcnow_timestamp DAY_IN_HOURS = 24 BYTES_IN_MB = 1024 * 1024 @@ -44,7 +45,7 @@ def formatted_run( run['status'] = RunStates(state).name finish = run.get('finish') if not finish and state == RunStates.running: - finish = datetime.utcnow().timestamp() + finish = utcnow_timestamp() run['duration'] = finish - run.get('start') if finish else None run['cost'] = run_costs.get(run['id'], 0) run['metrics'] = task_metrics diff --git a/rest_api/rest_api_server/controllers/profiling/task.py b/rest_api/rest_api_server/controllers/profiling/task.py index ac0bfbae5..2a02bed1b 100644 --- a/rest_api/rest_api_server/controllers/profiling/task.py +++ b/rest_api/rest_api_server/controllers/profiling/task.py @@ -17,6 +17,7 @@ from rest_api.rest_api_server.exceptions import Err from rest_api.rest_api_server.models.enums import RunStates from rest_api.rest_api_server.models.models import Employee +from tools.optscale_time import utcnow, utcnow_timestamp from tools.optscale_exceptions.common_exc import ( NotFoundException, ConflictException) @@ -56,7 +57,7 @@ def format_task( last_run_executor = None run_metrics = {} last_30_days_start = int( - (datetime.utcnow() - timedelta(days=30)).timestamp()) + (utcnow() - timedelta(days=30)).timestamp()) if runs: for r in runs: run_cost = run_costs.get(r['id'], 0) @@ -82,7 +83,7 @@ def format_task( finish = last.get('finish') if not finish and last['state'] == RunStates.running: - finish = datetime.utcnow().timestamp() + finish = 
utcnow_timestamp() last_run_duration = finish - last.get('start') if finish else None last_run_cost = run_costs.get(last['id'], 0) diff --git a/rest_api/rest_api_server/controllers/report_import.py b/rest_api/rest_api_server/controllers/report_import.py index a0fbe5bf7..fb18c0ed9 100644 --- a/rest_api/rest_api_server/controllers/report_import.py +++ b/rest_api/rest_api_server/controllers/report_import.py @@ -1,7 +1,7 @@ import logging import uuid +import tools.optscale_time as opttime from sqlalchemy import and_, true, or_, exists -from datetime import datetime import boto3 from tools.optscale_exceptions.common_exc import ( NotFoundException, FailedDependency, WrongArgumentsException @@ -47,7 +47,7 @@ def create(self, cloud_account_id, import_file=None, recalculate=False, priority return report_import def check_unprocessed_imports(self, cloud_account_id): - dt = datetime.utcnow().timestamp() + dt = opttime.utcnow().timestamp() scheduled_threshold = dt - NOT_PROCESSED_REPORT_THRESHOLD active_threshold = dt - ACTIVE_IMPORT_THRESHOLD return self.session.query( @@ -93,7 +93,7 @@ def is_initial_completed_report(self, updated_report): return completed_import and completed_import.id == updated_report.id def edit(self, item_id, **kwargs): - kwargs['updated_at'] = int(datetime.utcnow().timestamp()) + kwargs['updated_at'] = opttime.utcnow_timestamp() updated_report = super().edit(item_id, **kwargs) state = kwargs.get('state') if updated_report.is_recalculation: @@ -352,7 +352,7 @@ def list(self, cloud_account_id, show_completed=False, show_active=False): self.model_type.state != ImportStates.COMPLETED ) if show_active: - ts = datetime.utcnow().timestamp() - ACTIVE_IMPORT_THRESHOLD + ts = opttime.utcnow_timestamp() - ACTIVE_IMPORT_THRESHOLD query = query.filter( self.model_type.state == ImportStates.IN_PROGRESS, self.model_type.updated_at >= ts diff --git a/rest_api/rest_api_server/controllers/resource_constraint.py b/rest_api/rest_api_server/controllers/resource_constraint.py index 34a26585a..da0b34f4e 100644 --- a/rest_api/rest_api_server/controllers/resource_constraint.py +++ b/rest_api/rest_api_server/controllers/resource_constraint.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime +import tools.optscale_time as opttime from sqlalchemy.sql import and_ from rest_api.rest_api_server.exceptions import Err @@ -100,7 +100,7 @@ def handle_ttl_hit(self, resource_data, constraint, now): def check_limit(self, constraint_type, limit): check_int_attribute('limit', limit) if constraint_type == ConstraintTypes.TTL and limit != 0: - if limit - int(datetime.utcnow().timestamp()) < 0: + if limit - opttime.utcnow_timestamp() < 0: raise WrongArgumentsException(Err.OE0461, ['Limit']) def get_entity(self, item_id): diff --git a/rest_api/rest_api_server/controllers/resource_observer.py b/rest_api/rest_api_server/controllers/resource_observer.py index 914d61a0d..4aa3cd41e 100644 --- a/rest_api/rest_api_server/controllers/resource_observer.py +++ b/rest_api/rest_api_server/controllers/resource_observer.py @@ -1,7 +1,6 @@ import logging - +import tools.optscale_time as opttime from sqlalchemy import and_, false -from datetime import datetime from pymongo import UpdateMany from tools.cloud_adapter.model import ResourceTypes @@ -77,7 +76,7 @@ def _clear_clusters_active_flags(self, cluster_ids, organization_id): return list(resources_.values()) def observe(self, organization_id): - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() last_run = now - NEWLY_DISCOVERED_TIME organization 
= self._get_organization(organization_id) if not organization: @@ -192,8 +191,7 @@ def process_violated_resources(self, organization_id): {'organization_id': organization_id} ], 'active': True, - 'last_seen': {'$gte': int( - datetime.utcnow().timestamp()) - HOUR_IN_SEC}, + 'last_seen': {'$gte': opttime.utcnow_timestamp() - HOUR_IN_SEC}, 'cluster_type_id': {'$exists': False} }, ['cloud_account_id', 'cluster_id', 'first_seen', 'pool_id', 'total_cost', 'last_expense']) @@ -215,7 +213,7 @@ def _proccess_released_resources(self, resources, last_run): resource_ids = [x.resource_id for x in resources] ctrl = ShareableBookingController(self.session, self._config, self.token) - now_ts = int(datetime.utcnow().timestamp()) + now_ts = opttime.utcnow_timestamp() bookings = self.session.query(ShareableBooking).filter(and_( ShareableBooking.deleted.is_(False), ShareableBooking.resource_id.in_(resource_ids))).all() diff --git a/rest_api/rest_api_server/controllers/rule.py b/rest_api/rest_api_server/controllers/rule.py index f3b54012d..a426f6d76 100644 --- a/rest_api/rest_api_server/controllers/rule.py +++ b/rest_api/rest_api_server/controllers/rule.py @@ -1,6 +1,6 @@ import json import logging -from datetime import datetime +import tools.optscale_time as opttime from retrying import retry from sqlalchemy.exc import IntegrityError @@ -388,7 +388,7 @@ def _process_rule_fields_update(self, original, organization_id, token, return original def _process_conditions_update(self, original, conditions): - now_ts = int(datetime.utcnow().timestamp()) + now_ts = opttime.utcnow_timestamp() new_conditions = [] updated_conditions_map = {} for condition in conditions: @@ -464,7 +464,7 @@ def update_priority(self, rule_id, **kwargs): @retry(**RULE_PRIORITY_RETRIES) def delete(self, item_id, **kwargs): # TODO implement permissions check OSB-412 - now_ts = int(datetime.utcnow().timestamp()) + now_ts = opttime.utcnow_timestamp() rule = self.get(item_id, **kwargs) all_rules = self._get_rules(rule.organization_id) try: diff --git a/rest_api/rest_api_server/controllers/shareable_resource.py b/rest_api/rest_api_server/controllers/shareable_resource.py index 2e2fa3d86..de2a617e2 100644 --- a/rest_api/rest_api_server/controllers/shareable_resource.py +++ b/rest_api/rest_api_server/controllers/shareable_resource.py @@ -1,3 +1,4 @@ +import tools.optscale_time as opttime from datetime import datetime from optscale_client.herald_client.client_v2 import Client as HeraldClient import logging @@ -135,7 +136,7 @@ def _check_slots_with_existing(self, resource_id, acquired_since, if not released_at: released_at = 0 if not acquired_since: - acquired_since = int(datetime.utcnow().timestamp()) + acquired_since = opttime.utcnow_timestamp() shareable_bookings = self.session.query( ShareableBooking).filter( ShareableBooking.resource_id == resource_id, @@ -285,7 +286,7 @@ def create(self, **kwargs): self._check_dates(acquired_since, released_at) self._check_slots_with_existing(resource_id, acquired_since, released_at) - now_ts = int(datetime.utcnow().timestamp()) + now_ts = opttime.utcnow_timestamp() if (is_admin_permission is False and acquired_since is not None and acquired_since < now_ts): raise ForbiddenException(Err.OE0495, []) @@ -304,7 +305,7 @@ def get_by_id(self, item_id, **kwargs): return self.fill_booking_acquired_by(result) def get_resource_current_booking(self, resource_id): - now_ts = int(datetime.utcnow().timestamp()) + now_ts = opttime.utcnow_timestamp() resource_booking = self.session.query(ShareableBooking).filter( 
and_(ShareableBooking.resource_id == resource_id, ShareableBooking.acquired_since <= now_ts, @@ -316,7 +317,7 @@ def get_resource_current_booking(self, resource_id): return booking def get_upcoming_booking(self, resource_id, current_booking=None): - acquired_since = int(datetime.utcnow().timestamp()) + acquired_since = opttime.utcnow_timestamp() if current_booking and current_booking.get('released_at'): acquired_since = current_booking['released_at'] resource_bookings = self.session.query(ShareableBooking).filter( @@ -370,7 +371,7 @@ def check_autorelease(self, item_id): if (item.jira_auto_release and not self.get_jira_issue_attachments([item.id])): self.release(item.id, is_admin_permission=True, - released_at=int(datetime.utcnow().timestamp())) + released_at=opttime.utcnow_timestamp()) def release(self, item_id, is_admin_permission, **kwargs): item = self.get(item_id) @@ -383,7 +384,7 @@ def release(self, item_id, is_admin_permission, **kwargs): raise WrongArgumentsException(Err.OE0480, ['Resource', resource['_id']]) - now_ts = int(datetime.utcnow().timestamp()) + now_ts = opttime.utcnow_timestamp() released_at = kwargs.get('released_at') check_int_attribute('released_at', released_at) # only released_at param is expected @@ -420,7 +421,7 @@ def release(self, item_id, is_admin_permission, **kwargs): if released_at <= now_ts: self.publish_task({ 'organization_id': item.organization_id, - 'observe_time': int(datetime.utcnow().timestamp()), + 'observe_time': opttime.utcnow_timestamp(), 'resource': resource, 'object_id': item.id }) @@ -477,7 +478,7 @@ def get_shareable_resources(self, organization_id): Pool.deleted.is_(False), ).all() pools_map = {pool.id: pool for pool in pools} - now_ts = int(datetime.utcnow().timestamp()) + now_ts = opttime.utcnow_timestamp() shareable_bookings = self.session.query( ShareableBooking).filter( ShareableBooking.organization_id == organization_id, @@ -520,7 +521,7 @@ def get_resource_bookings(self, resource_id): self._check_resource(resource) else: raise NotFoundException(Err.OE0002, ['Resource', resource_id]) - now_ts = int(datetime.utcnow().timestamp()) + now_ts = opttime.utcnow_timestamp() resource_bookings = [] bookings = self.session.query( ShareableBooking).filter( @@ -542,7 +543,7 @@ def delete(self, item_id, is_admin_permission=False): item_id]) resource = next(self.resources_collection.find( {'_id': shareable_booking.resource_id}).limit(1)) - now_ts = int(datetime.utcnow().timestamp()) + now_ts = opttime.utcnow_timestamp() # admin can delete all bookings, resource owner can delete only # future bookings if (not is_admin_permission and diff --git a/rest_api/rest_api_server/controllers/ttl_analysis.py b/rest_api/rest_api_server/controllers/ttl_analysis.py index 39bd6cfc1..96be1d3f3 100644 --- a/rest_api/rest_api_server/controllers/ttl_analysis.py +++ b/rest_api/rest_api_server/controllers/ttl_analysis.py @@ -1,4 +1,5 @@ import logging +import tools.optscale_time as opttime from datetime import datetime from tools.optscale_exceptions.common_exc import ( @@ -52,7 +53,7 @@ def get(self, pool_id, start_date, end_date=None, ttl=None): raise NotFoundException(Err.OE0002, [Pool.__name__, pool_id]) if end_date is None: - end_date = int(datetime.utcnow().timestamp()) + end_date = opttime.utcnow_timestamp() if start_date > end_date: raise WrongArgumentsException(Err.OE0446, ['end_date', 'start_date']) diff --git a/rest_api/rest_api_server/handlers/v2/k8s_rightsizing.py b/rest_api/rest_api_server/handlers/v2/k8s_rightsizing.py index 05ef44554..29db8c15b 100644 --- 
a/rest_api/rest_api_server/handlers/v2/k8s_rightsizing.py +++ b/rest_api/rest_api_server/handlers/v2/k8s_rightsizing.py @@ -11,6 +11,7 @@ from rest_api.rest_api_server.exceptions import Err from tools.optscale_exceptions.common_exc import WrongArgumentsException from tools.optscale_exceptions.http_exc import OptHTTPError +from tools.optscale_time import utcnow_timestamp LOG = logging.getLogger(__name__) DEFAULT_INTERVAL = 900 @@ -31,7 +32,7 @@ def get_k8s_metric_arguments(self): args.update( { 'end_date': self.get_arg( - 'end_date', int, default=int(datetime.utcnow().timestamp()) + 'end_date', int, default=utcnow_timestamp() ), 'cloud_account_id': self.get_arg('cloud_account_id', str, None) }) diff --git a/rest_api/rest_api_server/handlers/v2/resource_metrics.py b/rest_api/rest_api_server/handlers/v2/resource_metrics.py index 7477071c3..d6b0fa732 100644 --- a/rest_api/rest_api_server/handlers/v2/resource_metrics.py +++ b/rest_api/rest_api_server/handlers/v2/resource_metrics.py @@ -14,6 +14,7 @@ from optscale_client.metroculus_client.client import Client as MetroculusClient from tools.optscale_exceptions.common_exc import WrongArgumentsException from tools.optscale_exceptions.http_exc import OptHTTPError +from tools.optscale_time import utcnow_timestamp LOG = logging.getLogger(__name__) DEFAULT_INTERVAL = 900 @@ -42,7 +43,8 @@ def check_date_arguments(args): def get_metric_arguments(self): args = { 'start_date': self.get_arg('start_date', int), - 'end_date': self.get_arg('end_date', int, default=int(datetime.utcnow().timestamp())), + 'end_date': self.get_arg('end_date', int, + default=utcnow_timestamp()), 'interval': self.get_arg('interval', int, default=DEFAULT_INTERVAL) } for param, value in args.items(): diff --git a/rest_api/rest_api_server/handlers/v2/shareable_resources.py b/rest_api/rest_api_server/handlers/v2/shareable_resources.py index d6c3134ad..7ead0ecfc 100644 --- a/rest_api/rest_api_server/handlers/v2/shareable_resources.py +++ b/rest_api/rest_api_server/handlers/v2/shareable_resources.py @@ -12,6 +12,7 @@ from rest_api.rest_api_server.models.enums import RolePurposes from rest_api.rest_api_server.utils import run_task, ModelEncoder, check_int_attribute from tools.optscale_exceptions.common_exc import WrongArgumentsException +from tools.optscale_time import utcnow_timestamp class ShareableBookingBaseAsyncHandler(BaseAuthHandler, BaseHandler): @@ -246,7 +247,7 @@ async def get(self, organization_id): args = { 'start_date': self.get_arg('start_date', int), 'end_date': self.get_arg( - 'end_date', int, default=int(datetime.utcnow().timestamp())) + 'end_date', int, default=utcnow_timestamp()) } self.check_date_arguments(args) res = await run_task(self.controller.get_shareable_bookings, diff --git a/rest_api/rest_api_server/tests/unittests/test_api_base.py b/rest_api/rest_api_server/tests/unittests/test_api_base.py index 2cf8a4dcc..89f239019 100644 --- a/rest_api/rest_api_server/tests/unittests/test_api_base.py +++ b/rest_api/rest_api_server/tests/unittests/test_api_base.py @@ -3,6 +3,7 @@ import uuid import subprocess import tempfile +import tools.optscale_time as opttime from ast import literal_eval from datetime import datetime, timezone from unittest.mock import patch, PropertyMock @@ -142,7 +143,7 @@ def setUp(self, version='v2'): 'rest_api.rest_api_server.handlers.v1.base.BaseAuthHandler.' 
'get_meta_by_token', return_value={ 'user_id': self._user_id, - 'valid_until': datetime.utcnow().timestamp() * 2 + 'valid_until': opttime.utcnow_timestamp() * 2 }).start() patch('rest_api.rest_api_server.controllers.base.BaseController.' 'assign_role_to_user').start() @@ -297,7 +298,7 @@ def create_cloud_account(self, organization_id, config, account_id=None, for info in infos['discovery_info']: self.client.discovery_info_update( info['id'], - {'last_discovery_at': int(datetime.utcnow().timestamp())}) + {'last_discovery_at': opttime.utcnow_timestamp()}) if ctrl: mock.stop() return code, cloud_acc @@ -326,7 +327,7 @@ def _to_discovered_resource(cad_resource, first_seen=None): obj.pop('resource_id', None) obj['meta'] = getattr(cad_resource, 'meta') obj['resource_type'] = resource_type_map.get(model) - obj['last_seen'] = int(datetime.utcnow().timestamp()) + obj['last_seen'] = opttime.utcnow_timestamp() if first_seen is not None: obj['first_seen'] = first_seen obj['active'] = True @@ -334,7 +335,7 @@ def _to_discovered_resource(cad_resource, first_seen=None): def resource_discovery_call(self, resources, create_resources=True, first_seen=None): - start_time = int(datetime.utcnow().timestamp()) + start_time = opttime.utcnow_timestamp() payloads_map = {} for rss in resources: obj = self._to_discovered_resource(rss, first_seen) @@ -362,7 +363,7 @@ def export_id_from_link(link): return link.rsplit('/', 1)[1] def _make_resources_active(self, resource_ids): - seen_time = int(datetime.utcnow().timestamp() - 1) + seen_time = opttime.utcnow_timestamp() - 1 self.resources_collection.bulk_write([UpdateMany( filter={'_id': {'$in': resource_ids}}, update={'$set': {'last_seen': seen_time, 'active': True}}, @@ -589,7 +590,7 @@ def get_csv(path): [ ('cloud_account_id', 'String', 'default'), ('resource_id', 'String', 'default'), - ('date', 'DateTime', datetime.utcnow()), + ('date', 'DateTime', opttime.utcnow()), ('cost', 'Float64', 0), ('sign', 'Int8', 1) ], self.expenses @@ -598,7 +599,7 @@ def get_csv(path): [ ('cloud_account_id', 'String', 'default'), ('resource_id', 'String', 'default'), - ('date', 'DateTime', datetime.utcnow()), + ('date', 'DateTime', opttime.utcnow()), ('type', "Enum8('outbound' = 1, 'inbound' = 2)", 1), ('from', 'String', 'default'), ('to', 'String', 'default'), @@ -611,7 +612,7 @@ def get_csv(path): [ ('cloud_account_id', 'String', 'default'), ('resource_id', 'String', 'default'), - ('date', 'DateTime', datetime.utcnow()), + ('date', 'DateTime', opttime.utcnow()), ('instance_type', 'String', ''), ('offer_id', 'String', 'default'), ('offer_type', "Enum8('ri' = 1, 'sp' = 2)", 1), @@ -628,7 +629,7 @@ def get_csv(path): [ ('cloud_account_id', 'String', 'default'), ('resource_id', 'String', 'default'), - ('date', 'DateTime', datetime.utcnow()), + ('date', 'DateTime', opttime.utcnow()), ('instance_type', 'String', 'default'), ('os', 'String', 'default'), ('location', 'String', 'default'), @@ -644,7 +645,7 @@ def get_csv(path): ('bucket', 'String', 'default'), ('key', 'String', 'default'), ('size', 'Integer', 1), - ('date', 'DateTime', datetime.utcnow()), + ('date', 'DateTime', opttime.utcnow()), ], self.gemini ) } @@ -743,7 +744,7 @@ def switch_user(self, user_id): def _mock_auth_user(self, user_id): self.p_get_meta_by_token.return_value = { 'user_id': user_id, - 'valid_until': datetime.utcnow().timestamp() * 2 + 'valid_until': opttime.utcnow_timestamp() * 2 } def delete_organization(self, org_id): @@ -831,7 +832,7 @@ def create_org_constraint(organization_id, pool_id, filters=None, ) 
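
A side note on the valid_until mock above: multiplying the current epoch by two is a cheap way to build an expiry that no test run can outlive. Rough, illustrative arithmetic (the literal stands in for "now" and is an assumption, not a value from the code):

    now = 1_700_000_000        # opttime.utcnow_timestamp(), late 2023
    valid_until = now * 2      # 3_400_000_000, i.e. roughly the year 2077
    assert valid_until > now + 50 * 365 * 86400   # still valid 50 years out
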
session.add(constraint) if deleted: - constraint.deleted_at = int(datetime.utcnow().timestamp()) + constraint.deleted_at = opttime.utcnow_timestamp() session.commit() res = constraint.to_dict() res['type'] = res['type'].value @@ -856,7 +857,7 @@ def create_org_limit_hit(self, organization_id, pool_id, constraint_id=None, hit.created_at = created_at session.add(hit) if deleted: - hit.deleted_at = int(datetime.utcnow().timestamp()) + hit.deleted_at = opttime.utcnow_timestamp() session.commit() res = hit.to_dict() return res diff --git a/rest_api/rest_api_server/tests/unittests/test_archived_recommendations_api.py b/rest_api/rest_api_server/tests/unittests/test_archived_recommendations_api.py index f2db92001..ae270d2d3 100644 --- a/rest_api/rest_api_server/tests/unittests/test_archived_recommendations_api.py +++ b/rest_api/rest_api_server/tests/unittests/test_archived_recommendations_api.py @@ -1,5 +1,5 @@ import uuid -from datetime import datetime +import tools.optscale_time as opttime from unittest.mock import patch from rest_api.rest_api_server.handlers.v2.archived_recommendations import ( @@ -37,7 +37,7 @@ def setUp(self, version='v2'): _, self.cloud_acc = self.create_cloud_account( self.org_id, creds, auth_user_id=self.user_id) - self.end_date = int(datetime.utcnow().timestamp()) + self.end_date = opttime.utcnow_timestamp() self.start_date = self.end_date - TWO_WEEK_TS self.instance = { 'cloud_resource_id': 'i-9323123124', @@ -67,7 +67,7 @@ def setUp(self, version='v2'): def _add_archive_recommendation(self, organization_id, module, reason, data, archived_at=None): if not archived_at: - archived_at = int(datetime.utcnow().timestamp()) + archived_at = opttime.utcnow_timestamp() record = { 'module': module, 'organization_id': organization_id, diff --git a/rest_api/rest_api_server/tests/unittests/test_assignment_api_bulk.py b/rest_api/rest_api_server/tests/unittests/test_assignment_api_bulk.py index 09c5d4105..cceec3119 100644 --- a/rest_api/rest_api_server/tests/unittests/test_assignment_api_bulk.py +++ b/rest_api/rest_api_server/tests/unittests/test_assignment_api_bulk.py @@ -1,3 +1,4 @@ +import tools.optscale_time as opttime from datetime import datetime from freezegun import freeze_time from unittest.mock import patch @@ -144,7 +145,7 @@ def test_create_assignment_bulk_idempotency(self, p_authorize): No new assignments. 
""" - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() p_authorize.return_value = True with self.switch_user(self.user2_id): assignments = self._get_assignments() @@ -408,7 +409,7 @@ def test_expenses_after_create_assignment_bulk(self, p_authorize): self._add_expense(resource, day_in_past) code, resp = self.client.clean_expenses_get( self.org_id, int(day_in_past.timestamp()), - int(datetime.utcnow().timestamp())) + opttime.utcnow_timestamp()) self.assertEqual(code, 200) expenses = resp['clean_expenses'] self.assertEqual(len(expenses), len(all_resources)) @@ -424,7 +425,7 @@ def test_expenses_after_create_assignment_bulk(self, p_authorize): assignment['resource_id'] in all_res_ids) code, resp = self.client.clean_expenses_get( self.org_id, int(day_in_past.timestamp()), - int(datetime.utcnow().timestamp())) + opttime.utcnow_timestamp()) expenses = resp['clean_expenses'] self.assertEqual(len(expenses), len(all_resources)) diff --git a/rest_api/rest_api_server/tests/unittests/test_available_filters.py b/rest_api/rest_api_server/tests/unittests/test_available_filters.py index 351e09415..0046d489c 100644 --- a/rest_api/rest_api_server/tests/unittests/test_available_filters.py +++ b/rest_api/rest_api_server/tests/unittests/test_available_filters.py @@ -1,4 +1,4 @@ -import os +import tools.optscale_time as opttime from datetime import datetime, timezone from unittest.mock import patch @@ -94,7 +94,7 @@ def test_available_filters_dates_values(self): self.assertEqual(response['error']['error_code'], 'OE0224') def test_available_filters_limit(self): - time = int(datetime.utcnow().timestamp()) + time = opttime.utcnow_timestamp() code, response = self.client.available_filters_get( self.org_id, time, time + 1, {'limit': 1}) self.assertEqual(code, 400) diff --git a/rest_api/rest_api_server/tests/unittests/test_breakdown_expenses.py b/rest_api/rest_api_server/tests/unittests/test_breakdown_expenses.py index 79d827fd0..2e54743b4 100644 --- a/rest_api/rest_api_server/tests/unittests/test_breakdown_expenses.py +++ b/rest_api/rest_api_server/tests/unittests/test_breakdown_expenses.py @@ -4,6 +4,7 @@ from unittest.mock import patch from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase from rest_api.rest_api_server.utils import get_nil_uuid +from tools.optscale_time import utcfromtimestamp, utcnow_timestamp, utcnow DAY_IN_SECONDS = 86400 @@ -108,7 +109,7 @@ def test_breakdown_expenses_dates_values(self): self.verify_error_code(response, 'OE0224') def test_breakdown_expenses_limit(self): - time = int(datetime.utcnow().timestamp()) + time = utcnow_timestamp() code, response = self.client.breakdown_expenses_get( self.org_id, time, time + 1, 'pool_id', {'limit': 1}) self.assertEqual(code, 400) @@ -142,7 +143,7 @@ def test_invalid_organization(self): def test_breakdown_expenses_invalid_breakdown_by(self): for breakdown_by in [1, 'invalid']: - time = int(datetime.utcnow().timestamp()) + time = utcnow_timestamp() code, response = self.client.breakdown_expenses_get( self.org_id, time, time + 1, breakdown_by) self.assertEqual(code, 400) @@ -159,21 +160,21 @@ def test_breakdown_expenses_without_breakdown_by(self): { 'cloud_account_id': self.cloud_acc1['id'], 'resource_id': res1['id'], - 'date': datetime.utcfromtimestamp(day_1_ts), + 'date': utcfromtimestamp(day_1_ts), 'cost': 10, 'sign': 1 }, { 'cloud_account_id': self.cloud_acc2['id'], 'resource_id': res2['id'], - 'date': datetime.utcfromtimestamp(day_2_ts), + 'date': utcfromtimestamp(day_2_ts), 'cost': 20, 'sign': 1 }, 
{ 'cloud_account_id': self.cloud_acc1['id'], 'resource_id': res1['id'], - 'date': datetime.utcfromtimestamp(day_1_ts) - timedelta(days=1), + 'date': utcfromtimestamp(day_1_ts) - timedelta(days=1), 'cost': 1, 'sign': 1 }, @@ -207,7 +208,7 @@ def test_breakdown_by_values(self): self.expenses.append({ 'cloud_account_id': res['cloud_account_id'], 'resource_id': res['id'], - 'date': datetime.utcfromtimestamp(day_1_ts), + 'date': utcfromtimestamp(day_1_ts), 'cost': 10, 'sign': 1 }) @@ -268,7 +269,7 @@ def test_breakdown_expenses(self): last_seen=day_1_ts, region='us-none', service_name='service1', pool_id=self.sub_pool1['id'], employee_id=self.employee1['id'] ) - day_1 = datetime.utcfromtimestamp(day_1_ts) + day_1 = utcfromtimestamp(day_1_ts) self.expenses.extend([ { 'cloud_account_id': self.cloud_acc1['id'], @@ -462,7 +463,7 @@ def test_breakdown_expenses(self): }) def test_no_expenses(self): - end_date = datetime.utcnow() + end_date = utcnow() start_date = end_date - timedelta(days=7) code, resp = self.client.breakdown_expenses_get( self.org_id, int(start_date.timestamp()), int(end_date.timestamp())) @@ -698,7 +699,7 @@ def test_breakdown_expenses_traffic_filters(self): last_seen=day_1_ts, region='us-none', service_name='service1', pool_id=self.sub_pool1['id'], employee_id=self.employee1['id'] ) - day_1 = datetime.utcfromtimestamp(day_1_ts) + day_1 = utcfromtimestamp(day_1_ts) self.expenses.extend([ { 'cloud_account_id': self.cloud_acc1['id'], diff --git a/rest_api/rest_api_server/tests/unittests/test_breakdown_tags.py b/rest_api/rest_api_server/tests/unittests/test_breakdown_tags.py index 728a3da36..28393ed69 100644 --- a/rest_api/rest_api_server/tests/unittests/test_breakdown_tags.py +++ b/rest_api/rest_api_server/tests/unittests/test_breakdown_tags.py @@ -1,4 +1,4 @@ -import os +import tools.optscale_time as opttime from collections import defaultdict from datetime import datetime, timezone, timedelta from unittest.mock import patch @@ -132,7 +132,7 @@ def test_breakdown_tags_dates_values(self): self.verify_error_code(response, 'OE0224') def test_breakdown_tags_limit(self): - time = int(datetime.utcnow().timestamp()) + time = opttime.utcnow_timestamp() code, response = self.client.breakdown_tags_get( self.org_id, time, time + 1, {'limit': 1}) self.assertEqual(code, 400) diff --git a/rest_api/rest_api_server/tests/unittests/test_calendar_observer.py b/rest_api/rest_api_server/tests/unittests/test_calendar_observer.py index 902799156..2264e262b 100644 --- a/rest_api/rest_api_server/tests/unittests/test_calendar_observer.py +++ b/rest_api/rest_api_server/tests/unittests/test_calendar_observer.py @@ -1,4 +1,5 @@ import uuid +import tools.optscale_time as opttime from datetime import datetime, timedelta from freezegun import freeze_time from unittest.mock import patch, ANY, PropertyMock @@ -220,7 +221,7 @@ def test_observe_event_changed(self, p_create_event, p_public_ip): summary=f"{resource.get('name')} is acquired by {self.employee['name']}") p_list_event.assert_called_once_with( c_sync['calendar_id'], ANY, ANY, - datetime.utcnow() - timedelta(days=28) + opttime.utcnow() - timedelta(days=28) ) with patch( @@ -350,7 +351,7 @@ def test_observe_event_deleted(self, p_create_event, c_sync['calendar_id'], event['id'], status='confirmed') p_list_event.assert_called_once_with( c_sync['calendar_id'], ANY, ANY, - datetime.utcnow() - timedelta(days=28) + opttime.utcnow() - timedelta(days=28) ) with freeze_time(dt + timedelta(days=1)): with 
patch('optscale_client.config_client.client.Client.google_calendar_service_key', diff --git a/rest_api/rest_api_server/tests/unittests/test_cloud_accounts.py b/rest_api/rest_api_server/tests/unittests/test_cloud_accounts.py index 13d679620..a451c81a8 100644 --- a/rest_api/rest_api_server/tests/unittests/test_cloud_accounts.py +++ b/rest_api/rest_api_server/tests/unittests/test_cloud_accounts.py @@ -1,6 +1,7 @@ import datetime import copy import uuid +import tools.optscale_time as opttime from copy import deepcopy from freezegun import freeze_time @@ -160,7 +161,7 @@ def test_pool_and_rule_for_created_cloud_acc(self): _, employee = self.client.employee_create( self.org['id'], {'name': 'employee', 'auth_user_id': auth_user_id}) - created_at = datetime.datetime.utcnow().timestamp() + created_at = opttime.utcnow_timestamp() with freeze_time(datetime.datetime.fromtimestamp(created_at)): _, cloud_acc = self.create_cloud_account( self.org_id, self.valid_aws_cloud_acc, @@ -200,7 +201,7 @@ def test_pool_name_for_created_cloud(self): 'auth_user_id': auth_user_id}) cloud_name = 'aws cloud name' self.valid_aws_cloud_acc['name'] = cloud_name - created_at = datetime.datetime.utcnow().timestamp() + created_at = opttime.utcnow_timestamp() with freeze_time(datetime.datetime.fromtimestamp(created_at - 6)): _, cloud_acc = self.create_cloud_account( self.org_id, self.valid_aws_cloud_acc, @@ -795,7 +796,7 @@ def test_create_cloud_acc_duplicate_name(self): self.assertEqual(code, 409) def test_create_duplicate_name_after_deletion(self): - created_at = datetime.datetime.utcnow().timestamp() + created_at = opttime.utcnow_timestamp() with freeze_time(datetime.datetime.fromtimestamp(created_at - 1)): _, resp = self.create_cloud_account( self.org_id, self.valid_aws_cloud_acc) @@ -1438,7 +1439,7 @@ def test_patch_kubernetes_import_time(self): 'validate_credentials', return_value={'account_id': cloud_acc['account_id'], 'warnings': []}).start() - ts = int(datetime.datetime.utcnow().timestamp()) + ts = opttime.utcnow_timestamp() code, res = self.client.cloud_account_update( cloud_acc['id'], {'last_import_at': ts}) self.assertEqual(code, 200) @@ -1719,7 +1720,7 @@ def test_notify_ca_changed_skip_sending(self, schedule_patch, publish_patch): self.org_id, self.valid_aws_cloud_acc, account_id=account_id) self.assertEqual(code, 201) - params = {'last_import_at': int(datetime.datetime.utcnow().timestamp())} + params = {'last_import_at': opttime.utcnow_timestamp()} patch('tools.cloud_adapter.clouds.aws.Aws.validate_credentials', return_value={'account_id': account_id, 'warnings': []}).start() publish_patch.reset_mock() diff --git a/rest_api/rest_api_server/tests/unittests/test_cloud_resources.py b/rest_api/rest_api_server/tests/unittests/test_cloud_resources.py index d760b0ed7..f171c3e99 100644 --- a/rest_api/rest_api_server/tests/unittests/test_cloud_resources.py +++ b/rest_api/rest_api_server/tests/unittests/test_cloud_resources.py @@ -1,5 +1,6 @@ import uuid -from datetime import datetime, timezone +import tools.optscale_time as opttime +from datetime import datetime from freezegun import freeze_time from pymongo import UpdateOne from unittest.mock import patch, ANY @@ -358,7 +359,7 @@ def test_patch_shareable_with_bookings(self): res['resource_type'] = 'Instance' code, resource = self.cloud_resource_create(self.cloud_acc_id, res) self.assertEqual(code, 201) - seen_time = int(datetime.utcnow().timestamp() - 5) + seen_time = opttime.utcnow_timestamp() - 5 self.resources_collection.bulk_write([UpdateOne( filter={'_id': 
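
Why the freeze_time-based tests above keep passing after the swap: freezegun patches datetime.datetime itself, so a helper that reads the clock through datetime.now() (as in the sketch earlier) is frozen together with the code under test. A hypothetical check under that assumption:

    from datetime import datetime, timezone
    from freezegun import freeze_time
    import tools.optscale_time as opttime

    with freeze_time('2021-09-13 15:00:00'):
        # Helper and direct tz-aware read observe the same frozen instant.
        assert opttime.utcnow() == datetime(2021, 9, 13, 15, 0, 0)
        assert opttime.utcnow_timestamp() == int(datetime(
            2021, 9, 13, 15, 0, tzinfo=timezone.utc).timestamp())
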
resource['id']}, update={'$set': {'last_seen': seen_time, 'active': True}})]) @@ -576,7 +577,7 @@ def test_cloud_resource_id_range(self): self.assertEqual(code, 201) def add_cached_resource(self, resource_ids, valid_until=None, active=True): - last_seen = int(datetime.utcnow().timestamp()) + last_seen = opttime.utcnow_timestamp() if valid_until: active = valid_until > last_seen last_seen = valid_until - DEFAULT_CACHE_TIME @@ -590,7 +591,7 @@ def add_cached_resource(self, resource_ids, valid_until=None, active=True): def add_recommendations(self, resource_id, modules, timestamp=None, last_check=None, checklist=True): if not timestamp: - timestamp = int(datetime.utcnow().timestamp()) + timestamp = opttime.utcnow_timestamp() recommendations = { 'modules': modules, @@ -695,9 +696,9 @@ def test_get_cloud_resource(self): self.assertEqual(details['pool_purpose'], 'business_unit') self.add_cached_resource( - [resource['id'], 'res_3_id'], datetime.utcnow().timestamp() + 500) + [resource['id'], 'res_3_id'], opttime.utcnow_timestamp() + 500) self.add_cached_resource( - [resource['id'], 'res_3_id'], datetime.utcnow().timestamp() + 1000) + [resource['id'], 'res_3_id'], opttime.utcnow_timestamp() + 1000) code, response = self.client.cloud_resource_get( resource['id'], details=True) details = response.get('details') @@ -776,7 +777,7 @@ def test_get_environment_resource(self): self.extend_expenses(expenses) self.add_cached_resource( [resource['id'], 'res_3_id'], - datetime.utcnow().timestamp() + 1000) + opttime.utcnow_timestamp() + 1000) code, response = self.client.cloud_resource_get( resource['id'], details=True) @@ -847,7 +848,7 @@ def test_get_cloud_resource_with_bookings(self): self.assertIn(resource['id'], data['details']['env_properties_collector_link']) - now_ts = int(datetime.utcnow().timestamp()) + now_ts = opttime.utcnow_timestamp() schedule_book = { 'resource_id': resource['id'], 'acquired_by_id': employee['id'], @@ -877,7 +878,7 @@ def test_get_cloud_resource_details_without_expenses(self): 'pool_id': self.org['pool_id'], 'employee_id': employee['id'] } - now = datetime.utcnow() + now = opttime.utcnow() with freeze_time(now): _, resource = self.cloud_resource_create( self.cloud_acc_id, resource_dict) @@ -1049,7 +1050,7 @@ def test_resource_details_deleted_constraints(self): _, resource = self.cloud_resource_create( self.cloud_acc_id, resource_dict) self.add_cached_resource([resource['id']]) - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() self.client.pool_policy_create( self.org['pool_id'], { 'limit': 150, @@ -1101,8 +1102,8 @@ def test_resource_active_several_caches(self): self.cloud_acc_id, resource_dict) cache_map = [ - (resource['id'], int(datetime.utcnow().timestamp()) + 1000), - (str(uuid.uuid4()), int(datetime.utcnow().timestamp()) + 2000), + (resource['id'], opttime.utcnow_timestamp() + 1000), + (str(uuid.uuid4()), opttime.utcnow_timestamp() + 2000), ] for res_id, valid_until in cache_map: self.add_cached_resource([res_id], valid_until) @@ -1159,7 +1160,7 @@ def test_resource_recommendations_old_checklist(self): code, resource = self.cloud_resource_create(self.cloud_acc_id, self.valid_resource) self.add_cached_resource([resource['id']]) - last_check = int(datetime.utcnow().timestamp()) + last_check = opttime.utcnow_timestamp() timstamp = last_check - 1 self.add_recommendations(resource['id'], [ {'name': 'module1', 'saving': 10}, @@ -1224,7 +1225,7 @@ def test_get_detail_traffic_info(self): { 'cloud_account_id': self.cloud_acc['id'], 'resource_id': 
resource['cloud_resource_id'], - 'date': int(datetime.utcnow().timestamp()), + 'date': opttime.utcnow_timestamp(), 'type': 1, 'from': 'region_1', 'to': 'External', @@ -1235,7 +1236,7 @@ def test_get_detail_traffic_info(self): { 'cloud_account_id': self.cloud_acc['id'], 'resource_id': resource['cloud_resource_id'], - 'date': int(datetime.utcnow().timestamp()), + 'date': opttime.utcnow_timestamp(), 'type': 1, 'from': 'region_2', 'to': 'External', @@ -1267,7 +1268,7 @@ def test_get_cluster_detail_traffic_info(self): { 'cloud_account_id': self.cloud_acc['id'], 'resource_id': resource['cloud_resource_id'], - 'date': int(datetime.utcnow().timestamp()), + 'date': opttime.utcnow_timestamp(), 'type': 1, 'from': 'region_1', 'to': 'External', diff --git a/rest_api/rest_api_server/tests/unittests/test_cloud_resources_bulk.py b/rest_api/rest_api_server/tests/unittests/test_cloud_resources_bulk.py index 280a542ac..86e87ac52 100644 --- a/rest_api/rest_api_server/tests/unittests/test_cloud_resources_bulk.py +++ b/rest_api/rest_api_server/tests/unittests/test_cloud_resources_bulk.py @@ -1,5 +1,6 @@ import uuid -from datetime import datetime, timezone +import tools.optscale_time as opttime +from datetime import datetime from unittest.mock import patch from freezegun import freeze_time @@ -130,7 +131,7 @@ def _to_discovered_resource(cad_resource, active=True): obj[field] = val obj.pop('resource_id', None) obj['resource_type'] = resource_type_map.get(model) - obj['last_seen'] = int(datetime.utcnow().timestamp()) + obj['last_seen'] = opttime.utcnow_timestamp() obj['active'] = active obj['meta'] = getattr(cad_resource, 'meta') return obj @@ -660,7 +661,7 @@ def test_update_existing_volume_attached(self): volume.attached = True resource = self._to_discovered_resource(volume) - cache_update_time1 = datetime.utcnow().timestamp() + REDISCOVER_TIME + 1 + cache_update_time1 = opttime.utcnow_timestamp() + REDISCOVER_TIME + 1 with freeze_time(datetime.fromtimestamp(cache_update_time1)): code, result = self.cloud_resource_create_bulk( self.cloud_acc1_id, {'resources': [resource]}, behavior='update_existing', @@ -717,7 +718,7 @@ def test_update_existing_instance_stopped_allocated(self): instance.stopped_allocated = False resource = self._to_discovered_resource(instance) - cache_update_time1 = datetime.utcnow().timestamp() + REDISCOVER_TIME + 1 + cache_update_time1 = opttime.utcnow_timestamp() + REDISCOVER_TIME + 1 with freeze_time(datetime.fromtimestamp(cache_update_time1)): code, result = self.cloud_resource_create_bulk( self.cloud_acc1_id, {'resources': [resource]}, @@ -777,7 +778,7 @@ def test_update_existing_stopped_allocated_is_none(self): instance.stopped_allocated = None resource = self._to_discovered_resource(instance) - with freeze_time(datetime.fromtimestamp(int(datetime.utcnow().timestamp()))): + with freeze_time(datetime.fromtimestamp(opttime.utcnow_timestamp())): code, result = self.cloud_resource_create_bulk( self.cloud_acc1_id, {'resources': [resource]}, behavior='update_existing', return_resources=True) @@ -789,7 +790,7 @@ def test_update_existing_stopped_allocated_is_none(self): instance.stopped_allocated = False resource = self._to_discovered_resource(instance) - cache_update_time1 = datetime.utcnow().timestamp() + REDISCOVER_TIME + 1 + cache_update_time1 = opttime.utcnow_timestamp() + REDISCOVER_TIME + 1 with freeze_time(datetime.fromtimestamp(cache_update_time1)): code, result = self.cloud_resource_create_bulk( self.cloud_acc1_id, {'resources': [resource]}, @@ -837,7 +838,7 @@ def 
test_resource_create_resource_without_stopped_allocated(self): # if in the first discover we got invalid status from Azure instance.stopped_allocated = None resource = self._to_discovered_resource(instance) - with freeze_time(datetime.fromtimestamp(int(datetime.utcnow().timestamp()))): + with freeze_time(datetime.fromtimestamp(opttime.utcnow_timestamp())): code, result = self.client.cloud_resource_create_bulk( self.cloud_acc1_id, {'resources': [resource]}, behavior='update_existing', return_resources=True) @@ -848,7 +849,7 @@ def test_resource_create_resource_without_stopped_allocated(self): result['resources'][0]['meta']['stopped_allocated'], False) instance.stopped_allocated = True resource = self._to_discovered_resource(instance) - cache_update_time1 = datetime.utcnow().timestamp() + REDISCOVER_TIME + 1 + cache_update_time1 = opttime.utcnow_timestamp() + REDISCOVER_TIME + 1 with freeze_time(datetime.fromtimestamp(cache_update_time1)): code, result = self.client.cloud_resource_create_bulk( self.cloud_acc1_id, {'resources': [resource]}, behavior='update_existing', @@ -860,7 +861,7 @@ def test_resource_create_resource_without_stopped_allocated(self): result['resources'][0]['meta']['stopped_allocated'], True) instance.stopped_allocated = False resource = self._to_discovered_resource(instance) - cache_update_time2 = datetime.utcnow().timestamp() + REDISCOVER_TIME + 1 + cache_update_time2 = opttime.utcnow_timestamp() + REDISCOVER_TIME + 1 with freeze_time(datetime.fromtimestamp(cache_update_time2)): code, result = self.client.cloud_resource_create_bulk( self.cloud_acc1_id, {'resources': [resource]}, @@ -873,7 +874,7 @@ def test_resource_create_resource_without_stopped_allocated(self): result['resources'][0]['meta']['stopped_allocated'], False) instance.stopped_allocated = True resource = self._to_discovered_resource(instance) - cache_update_time3 = datetime.utcnow().timestamp() + REDISCOVER_TIME + 1 + cache_update_time3 = opttime.utcnow_timestamp() + REDISCOVER_TIME + 1 with freeze_time(datetime.fromtimestamp(cache_update_time3)): code, result = self.client.cloud_resource_create_bulk( self.cloud_acc1_id, {'resources': [resource]}, @@ -1304,7 +1305,7 @@ def test_create_resources_threshold_fields(self): resource = self.valid_resource2.copy() resource['cloud_resource_id'] = 'some_resource_%s' % str(uuid.uuid4()) for field in fields: - resource[field] = int(datetime.utcnow().timestamp()) + resource[field] = opttime.utcnow_timestamp() valid_body = { 'resources': [ resource diff --git a/rest_api/rest_api_server/tests/unittests/test_cloud_resources_discovery.py b/rest_api/rest_api_server/tests/unittests/test_cloud_resources_discovery.py index 0ade3713b..7a83005c0 100644 --- a/rest_api/rest_api_server/tests/unittests/test_cloud_resources_discovery.py +++ b/rest_api/rest_api_server/tests/unittests/test_cloud_resources_discovery.py @@ -1,5 +1,5 @@ import uuid - +import tools.optscale_time as opttime from rest_api.rest_api_server.controllers.cluster_type import ClusterTypeController from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase from rest_api.rest_api_server.models.db_factory import DBType, DBFactory @@ -401,7 +401,7 @@ def test_cache_time(self): self.assertEqual(code, 200) self.assertEqual(len(response['data']), 5) self.assertEqual(response['from_cache'], True) - now = datetime.utcnow() + now = opttime.utcnow() with freeze_time(now + timedelta(60)): self.resource_discovery_call( self.get_instances(), create_resources=False) @@ -520,7 +520,7 @@ def 
test_without_cached(self): self.assertEqual(code, 200) def test_first_seen_discover(self): - now = datetime.utcnow() + now = opttime.utcnow() with freeze_time(now): self.resource_discovery_call(self.get_instances()) code, response = self.client.cloud_resources_discover( @@ -558,7 +558,7 @@ def test_first_seen_discover(self): self.assertEqual(resp['first_seen'], int(past_date.timestamp())) def test_import_after_discover(self): - now = datetime.utcnow() + now = opttime.utcnow() with freeze_time(now): self.resource_discovery_call([self.get_instances()[0]]) code, response = self.client.cloud_resources_discover( @@ -604,7 +604,7 @@ def create_cloud_resource(self, cloud_account_id, employee_id=None, 'resource_type': resource_type, 'employee_id': employee_id, 'pool_id': pool_id, - 'last_seen': last_seen or int(datetime.utcnow().timestamp()), + 'last_seen': last_seen or opttime.utcnow_timestamp(), 'region': region } if tags: diff --git a/rest_api/rest_api_server/tests/unittests/test_cluster_types_api.py b/rest_api/rest_api_server/tests/unittests/test_cluster_types_api.py index d9a9f4bb0..579278493 100644 --- a/rest_api/rest_api_server/tests/unittests/test_cluster_types_api.py +++ b/rest_api/rest_api_server/tests/unittests/test_cluster_types_api.py @@ -6,6 +6,7 @@ from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase from rest_api.rest_api_server.utils import encoded_tags +import tools.optscale_time as opttime class TestClusterTypesApi(TestApiBase): @@ -319,7 +320,7 @@ def create_cloud_resource(self, cloud_account_id, employee_id=None, 'resource_type': resource_type, 'employee_id': employee_id, 'pool_id': pool_id, - 'last_seen': last_seen or int(datetime.utcnow().timestamp()), + 'last_seen': last_seen or opttime.utcnow_timestamp(), 'region': region } if tags: @@ -781,13 +782,13 @@ def test_cluster_types_apply_dependent_constraint_cleanup(self): self.assertEqual(code, 201) self.resources_collection.bulk_write([UpdateMany( filter={'_id': resource['id']}, - update={'$set': {'last_seen': int(datetime.utcnow().timestamp() - 5), + update={'$set': {'last_seen': opttime.utcnow_timestamp() - 5, 'active': True}}, )]) code, constraint = self.client.resource_constraint_create( resource['id'], { - 'limit': int(datetime.utcnow().timestamp()) + 3600, + 'limit': opttime.utcnow_timestamp() + 3600, 'type': 'ttl' }) self.assertEqual(code, 201) diff --git a/rest_api/rest_api_server/tests/unittests/test_constraints.py b/rest_api/rest_api_server/tests/unittests/test_constraints.py index 61bec4318..4bf6875bc 100644 --- a/rest_api/rest_api_server/tests/unittests/test_constraints.py +++ b/rest_api/rest_api_server/tests/unittests/test_constraints.py @@ -1,5 +1,7 @@ from datetime import datetime, timedelta import time + +import tools.optscale_time as opttime from unittest.mock import patch, call, ANY from freezegun import freeze_time from pymongo import UpdateOne @@ -88,8 +90,7 @@ def mock_assign_resources(self, data): def get_expenses(self, instances): expenses_list = [] for instance in instances: - today = datetime.utcnow().replace( - hour=0, minute=0, second=0, microsecond=0) + today = opttime.startday(opttime.utcnow()) for dt, cost in [(today - timedelta(days=14), 2), (today, 3)]: expenses_list.append({ @@ -261,7 +262,7 @@ def test_environment_resource_constraint(self): self.update_expenses(self.get_expenses(resources)) # constraint hits - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() with freeze_time(datetime.fromtimestamp(now + REDISCOVER_TIME - 1)): code, resp = 
self.client.process_resource_violations(self.org_id) self.assertEqual(code, 204) @@ -298,7 +299,7 @@ def test_resource_constraint(self): self.update_expenses(self.get_expenses(resources)) constraints = self.create_resource_constraints(resources) # constraint hits - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() with freeze_time(datetime.fromtimestamp(now + REDISCOVER_TIME - 1)): self.discover_and_process_violations() self.assertEqual(self.m_activities.call_count, 2) @@ -330,7 +331,7 @@ def test_cluster_resource_constraint(self): constraints = self.create_resource_constraints([resources[0]], is_cluster=True) # constraint hits - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() with freeze_time(datetime.fromtimestamp(now + REDISCOVER_TIME - 1)): self.discover_and_process_violations() self.assertEqual(self.m_activities.call_count, 2) @@ -367,7 +368,7 @@ def test_resource_constraint_combi(self): self.update_expenses(self.get_expenses(resources)) self.create_resource_constraints(resources) # constraint hits - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() with freeze_time(datetime.fromtimestamp(now + REDISCOVER_TIME - 1)): self.discover_and_process_violations() # send alert to resource owner @@ -376,7 +377,7 @@ def test_resource_constraint_combi(self): def test_pool_constraint(self): self.m_recipients.return_value = self.get_auth_users([self.employee]) # initial - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() with freeze_time(datetime.fromtimestamp(now + REDISCOVER_TIME - 1)): resources = self.discover_and_process_violations() @@ -409,7 +410,7 @@ def test_cluster_pool_constraint(self): self.m_recipients.return_value = self.get_auth_users([self.employee]) self.create_cluster_type() # initial - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() with freeze_time(datetime.fromtimestamp(now + REDISCOVER_TIME - 1)): resources = self.discover_and_process_violations() @@ -452,7 +453,7 @@ def test_pool_constraint_different_owner(self): self.update_expenses(self.get_expenses(resources)) constraints = self.create_pool_constraints([self.pool_id]) # constraint hit. Expect alerts for employee and pool owner - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() with freeze_time(datetime.fromtimestamp(now + REDISCOVER_TIME - 1)): self.discover_and_process_violations() self.assertEqual(self.m_activities.call_count, 2) @@ -473,7 +474,7 @@ def test_constraint_overlap(self): self.update_expenses(self.get_expenses(resources)) constraints = self.create_pool_constraints([self.pool_id], limit=1) # pool constraint hit - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() with freeze_time(datetime.fromtimestamp(now + REDISCOVER_TIME - 1)): self.discover_and_process_violations() self.assertEqual(self.m_activities.call_count, 2) @@ -523,7 +524,7 @@ def test_pool_constraint_split_owner(self): # constraint hit. 
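
opttime.startday is assumed to centralize the truncate-to-midnight idiom that get_expenses above used to spell out by hand; under the sketched implementation the two forms are interchangeable:

    from datetime import datetime
    import tools.optscale_time as opttime

    today_old = datetime.utcnow().replace(
        hour=0, minute=0, second=0, microsecond=0)   # removed idiom
    today_new = opttime.startday(opttime.utcnow())   # replacement
    # Both are naive datetimes at 00:00:00 of the current UTC day; the
    # helper also avoids the deprecated datetime.utcnow() call.
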
Expect: # - 3 alerts for employee 1 as a resource owner, # - 3 alerts for employee 2 as a resource owner, - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() with freeze_time(datetime.fromtimestamp(now + REDISCOVER_TIME - 1)): self.discover_and_process_violations() self.assertEqual(self.m_activities.call_count, 3) @@ -552,7 +553,7 @@ def test_pool_constraint_pool_change(self): resources, self.org['pool_id'], self.employee_id) constraints = self.create_pool_constraints([self.org['pool_id']]) # constraint hit - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() with freeze_time(datetime.fromtimestamp(now + REDISCOVER_TIME - 1)): self.discover_and_process_violations() # pool created + rule deactivated + 1 violated @@ -609,7 +610,7 @@ def test_pool_constraint_cross_ownership(self): # constraint hit. Expect: # - 4 alerts for employee as resource owner # - 4 alerts for employee2 as resource owner - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() with freeze_time(datetime.fromtimestamp(now + REDISCOVER_TIME - 1)): self.discover_and_process_violations() # 2 pools created, 1 deactivated rule + 2 violated @@ -631,7 +632,7 @@ def test_resource_infinity_constraint(self): self.update_expenses(self.get_expenses(resources)) constraints = self.create_resource_constraints(resources, limit=0) # constraint hits - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() with freeze_time(datetime.fromtimestamp(now + REDISCOVER_TIME - 1)): self.discover_and_process_violations() self.assertEqual(self.m_activities.call_count, 1) @@ -639,7 +640,7 @@ def test_resource_infinity_constraint(self): def test_pool_constraint_with_zero_limit(self): self.m_recipients.return_value = self.get_auth_users([self.employee]) # initial - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() with freeze_time(datetime.fromtimestamp(now + REDISCOVER_TIME - 1)): resources = self.discover_and_process_violations() @@ -670,7 +671,7 @@ def test_owner_constraint_activities_task(self): self.update_expenses(self.get_expenses(resources)) constraints = self.create_resource_constraints(resources) # constraint hits - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() self.m_activities.reset_mock() with freeze_time(datetime.fromtimestamp(now + REDISCOVER_TIME - 1)): @@ -690,7 +691,7 @@ def test_disable_enable_constraint_types(self): self.update_expenses(self.get_expenses(resources)) self.create_resource_constraints(resources) # constraint hits - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() call_count = 1 for constraint_type in ConstraintTypes: call_count += 1 diff --git a/rest_api/rest_api_server/tests/unittests/test_context_api.py b/rest_api/rest_api_server/tests/unittests/test_context_api.py index 89cfb2ddc..3f2590fdd 100644 --- a/rest_api/rest_api_server/tests/unittests/test_context_api.py +++ b/rest_api/rest_api_server/tests/unittests/test_context_api.py @@ -1,6 +1,5 @@ from unittest.mock import patch -from datetime import datetime - +from tools.optscale_time import utcnow_timestamp from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase @@ -52,7 +51,7 @@ def _create_resource(self, employee_id=None, pool_id=None, active=False): '_id': resource['id'] }, update={'$set': { - 'last_seen': int(datetime.utcnow().timestamp() - 1), + 'last_seen': utcnow_timestamp() - 1, 'active': True }} ) @@ -138,7 +137,7 @@ def test_resource_constraint_context(self): 
active=True, pool_id=self.organization['pool_id'], employee_id=self.employee['id']) code, constraint = self.client.resource_constraint_create( - res['id'], {'limit': int(datetime.utcnow().timestamp()) + 3600, + res['id'], {'limit': utcnow_timestamp() + 3600, 'type': 'ttl'}) self.assertEqual(code, 201) @@ -155,7 +154,7 @@ def test_resource_constraint_context(self): ) _, constraint = self.client.resource_constraint_create( res['id'], - {'limit': int(datetime.utcnow().timestamp()) + 3600, + {'limit': utcnow_timestamp() + 3600, 'type': 'ttl'}) code, context = self.client.context_get( 'resource_constraint', constraint['id']) diff --git a/rest_api/rest_api_server/tests/unittests/test_discovery_infos_api.py b/rest_api/rest_api_server/tests/unittests/test_discovery_infos_api.py index 5d965de87..6c57a26c5 100644 --- a/rest_api/rest_api_server/tests/unittests/test_discovery_infos_api.py +++ b/rest_api/rest_api_server/tests/unittests/test_discovery_infos_api.py @@ -1,7 +1,6 @@ import uuid -from datetime import datetime from unittest.mock import patch - +from tools.optscale_time import utcnow_timestamp from tools.cloud_adapter.model import ResourceTypes from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase @@ -102,7 +101,7 @@ def test_list_discovery_info_environment(self): def test_update_discovery_info(self): _, res = self.client.discovery_info_list(self.cloud_acc_id) - some_time = int(datetime.utcnow().timestamp()) + some_time = utcnow_timestamp() for di_info in res['discovery_info']: code, res = self.client.discovery_info_update( di_info['id'], {'last_discovery_at': some_time, @@ -150,7 +149,7 @@ def test_update_discovery_info_cloud_acc(self): def test_update_discovery_info_nonexisting(self): code, res = self.client.discovery_info_update( str(uuid.uuid4()), - {'last_discovery_at': int(datetime.utcnow().timestamp())} + {'last_discovery_at': utcnow_timestamp()} ) self.assertEqual(code, 404) diff --git a/rest_api/rest_api_server/tests/unittests/test_employees.py b/rest_api/rest_api_server/tests/unittests/test_employees.py index c1e9f1881..f732a6dbc 100644 --- a/rest_api/rest_api_server/tests/unittests/test_employees.py +++ b/rest_api/rest_api_server/tests/unittests/test_employees.py @@ -9,6 +9,7 @@ from rest_api.rest_api_server.models.models import Employee, OrganizationLimitHit from sqlalchemy import and_ from tools.optscale_exceptions.http_exc import OptHTTPError +import tools.optscale_time as opttime class TestEmployeeApi(TestProfilingBase): @@ -220,7 +221,7 @@ def test_delete_invalid_new_owner_id(self): session = BaseDB.session(engine)() session.query(Employee).filter( Employee.id == deleted_emp['id']).update({ - 'deleted_at': int(datetime.utcnow().timestamp())}) + 'deleted_at': opttime.utcnow_timestamp()}) session.commit() invalid_owners = ['123', emp['id'], deleted_emp['id']] @@ -477,7 +478,7 @@ def test_delete_reassign_resources(self, p_assignment_list): 'cost': 11, 'cloud_account_id': cloud_acc['id'], 'resource_id': resource['id'], - 'date': datetime.utcnow() - timedelta(days=10), + 'date': opttime.utcnow() - timedelta(days=10), 'sign': 1 }) p_assignment_list.return_value = (200, []) diff --git a/rest_api/rest_api_server/tests/unittests/test_environment_resources_properties.py b/rest_api/rest_api_server/tests/unittests/test_environment_resources_properties.py index ed4f119d7..0f49de335 100644 --- a/rest_api/rest_api_server/tests/unittests/test_environment_resources_properties.py +++ b/rest_api/rest_api_server/tests/unittests/test_environment_resources_properties.py @@ 
-1,6 +1,3 @@ -import datetime -import os -import uuid import copy from unittest.mock import patch, ANY @@ -9,6 +6,7 @@ from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase from rest_api.rest_api_server.utils import encoded_map +from tools.optscale_time import utcnow class TestEnvironmentResourceApi(TestApiBase): @@ -180,7 +178,7 @@ def test_send_properties_without_autorization(self): def test_send_properties_double(self): init_props = {'some_field': 'some_value'} - now = datetime.datetime.utcnow() + now = utcnow() with freeze_time(now): code, _ = self.client.env_properties_send( self.env_resource_id, init_props) diff --git a/rest_api/rest_api_server/tests/unittests/test_expenses_api.py b/rest_api/rest_api_server/tests/unittests/test_expenses_api.py index 6195e33e1..7c98d098e 100644 --- a/rest_api/rest_api_server/tests/unittests/test_expenses_api.py +++ b/rest_api/rest_api_server/tests/unittests/test_expenses_api.py @@ -12,6 +12,7 @@ from rest_api.rest_api_server.utils import get_nil_uuid from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase +from tools.optscale_time import utcnow, utcnow_timestamp class TestExpensesApi(TestApiBase): @@ -120,7 +121,7 @@ def create_cloud_resource(self, cloud_account_id, employee_id=None, host_ip=None, instance_address=None, k8s_namespace=None, k8s_node=None, pod_ip=None, first_seen=None, k8s_service=None, service_name=None, resource_hash=None): - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() resource = { 'cloud_resource_id': self.gen_id(), 'name': name, @@ -161,7 +162,7 @@ def create_cloud_resource(self, cloud_account_id, employee_id=None, def add_recommendations(self, resource_id, modules, timestamp=None, last_check=None, pool_id=None, checklist=True): if not timestamp: - timestamp = int(datetime.utcnow().timestamp()) + timestamp = utcnow_timestamp() recommendations = { 'modules': modules, @@ -183,7 +184,7 @@ def add_recommendations(self, resource_id, modules, timestamp=None, updates = { 'recommendations': recommendations, 'active': True, - 'last_seen': int(datetime.utcnow().timestamp()) + 'last_seen': utcnow_timestamp() } if pool_id: updates['pool_id'] = pool_id @@ -2342,7 +2343,7 @@ def test_raw_expenses(self): self.cloud_acc2['id'], tags={'some': 'thing'}, region='us-test') self.assertEqual(code, 201) - dt = datetime.utcnow() + dt = utcnow() expenses = [ { 'cost': 150, 'date': dt, @@ -2410,7 +2411,7 @@ def test_raw_expenses_limit(self): self.cloud_acc1['id'], tags={'tag': 'val'}, region='us-east') self.assertEqual(code, 201) - dt = datetime.utcnow() + dt = utcnow() expenses = [ { 'cost': 450, 'date': dt, @@ -2475,7 +2476,7 @@ def test_raw_expenses_by_hash(self): self.cloud_acc1['id'], tags={'tag': 'val'}, region='us-east', resource_hash='hash') self.assertEqual(code, 201) - dt = datetime.utcnow() + dt = utcnow() expenses = [ { 'cost': 450, 'date': dt, @@ -2535,7 +2536,7 @@ def test_raw_expenses_format(self): self.cloud_acc1['id'], tags={'tag': 'val'}, region='us-east') self.assertEqual(code, 201) - dt = datetime.utcnow() + dt = utcnow() expenses = [ { 'cost': 450, 'date': dt, @@ -3124,7 +3125,7 @@ def test_summary_clean_expenses_filter_by_empty_tag(self): self.assertEqual(response['total_count'], 0) def test_summary_clean_expenses_invalid_limit(self): - time = int(datetime.utcnow().timestamp()) + time = utcnow_timestamp() code, response = self.client.clean_expenses_get( self.org_id, time, time + 1, {'limit': -1}) self.assertEqual(code, 400) @@ -3420,8 +3421,8 @@ def 
test_clean_expenses_deleted_org(self): self.client.pool_delete(self.sub_pool1['id']) self.client.organization_delete(self.org_id) code, response = self.client.clean_expenses_get( - self.org_id, int(datetime.utcnow().timestamp()) - 1000, - int(datetime.utcnow().timestamp()), {}) + self.org_id, utcnow_timestamp() - 1000, + utcnow_timestamp(), {}) self.assertEqual(code, 404) def test_summary_expenses_deleted_org(self): @@ -3431,8 +3432,8 @@ def test_summary_expenses_deleted_org(self): self.client.pool_delete(self.sub_pool1['id']) self.client.organization_delete(self.org_id) code, response = self.client.summary_expenses_get( - self.org_id, int(datetime.utcnow().timestamp()) - 1000, - int(datetime.utcnow().timestamp()), {}) + self.org_id, utcnow_timestamp() - 1000, + utcnow_timestamp(), {}) self.assertEqual(code, 404) def test_raw_expenses_deleted_org(self): @@ -3442,18 +3443,18 @@ def test_raw_expenses_deleted_org(self): self.client.pool_delete(self.sub_pool1['id']) self.client.organization_delete(self.org_id) code, response = self.client.raw_expenses_get( - self.org_id, int(datetime.utcnow().timestamp()) - 1000, - int(datetime.utcnow().timestamp()), {}) + self.org_id, utcnow_timestamp() - 1000, + utcnow_timestamp(), {}) self.assertEqual(code, 404) def test_raw_expenses_nonexistent_resource(self): code, response = self.client.raw_expenses_get( - str(uuid.uuid4()), int(datetime.utcnow().timestamp()) - 1000, - int(datetime.utcnow().timestamp()), {}) + str(uuid.uuid4()), utcnow_timestamp() - 1000, + utcnow_timestamp(), {}) self.assertEqual(code, 404) code, response = self.client.raw_expenses_get( - get_nil_uuid(), int(datetime.utcnow().timestamp()) - 1000, - int(datetime.utcnow().timestamp()), {}) + get_nil_uuid(), utcnow_timestamp() - 1000, + utcnow_timestamp(), {}) self.assertEqual(code, 404) def test_region_expenses_no_cloud_accs(self): @@ -3598,7 +3599,7 @@ def test_summary_clean_expenses_recommendations_and_tags(self): resource, resource2, resource3, resource4 ] } - dt = datetime.utcnow() + dt = utcnow() expenses = [ { 'cost': 150, 'date': dt, @@ -3735,7 +3736,7 @@ def test_clean_expenses_kubernetes_fields(self): instance_address='10.24.0.2', k8s_namespace='default', k8s_node='node_test1', pod_ip='10.24.1.3', k8s_service='monitoring-nginx') self.assertEqual(code, 201) - dt = datetime.utcnow() + dt = utcnow() expenses = [ { 'cost': 300, 'date': dt, @@ -4012,7 +4013,7 @@ def test_summary_clean_expenses_clusters(self): self.cloud_acc1['id'], tags={'some': 'tag'}, region='us-test') self.assertEqual(code, 201) - dt = datetime.utcnow() + dt = utcnow() expenses = [ { 'cost': 150, 'date': dt, @@ -4231,7 +4232,7 @@ def test_clean_expenses_shareables(self): ) code, resource2 = self.create_cloud_resource( self.cloud_acc2['id'], region='us-east') - dt = datetime.utcnow() + dt = utcnow() expenses = [ { 'cost': 150, 'date': dt, @@ -4274,7 +4275,7 @@ def test_summary_clean_expenses_environments(self): self.org_id, {'name': 'res3', 'resource_type': 'some_type'}) self.assertEqual(code, 201) - dt = datetime.utcnow() + dt = utcnow() expenses = [ { 'cost': 150, 'date': dt, @@ -4408,7 +4409,7 @@ def test_summary_clean_expenses_deleted_environments(self): self.org_id, {'name': 'res3', 'resource_type': 'some_type'}) self.assertEqual(code, 201) - dt = datetime.utcnow() + dt = utcnow() expenses = [ { 'cost': 150, 'date': dt, @@ -4481,7 +4482,7 @@ def test_summary_clean_expenses_clusters_filter_by_tag(self): self.cloud_acc1['id'], tags={'some': 'tag'}, region='us-test') self.assertEqual(code, 201) - dt = 
datetime.utcnow() + dt = utcnow() expenses = [ { 'cost': 150, 'date': dt, @@ -4552,7 +4553,7 @@ def test_raw_expenses_clusters(self): self.cloud_acc1['id'], tags={'some': 'tag'}, region='us-test') self.assertEqual(code, 201) - dt = datetime.utcnow() + dt = utcnow() expenses = [ { 'cost': 150, 'date': dt, @@ -4687,7 +4688,7 @@ def test_raw_expenses_environments(self): self.org_id, {'name': 'res2', 'resource_type': 'some_type'}) self.assertEqual(code, 201) - dt = datetime.utcnow() + dt = utcnow() expenses = [ { 'cost': 150, 'date': dt, @@ -4757,7 +4758,7 @@ def test_raw_expenses_deleted_environments(self): self.org_id, {'name': 'res2', 'resource_type': 'some_type'}) self.assertEqual(code, 201) - dt = datetime.utcnow() + dt = utcnow() expenses = [ { 'cost': 150, 'date': dt, @@ -4879,7 +4880,7 @@ def test_clean_expenses_invalid_filters(self): @freeze_time('2021-09-13 15:00:00') def test_get_old_active_clean_expenses(self): - now = datetime.utcnow() + now = utcnow() now_ts = int(now.timestamp()) today = now.replace(hour=0, minute=0, second=0) previuos_month_day = today.replace(month=today.month - 1, day=1) @@ -5013,7 +5014,7 @@ def test_clean_expenses_resource_type_identity(self): code, env_resource = self.client.environment_resource_create( self.org_id, {'name': 'some_name', 'resource_type': 'Instance'}) self.assertEqual(code, 201) - dt = datetime.utcnow() + dt = utcnow() expenses = [ { 'cost': 10, 'date': dt, @@ -5388,7 +5389,7 @@ def test_region_expenses_duplicated_regions( self.cloud_acc2['id'], region='global') self.assertEqual(code, 201) - dt = datetime.utcnow() + dt = utcnow() expenses = [ { 'cost': 150, 'date': dt, @@ -5424,7 +5425,7 @@ def test_region_expenses_duplicated_regions( p_azure_regions_map.return_value = { 'global': {'longitude': 3, 'latitude': 3} } - end_date = datetime.utcnow() + end_date = utcnow() start_date = end_date - timedelta(days=1) code, resp = self.client.region_expenses_get( diff --git a/rest_api/rest_api_server/tests/unittests/test_infrastructure_base.py b/rest_api/rest_api_server/tests/unittests/test_infrastructure_base.py index 7d1f5a04b..975446b54 100644 --- a/rest_api/rest_api_server/tests/unittests/test_infrastructure_base.py +++ b/rest_api/rest_api_server/tests/unittests/test_infrastructure_base.py @@ -8,6 +8,7 @@ from rest_api.rest_api_server.controllers.infrastructure.base import get_cost from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase from rest_api.rest_api_server.tests.unittests.test_profiling_base import ArceeMock +from tools.optscale_time import utcnow_timestamp def get_http_error(code): @@ -150,14 +151,14 @@ def _gen_dataset(self, token, **kwargs): dataset = { '_id': str(uuid.uuid4()), 'path': str(uuid.uuid4()), - 'name': f'Dataset {datetime.datetime.utcnow().timestamp()}', + 'name': f'Dataset {utcnow_timestamp()}', 'description': 'Discovered in training - ()', 'labels': ['test'], - 'created_at': int(datetime.datetime.utcnow().timestamp()), + 'created_at': utcnow_timestamp(), 'deleted_at': 0, 'token': token, - 'timespan_from': int(datetime.datetime.utcnow().timestamp()), - 'timespan_to': int(datetime.datetime.utcnow().timestamp()) + 'timespan_from': utcnow_timestamp(), + 'timespan_to': utcnow_timestamp() } if kwargs: dataset.update(kwargs) @@ -353,7 +354,7 @@ def runset_create(self, template_id, task_id, cloud_account_id, region_id, instance_type, name_prefix, owner_id, hyperparameters, tags, destroy_conditions, commands, open_ingress=False, spot_settings=None): - now = int(datetime.datetime.utcnow().timestamp())
+ now = utcnow_timestamp() b = { "template_id": template_id, "task_id": task_id, @@ -409,7 +410,7 @@ def runset_list(self, template_id): return 200, runsets def __generate_runners(self, runset_ids): - now = int(datetime.datetime.utcnow().timestamp()) + now = utcnow_timestamp() runsets = list(self.infra_runsets.find({'_id': {'$in': runset_ids}})) # TODO: (am) complex runs generation based on runset hyperparameters inserted_ids = [] diff --git a/rest_api/rest_api_server/tests/unittests/test_infrastructure_runs.py b/rest_api/rest_api_server/tests/unittests/test_infrastructure_runs.py index 096aab8ae..07285c848 100644 --- a/rest_api/rest_api_server/tests/unittests/test_infrastructure_runs.py +++ b/rest_api/rest_api_server/tests/unittests/test_infrastructure_runs.py @@ -4,6 +4,7 @@ from rest_api.rest_api_server.tests.unittests.test_infrastructure_base import ( TestInfrastructureBase) +from tools.optscale_time import utcnow_timestamp class TestRunsApi(TestInfrastructureBase): @@ -21,7 +22,7 @@ def test_list_insider_cost(self): code, _ = self.client.runset_list( self.organization_id, self.template_id) self.assertEqual(code, 200) - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() self._create_run( self.organization_id, self.task_id, self.runset_id, [self.instance_id], start=now - 60, finish=now - 10) @@ -46,7 +47,7 @@ def test_list_deleted_task(self): code, _ = self.client.runset_list( self.organization_id, self.template_id) self.assertEqual(code, 200) - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() self._create_run( self.organization_id, self.task_id, self.runset_id, [self.instance_id], start=now - 60, finish=now - 10) @@ -59,7 +60,7 @@ def test_list_dataset(self): code, _ = self.client.runset_list( self.organization_id, self.template_id) self.assertEqual(code, 200) - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() self._create_run( self.organization_id, self.task_id, self.runset_id, [self.instance_id], 's3://ml-bucket/dataset', @@ -74,7 +75,7 @@ def test_list_deleted_dataset(self): code, _ = self.client.runset_list( self.organization_id, self.template_id) self.assertEqual(code, 200) - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() self._create_run( self.organization_id, self.task_id, self.runset_id, [self.instance_id], 's3://ml-bucket/dataset', @@ -99,7 +100,7 @@ def test_list_deleted_ca(self): code, _ = self.client.runset_list( self.organization_id, self.template_id) self.assertEqual(code, 200) - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() self._create_run( self.organization_id, self.task_id, self.runset_id, [self.instance_id], start=now - 60, finish=now - 10) @@ -112,7 +113,7 @@ def test_list_raw_cost(self): code, _ = self.client.runset_list( self.organization_id, self.template_id) self.assertEqual(code, 200) - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() run_start = now - 60 run_end = now - 10 self._create_run( diff --git a/rest_api/rest_api_server/tests/unittests/test_k8s_rightsizing.py b/rest_api/rest_api_server/tests/unittests/test_k8s_rightsizing.py index f76da950c..028db8dcc 100644 --- a/rest_api/rest_api_server/tests/unittests/test_k8s_rightsizing.py +++ b/rest_api/rest_api_server/tests/unittests/test_k8s_rightsizing.py @@ -1,8 +1,8 @@ import uuid -from datetime import datetime from unittest.mock import patch from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase +from tools.optscale_time import utcnow_timestamp class 
TestK8sRightsizing(TestApiBase): @@ -32,13 +32,13 @@ def setUp(self, version='v2'): _, self.cloud_acc1 = self.create_cloud_account( self.org_id, self.valid_kubernetes_cloud_acc, auth_user_id=self.auth_user_id_1) - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() self.base_payload = { 'start_date': now, 'end_date': now + 12345} def test_metrics_invalid_dates(self): validation_params = [ - ('start_date', int(datetime.utcnow().timestamp()) + 1000000, + ('start_date', utcnow_timestamp() + 1000000, 'OE0446'), ('start_date', 'aaa', 'OE0217'), ('start_date', None, 'OE0216'), diff --git a/rest_api/rest_api_server/tests/unittests/test_live_demos_api.py b/rest_api/rest_api_server/tests/unittests/test_live_demos_api.py index 811249afe..88866ee35 100644 --- a/rest_api/rest_api_server/tests/unittests/test_live_demos_api.py +++ b/rest_api/rest_api_server/tests/unittests/test_live_demos_api.py @@ -2,7 +2,6 @@ import time import uuid from copy import deepcopy -from datetime import datetime from unittest.mock import patch, PropertyMock import optscale_client.rest_api_client @@ -16,6 +15,7 @@ ResourceConstraint, PoolPolicy, OrganizationConstraint, OrganizationLimitHit) from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase +import tools.optscale_time as opttime PRESET_CLOUD_RESOURCE_ID = "sunflower-eu-fra" @@ -1980,7 +1980,7 @@ def pregenerate_live_demo(self): return_value=deepcopy(self.preset)): code, response = self.client.live_demo_create() self.assertEqual(code, 201) - response['created_at'] = int(datetime.utcnow().timestamp()) + response['created_at'] = opttime.utcnow_timestamp() self.mongo_client.restapi.live_demos.insert_one(response) return response @@ -2030,9 +2030,9 @@ def test_live_demo_org_constraint_create(self): self.assertEqual(len(filters[f]), 1) self.assertIsNotNone(filters[f][0]) - start_date = int(datetime.utcnow().replace( - hour=0, minute=0, second=0).timestamp()) - self.preset[ - 'organization_constraint'][2]['definition.start_date_offset'] + start_date = int( + opttime.startday(opttime.utcnow()).timestamp() + ) - self.preset['organization_constraint'][2]['definition.start_date_offset'] total_budget = self.preset['organization_constraint'][2][ 'definition']['total_budget'] * self.multiplier definition = {'total_budget': total_budget, 'start_date': start_date} diff --git a/rest_api/rest_api_server/tests/unittests/test_mytasks_api.py b/rest_api/rest_api_server/tests/unittests/test_mytasks_api.py index e91435f48..6ca7b8ce0 100644 --- a/rest_api/rest_api_server/tests/unittests/test_mytasks_api.py +++ b/rest_api/rest_api_server/tests/unittests/test_mytasks_api.py @@ -10,6 +10,7 @@ from rest_api.rest_api_server.models.db_base import BaseDB from rest_api.rest_api_server.models.models import ConstraintLimitHit from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase +from tools.optscale_time import utcnow_timestamp, utcnow class TestMyTasksApi(TestApiBase): @@ -99,7 +100,7 @@ def set_active_flag(self, resource_ids): '_id': {'$in': resource_ids} }, update={'$set': { - 'last_seen': int(datetime.datetime.utcnow().timestamp() - 1), + 'last_seen': utcnow_timestamp() - 1, 'active': True }} ) @@ -158,7 +159,7 @@ def add_outgoing_assignment_request(self, resource_id=None): def add_expense_records(self, pool_id=None, cost=1, date=None, owner_id=None): if not date: - date = datetime.datetime.utcnow() + date = utcnow() date = date.replace(hour=0, minute=0, second=0, microsecond=0) if not owner_id: owner_id = self.employee['id'] @@ -520,7 
+521,7 @@ def test_get_my_tasks_constraints(self): self.org['pool_id'], type='ttl') self.assertEqual(code, 201) - now = int(datetime.datetime.utcnow().timestamp()) + now = utcnow_timestamp() code, constr_1 = self._create_resource_constraint( res_1_id, type='ttl', limit=now + 3600) self.assertEqual(code, 201) @@ -760,7 +761,7 @@ def test_remove_constraint_with_policy_after_hit(self): code, tasks = self.client.my_tasks_get(self.org_id, types=[ 'violated_constraints', 'differ_constraints']) self.assertIsNone(tasks.get('violated_constraints')) - created_at = datetime.datetime.utcnow().timestamp() + created_at = utcnow_timestamp() with freeze_time(datetime.datetime.fromtimestamp(created_at + 1)): self._add_constraint_limit_hit( res_1_id, pool_id=self.org['pool_id']) diff --git a/rest_api/rest_api_server/tests/unittests/test_optimizations_api.py b/rest_api/rest_api_server/tests/unittests/test_optimizations_api.py index f70f69cba..52420024c 100644 --- a/rest_api/rest_api_server/tests/unittests/test_optimizations_api.py +++ b/rest_api/rest_api_server/tests/unittests/test_optimizations_api.py @@ -6,6 +6,7 @@ from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase from rest_api.rest_api_server.models.db_factory import DBFactory, DBType from rest_api.rest_api_server.models.db_base import BaseDB +from tools.optscale_time import utcnow_timestamp GET_OPTIMIZATIONS_DATA = ('rest_api.rest_api_server.controllers.optimization.' @@ -86,7 +87,7 @@ def test_optimization(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module = 'module' p_get_optimizations_data.return_value = [ { @@ -127,7 +128,7 @@ def test_optimization_cloud_acc(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module = 'module' p_get_optimizations_data.return_value = [ { @@ -164,7 +165,7 @@ def test_optimization_cloud_acc_detailed(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module = 'module' p_get_optimizations_data.return_value = [ { @@ -201,7 +202,7 @@ def test_suppressed_optimization(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module = 'module' self.instance['is_dismissed'] = True p_get_optimizations_data.return_value = [ @@ -235,7 +236,7 @@ def test_optimization_detailed(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module = 'module' options = {'key': 'value'} p_get_optimizations_data.return_value = [ @@ -268,7 +269,7 @@ def test_suppressed_optimization_detailed(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module = 'module' self.instance['is_dismissed'] = True p_get_optimizations_data.return_value = [ @@ -307,7 +308,7 @@ def test_optimization_no_saving(self, p_get_optimizations_data): _, res = 
self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module = 'module' self.instance.pop('saving') p_get_optimizations_data.return_value = [ @@ -337,7 +338,7 @@ def test_optimization_error(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module = 'module' error_message = 'Failed to collect optimization data' options = {'key': 'value'} @@ -383,7 +384,7 @@ def test_optimization_unhandled_error(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module = 'module' error_message = 'Failed to collect optimization data' p_get_optimizations_data.return_value = [ @@ -428,7 +429,7 @@ def test_optimization_timeout_error(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module = 'module' error_message = 'Failed to collect optimization data' p_get_optimizations_data.return_value = [ @@ -459,7 +460,7 @@ def test_optimization_several_modules(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module1 = 'module1' module2 = 'module2' p_get_optimizations_data.return_value = [ @@ -537,7 +538,7 @@ def test_combi_optimization(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module = 'module' self.instance['is_dismissed'] = True p_get_optimizations_data.return_value = [ @@ -614,7 +615,7 @@ def test_optimization_limit_detailed(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module = 'module' opt_data = [self.instance, self.instance2] module_saving = self.instance['saving'] + self.instance2['saving'] @@ -646,7 +647,7 @@ def test_optimization_detailed_no_data(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module = 'module' opt_data = [] options = {'key': 'value'} @@ -676,7 +677,7 @@ def test_optimization_limit_detailed_no_saving(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module = 'module' self.instance.pop('saving') self.instance2.pop('saving') @@ -708,7 +709,7 @@ def test_optimization_limit_several_modules(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module1 = 'module1' module2 = 'module2' options1 = {'key1': 'value1'} @@ -757,7 +758,7 @@ def test_optimization_limit_error(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - 
completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module = 'module' self.instance.pop('saving') error_message = 'Hey, gde kurab\'e?!!1' @@ -790,7 +791,7 @@ def test_optimization_overview(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module1 = 'module1' module2 = 'module2' options1 = {'key1': 'value1'} @@ -839,7 +840,7 @@ def test_optimization_overview_limit(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() module1 = 'module1' module2 = 'module2' options1 = {'key1': 'value1'} @@ -943,7 +944,7 @@ def _create_cloud_resource(self, cloud_acc_id, params): '_id': resource['id'] }, update={'$set': { - 'last_seen': int(datetime.utcnow().timestamp() - 1), + 'last_seen': utcnow_timestamp() - 1, 'active': True }} ) @@ -952,7 +953,7 @@ def _create_cloud_resource(self, cloud_acc_id, params): @staticmethod def _add_checklist(organization_id, timestamp=None): if not timestamp: - timestamp = int(datetime.utcnow().timestamp()) + timestamp = utcnow_timestamp() db = DBFactory(DBType.Test, None).db engine = db.engine session = BaseDB.session(engine)() @@ -1379,7 +1380,7 @@ def test_suppress_optimization_events(self): def test_optimization_cloud_accounts(self, p_get_optimizations_data): _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] - completed_at = int(datetime.utcnow().timestamp()) + completed_at = utcnow_timestamp() self.instance2.update({ 'cloud_account_id': self.cloud_acc2['id'], 'cloud_type': self.cloud_acc2['type'], diff --git a/rest_api/rest_api_server/tests/unittests/test_organization_constraints.py b/rest_api/rest_api_server/tests/unittests/test_organization_constraints.py index 1d6e050e2..86aef9df2 100644 --- a/rest_api/rest_api_server/tests/unittests/test_organization_constraints.py +++ b/rest_api/rest_api_server/tests/unittests/test_organization_constraints.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import timedelta from unittest.mock import patch from sqlalchemy import and_ @@ -9,6 +9,7 @@ OrganizationLimitHit) from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase from rest_api.rest_api_server.utils import get_nil_uuid +import tools.optscale_time as opttime class TestOrganizationConstraints(TestApiBase): @@ -66,7 +67,7 @@ def create_cloud_resource(self, cloud_account_id, employee_id=None, host_ip=None, instance_address=None, k8s_namespace=None, k8s_node=None, pod_ip=None, first_seen=None, k8s_service=None, service_name=None): - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() resource = { 'cloud_resource_id': self.gen_id(), 'name': name, @@ -777,7 +778,7 @@ def test_list_invalid_type(self): def test_list_constraints_with_hit_days(self): constr = self.create_org_constraint(self.org_id, self.pool_id) - created_at = int((datetime.utcnow() - timedelta(days=2)).timestamp()) + created_at = int((opttime.utcnow() - timedelta(days=2)).timestamp()) old_hit = self.create_org_limit_hit(self.org_id, self.pool_id, constraint_id=constr['id'], created_at=created_at) @@ -894,7 +895,7 @@ def _test(case): last_run_result = {'average': 0, 'today': 0, 'breakdown': {}} _test(last_run_result) - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() average 
= 2 last_run_result = { 'average': average, 'today': 20, @@ -920,7 +921,7 @@ def test_patch_anomaly_last_last_run_result_invalid(self): OrganizationConstraintTypes.EXPENSE_ANOMALY.value: ( ['test', [1.2]], {'today': 'OE0466', 'average': 'OE0466'}), } - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() for type_ in [OrganizationConstraintTypes.RESOURCE_COUNT_ANOMALY.value, OrganizationConstraintTypes.EXPENSE_ANOMALY.value]: constr = self.create_org_constraint( @@ -948,7 +949,7 @@ def test_patch_anomaly_last_last_run_result_unexpected_breakdown(self): assertion_message = 'Error on %s constraint update' % type_ last_run_result = { 'average': 0, 'today': 0, - 'breakdown': {int(datetime.utcnow().timestamp()): 123} + 'breakdown': {opttime.utcnow_timestamp(): 123} } code, resp = self.client.organization_constraint_update( constr['id'], {'last_run_result': last_run_result}) @@ -1149,7 +1150,7 @@ def test_organization_constraints_traffic_filters(self): { 'cloud_account_id': self.cloud_acc['id'], 'resource_id': res1['cloud_resource_id'], - 'date': int(datetime.utcnow().timestamp()), + 'date': opttime.utcnow_timestamp(), 'type': 1, 'from': 'region_2', 'to': 'External', diff --git a/rest_api/rest_api_server/tests/unittests/test_organization_limit_hits.py b/rest_api/rest_api_server/tests/unittests/test_organization_limit_hits.py index 9a9b8fcc0..cc1aca8d6 100644 --- a/rest_api/rest_api_server/tests/unittests/test_organization_limit_hits.py +++ b/rest_api/rest_api_server/tests/unittests/test_organization_limit_hits.py @@ -2,6 +2,7 @@ from datetime import datetime from freezegun import freeze_time from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase +from tools.optscale_time import utcnow_timestamp class TestOrganizationLimitHits(TestApiBase): @@ -57,7 +58,7 @@ def test_create_limit_hit_incorrect_run_result(self): def test_create_limit_hit_with_created_at(self): params = self.valid_hit_params.copy() - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() params['created_at'] = now code, resp = self.client.organization_limit_hit_create( self.org_id, params) @@ -228,7 +229,7 @@ def test_update_hit(self): value = 44.44 run_result = { 'average': 10, 'today': value, - 'breakdown': {int(datetime.utcnow().timestamp()): 10} + 'breakdown': {utcnow_timestamp(): 10} } params = {"constraint_limit": limit, "value": value, 'run_result': run_result} diff --git a/rest_api/rest_api_server/tests/unittests/test_organizations_overview_api.py b/rest_api/rest_api_server/tests/unittests/test_organizations_overview_api.py index 5be9c7693..c751e30f6 100644 --- a/rest_api/rest_api_server/tests/unittests/test_organizations_overview_api.py +++ b/rest_api/rest_api_server/tests/unittests/test_organizations_overview_api.py @@ -4,6 +4,7 @@ from freezegun import freeze_time from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase +from tools.optscale_time import utcnow class TestOrganizationsOverviewApi(TestApiBase): @@ -206,7 +207,7 @@ def test_organizations_overview_recommendations(self): self.org_id, self.valid_cloud_acc_dict, auth_user_id=self.auth_user) - completed_at = datetime.utcnow() + completed_at = utcnow() completed_at_ts = int(completed_at.timestamp()) _, res = self.client.optimizations_get(self.org_id) checklist_id = res['id'] diff --git a/rest_api/rest_api_server/tests/unittests/test_pools.py b/rest_api/rest_api_server/tests/unittests/test_pools.py index b00efd718..61126f49f 100644 --- 
a/rest_api/rest_api_server/tests/unittests/test_pools.py +++ b/rest_api/rest_api_server/tests/unittests/test_pools.py @@ -10,6 +10,7 @@ from freezegun import freeze_time from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase +from tools.optscale_time import utcnow_timestamp class TestPoolApi(TestApiBase): @@ -52,7 +53,7 @@ def _create_resource(self, cloud_account_id, employee_id=None, def add_recommendations(self, resource_id, modules, timestamp=None, last_check=None, pool_id=None, checklist=True): if not timestamp: - timestamp = int(datetime.utcnow().timestamp()) + timestamp = utcnow_timestamp() recommendations = { 'modules': modules, @@ -71,7 +72,7 @@ def add_recommendations(self, resource_id, modules, timestamp=None, ) session.add(record) session.commit() - last_seen = int(datetime.utcnow().timestamp()) + last_seen = utcnow_timestamp() self.resources_collection.update_one( filter={ '_id': resource_id @@ -752,7 +753,7 @@ def test_pool_details_recommendations_subpools(self): 'config_scheme': 'create_report' } } - checklist_timestamp = int(datetime.utcnow().timestamp()) + checklist_timestamp = utcnow_timestamp() _, cloud_account = self.create_cloud_account( self.org_id, cloud, auth_user_id=self.auth_user_1) code, resource = self.cloud_resource_create( diff --git a/rest_api/rest_api_server/tests/unittests/test_power_schedule.py b/rest_api/rest_api_server/tests/unittests/test_power_schedule.py index b071ebfba..d5827298b 100644 --- a/rest_api/rest_api_server/tests/unittests/test_power_schedule.py +++ b/rest_api/rest_api_server/tests/unittests/test_power_schedule.py @@ -2,6 +2,7 @@ from unittest.mock import patch from freezegun import freeze_time from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase +from tools.optscale_time import utcnow_timestamp class TestPowerSchedule(TestApiBase): @@ -50,7 +51,7 @@ def create_cloud_resource(self, cloud_account_id, employee_id=None, pool_id=None, resource_type='Instance', name='test_resource', power_schedule=None, active=False): - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() resource = { 'cloud_resource_id': self.gen_id(), 'name': name, @@ -247,7 +248,7 @@ def test_update_invalid_params(self): 'power_off': '11:44', 'power_on': '23:59', 'timezone': 'Europe/Vienna', - 'start_date': int(datetime.utcnow().timestamp()), + 'start_date': utcnow_timestamp(), 'name': 'my schedule 1', 'enabled': False, 'last_eval': 12, diff --git a/rest_api/rest_api_server/tests/unittests/test_profiling_datasets.py b/rest_api/rest_api_server/tests/unittests/test_profiling_datasets.py index 86a127dce..f9a768e99 100644 --- a/rest_api/rest_api_server/tests/unittests/test_profiling_datasets.py +++ b/rest_api/rest_api_server/tests/unittests/test_profiling_datasets.py @@ -5,6 +5,7 @@ from rest_api.rest_api_server.tests.unittests.test_profiling_base import ( TestProfilingBase) +from tools.optscale_time import utcnow_timestamp class TestDatasetApi(TestProfilingBase): @@ -45,8 +46,7 @@ def test_create(self): def test_create_incorrect_timespan(self): valid_dataset = self.valid_dataset.copy() - valid_dataset['timespan_from'] = int( - datetime.datetime.utcnow().timestamp()) + 100 + valid_dataset['timespan_from'] = utcnow_timestamp() + 100 code, dataset = self.client.dataset_create( self.organization_id, valid_dataset) self.assertEqual(code, 400) diff --git a/rest_api/rest_api_server/tests/unittests/test_profiling_executors.py b/rest_api/rest_api_server/tests/unittests/test_profiling_executors.py index 2c18aa198..05d067968 
100644 --- a/rest_api/rest_api_server/tests/unittests/test_profiling_executors.py +++ b/rest_api/rest_api_server/tests/unittests/test_profiling_executors.py @@ -1,4 +1,3 @@ -from datetime import datetime from unittest.mock import patch from tools.optscale_exceptions.http_exc import OptHTTPError @@ -6,6 +5,7 @@ from rest_api.rest_api_server.exceptions import Err from rest_api.rest_api_server.tests.unittests.test_profiling_base import ( TestProfilingBase) +import tools.optscale_time as opttime BYTES_IN_MB = 1024 * 1024 @@ -87,7 +87,7 @@ def test_executors_breakdown_params(self): code, task2 = self.client.task_create( self.org['id'], {'name': 'pr_2', 'key': 'k_2'}) self.assertEqual(code, 201) - run_1_start_ts = int(datetime.utcnow().timestamp()) + run_1_start_ts = opttime.utcnow_timestamp() run_2_start_ts = run_1_start_ts - 24 * 3600 run_3_start_ts = run_1_start_ts - 2 * 24 * 3600 self._create_run( diff --git a/rest_api/rest_api_server/tests/unittests/test_profiling_optimizations.py b/rest_api/rest_api_server/tests/unittests/test_profiling_optimizations.py index 6ce6ee968..1f6e4edf9 100644 --- a/rest_api/rest_api_server/tests/unittests/test_profiling_optimizations.py +++ b/rest_api/rest_api_server/tests/unittests/test_profiling_optimizations.py @@ -1,10 +1,10 @@ import uuid from rest_api.rest_api_server.tests.unittests.test_profiling_base import TestProfilingBase from unittest.mock import patch -from datetime import datetime from rest_api.rest_api_server.models.db_factory import DBFactory, DBType from rest_api.rest_api_server.models.db_base import BaseDB from rest_api.rest_api_server.models.models import Checklist +from tools.optscale_time import utcnow_timestamp class TestProfilingOptimizationsApi(TestProfilingBase): @@ -76,7 +76,7 @@ def test_task_recommendations(self): code, task = self.client.task_create( self.org['id'], self.valid_task) self.assertEqual(code, 201) - dt = int(datetime.utcnow().timestamp()) + dt = utcnow_timestamp() checklist = self.add_checklist(self.org['id'], dt) ca_id = str(uuid.uuid4()) data = [ @@ -158,7 +158,7 @@ def test_task_recommendations_dismissed(self): code, task = self.client.task_create( self.org['id'], self.valid_task) self.assertEqual(code, 201) - dt = int(datetime.utcnow().timestamp()) + dt = utcnow_timestamp() checklist = self.add_checklist(self.org['id'], dt) ca_id = str(uuid.uuid4()) data = [ @@ -214,7 +214,7 @@ def test_task_optimizations_threshold(self): code, task = self.client.task_create( self.org['id'], self.valid_task) self.assertEqual(code, 201) - dt = int(datetime.utcnow().timestamp()) + dt = utcnow_timestamp() checklist = self.add_checklist(self.org['id'], dt) ca_id = str(uuid.uuid4()) data = [ @@ -258,7 +258,7 @@ def test_task_optimizations_excluded_and_dismissed(self): code, task = self.client.task_create( self.org['id'], self.valid_task) self.assertEqual(code, 201) - dt = int(datetime.utcnow().timestamp()) + dt = utcnow_timestamp() checklist = self.add_checklist(self.org['id'], dt) ca_id = str(uuid.uuid4()) data = [ diff --git a/rest_api/rest_api_server/tests/unittests/test_profiling_runs.py b/rest_api/rest_api_server/tests/unittests/test_profiling_runs.py index d96e439aa..d6aab154f 100644 --- a/rest_api/rest_api_server/tests/unittests/test_profiling_runs.py +++ b/rest_api/rest_api_server/tests/unittests/test_profiling_runs.py @@ -8,6 +8,7 @@ from rest_api.rest_api_server.exceptions import Err from rest_api.rest_api_server.tests.unittests.test_profiling_base import ( TestProfilingBase) +import tools.optscale_time as opttime BYTES_IN_MB = 
1024 * 1024 @@ -380,7 +381,7 @@ def test_task_run_cost(self): self.assertEqual(resp['last_run_cost'], 185) self.assertEqual(resp['total_cost'], 185) - dt_start = datetime.utcnow() - timedelta(days=15) + dt_start = opttime.utcnow() - timedelta(days=15) self._create_run( self.org['id'], task['id'], [res_1['cloud_resource_id']], @@ -804,7 +805,7 @@ def test_breakdown_metrics_aggregate_func(self): self.org['id'], valid_task) self.assertEqual(code, 201) self.assertEqual(len(task['metrics']), 4) - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() run = self._create_run(self.org['id'], task['id'], ['i-1'], start=now - 2, finish=now) for dt, val in [ diff --git a/rest_api/rest_api_server/tests/unittests/test_profiling_tasks.py b/rest_api/rest_api_server/tests/unittests/test_profiling_tasks.py index e8cab52f4..d1dce1985 100644 --- a/rest_api/rest_api_server/tests/unittests/test_profiling_tasks.py +++ b/rest_api/rest_api_server/tests/unittests/test_profiling_tasks.py @@ -6,6 +6,7 @@ from freezegun import freeze_time from rest_api.rest_api_server.tests.unittests.test_profiling_base import ( TestProfilingBase) +from tools.optscale_time import utcnow_timestamp class TestTaskApi(TestProfilingBase): @@ -300,7 +301,7 @@ def test_task_executors(self): cloud_acc['id'], body, behavior='skip_existing', return_resources=True) self.assertEqual(code, 200) - now = datetime.utcnow().timestamp() + now = utcnow_timestamp() # create 2nd run earlier then 1st one self._create_run(self.org['id'], task['id'], ['i-1'], start=now - 2, finish=now) @@ -360,7 +361,7 @@ def test_task_last_run_and_history(self): self.org['id'], self.valid_task) self.assertEqual(code, 201) self.assertEqual(len(task['metrics']), 1) - now = datetime.utcnow().timestamp() + now = utcnow_timestamp() self._create_run(self.org['id'], task['id'], ['i-1'], start=now - 2, finish=now, data={'loss': 10}) # second created run should be first diff --git a/rest_api/rest_api_server/tests/unittests/test_report_imports.py b/rest_api/rest_api_server/tests/unittests/test_report_imports.py index fe1361d24..20608b32f 100644 --- a/rest_api/rest_api_server/tests/unittests/test_report_imports.py +++ b/rest_api/rest_api_server/tests/unittests/test_report_imports.py @@ -1,5 +1,4 @@ -import os -from datetime import datetime, timedelta +from datetime import timedelta from unittest.mock import patch, ANY from rest_api.rest_api_server.models.enums import ImportStates @@ -8,6 +7,7 @@ from rest_api.rest_api_server.models.models import ReportImport from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase from freezegun import freeze_time +from tools.optscale_time import utcnow class TestReportImportsApi(TestApiBase): @@ -57,7 +57,7 @@ def _create_import_object(self, state=ImportStates.SCHEDULED, session = BaseDB.session(engine)() cloud_acc_id = cloud_acc_id if cloud_acc_id else self.cloud_acc_id _import = ReportImport( - created_at=datetime.utcnow(), + created_at=utcnow(), deleted_at=0, cloud_account_id=cloud_acc_id, state=state, @@ -81,7 +81,7 @@ def test_report_update(self): 'state': ImportStates.FAILED.value, 'state_reason': 'test' * 200, } - now = datetime.utcnow() + now = utcnow() with freeze_time(now): code, _import = self.client.report_import_update(import_id, update) self.assertEqual(code, 200) @@ -346,7 +346,7 @@ def test_show_active(self): self.assertEqual(len(imports), 1) self.assertEqual(imports[0]['id'], import_id) - with freeze_time(datetime.utcnow() + timedelta(minutes=31)): + with freeze_time(utcnow() + 
timedelta(minutes=31)): code, resp = self.client.report_import_list( self.cloud_acc_id, show_active=True) self.assertEqual(code, 200) diff --git a/rest_api/rest_api_server/tests/unittests/test_resource_constraints.py b/rest_api/rest_api_server/tests/unittests/test_resource_constraints.py index 64a10d684..efe51a6ce 100644 --- a/rest_api/rest_api_server/tests/unittests/test_resource_constraints.py +++ b/rest_api/rest_api_server/tests/unittests/test_resource_constraints.py @@ -1,8 +1,8 @@ import uuid -from datetime import datetime from unittest.mock import patch from rest_api.rest_api_server.models.enums import ConstraintTypes from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase +from tools.optscale_time import utcnow_timestamp class TestResourceConstraints(TestApiBase): @@ -39,7 +39,7 @@ def setUp(self, version='v2'): _, self.resource = self._create_cloud_resource(self.cloud_acc['id'], self.valid_resource) self.valid_constraint = { - 'limit': int(datetime.utcnow().timestamp()) + 3600, + 'limit': utcnow_timestamp() + 3600, 'type': 'ttl' } @@ -53,7 +53,7 @@ def _create_cloud_resource(self, cloud_acc_id, params, active=True, '_id': resource['id'] }, update={'$set': { - 'last_seen': int(datetime.utcnow().timestamp() - 1), + 'last_seen': utcnow_timestamp() - 1, 'active': True }} ) @@ -149,7 +149,7 @@ def test_create_duplicate(self): self.assertEqual(code, 201) code, response = self.client.resource_constraint_create( self.resource['id'], { - 'limit': int(datetime.utcnow().timestamp()) + 7200, + 'limit': utcnow_timestamp() + 7200, 'type': 'ttl' } ) @@ -221,7 +221,7 @@ def test_unexpected_and_immutable(self): self.assertEqual(response['error']['error_code'], 'OE0211') def test_update(self): - limit = int(datetime.utcnow().timestamp()) + 1800 + limit = utcnow_timestamp() + 1800 _, response = self.client.resource_constraint_create( self.resource['id'], self.valid_constraint) code, response = self.client.resource_constraint_update( @@ -271,7 +271,7 @@ def test_create_constraint_invactive_resource(self): def test_constraint_limit_min_max_values(self): out_of_limits_values = { 'ttl': [(-1, 'OE0224'), (720, 'OE0461'), - (int(datetime.utcnow().timestamp()) - 1, 'OE0461')], + (utcnow_timestamp() - 1, 'OE0461')], 'total_expense_limit': [(-1, 'OE0224'), (2147483648, 'OE0224')] } for constr_type, values in out_of_limits_values.items(): @@ -285,7 +285,7 @@ def test_constraint_limit_min_max_values(self): self.assertEqual(response['error']['error_code'], error_code) code, constraint_ttl = self.client.resource_constraint_create( self.resource['id'], { - 'limit': int(datetime.utcnow().timestamp()) + 3600, + 'limit': utcnow_timestamp() + 3600, 'type': 'ttl' }) self.assertEqual(code, 201) @@ -342,7 +342,7 @@ def test_resource_constraint_events(self): 'rest_api.rest_api_server.controllers.base.BaseController.' 
'publish_activities_task' ).start() - limit = int(datetime.utcnow().timestamp()) + 3600 + limit = utcnow_timestamp() + 3600 code, constraint = self.client.resource_constraint_create( self.resource['id'], {'limit': limit, 'type': 'ttl'}) self.assertEqual(code, 201) diff --git a/rest_api/rest_api_server/tests/unittests/test_resource_count.py b/rest_api/rest_api_server/tests/unittests/test_resource_count.py index 24520d4a4..9e356144e 100644 --- a/rest_api/rest_api_server/tests/unittests/test_resource_count.py +++ b/rest_api/rest_api_server/tests/unittests/test_resource_count.py @@ -2,6 +2,7 @@ from unittest.mock import patch from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase from rest_api.rest_api_server.utils import timestamp_to_day_start +from tools.optscale_time import utcnow_timestamp class TestResourcesCountApi(TestApiBase): @@ -397,7 +398,7 @@ def test_filter_by_bool_params(self): self.cloud_acc1['id'], first_seen=self.day1, last_seen=self.day2, r_type='type2', count=5) self._add_extra_fields(res1, recommendations={ - 'run_timestamp': datetime.utcnow().timestamp()}) + 'run_timestamp': utcnow_timestamp()}) code, res = self.client.resources_count_get( self.org_id, self.day1, self.day2_inside, 'resource_type', diff --git a/rest_api/rest_api_server/tests/unittests/test_resource_metrics.py b/rest_api/rest_api_server/tests/unittests/test_resource_metrics.py index b2fd6f613..2b37bb288 100644 --- a/rest_api/rest_api_server/tests/unittests/test_resource_metrics.py +++ b/rest_api/rest_api_server/tests/unittests/test_resource_metrics.py @@ -1,8 +1,8 @@ import uuid -from datetime import datetime from unittest.mock import patch from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase +from tools.optscale_time import utcnow_timestamp class TestResourceMetrics(TestApiBase): @@ -37,7 +37,7 @@ def setUp(self, version='v2'): _, self.resource = self._create_cloud_resource( cloud_account['id'], self.valid_resource) - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() self.base_payload = { 'start_date': now, 'end_date': now + 12345, 'interval': 900} @@ -51,7 +51,7 @@ def _create_cloud_resource(self, cloud_acc_id, params, active=True, '_id': resource['id'] }, update={'$set': { - 'last_seen': int(datetime.utcnow().timestamp() - 1), + 'last_seen': utcnow_timestamp() - 1, 'active': True }} ) @@ -68,7 +68,7 @@ def test_metrics_interval_multiplier(self): def test_metrics_invalid_dates(self): validation_params = [ - ('start_date', int(datetime.utcnow().timestamp()) + 1000000, + ('start_date', utcnow_timestamp() + 1000000, 'OE0446'), ('start_date', 'aaa', 'OE0217'), ('start_date', None, 'OE0216'), diff --git a/rest_api/rest_api_server/tests/unittests/test_resources_observer.py b/rest_api/rest_api_server/tests/unittests/test_resources_observer.py index 507a2146a..46054dad4 100644 --- a/rest_api/rest_api_server/tests/unittests/test_resources_observer.py +++ b/rest_api/rest_api_server/tests/unittests/test_resources_observer.py @@ -9,6 +9,7 @@ from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase from tools.cloud_adapter.model import InstanceResource +import tools.optscale_time as opttime NEWLY_DISCOVERED_TIME = 300 # 5 min PROCESS_RESOURCES = ('rest_api.rest_api_server.controllers.limit_hit.' 
@@ -67,7 +68,7 @@ def _create_mongo_resource(self, name, cloud_account_id=None, organization_id=None, created_at=None, applied_rules=None, active=True): if not created_at: - created_at = int(datetime.utcnow().timestamp()) + created_at = opttime.utcnow_timestamp() if not cloud_account_id: cloud_account_id = self.cloud_acc_id if not organization_id: @@ -139,7 +140,7 @@ def test_observe_only_cached(self, m_hits): resources = list(self.resources_collection.find()) for resource in resources: self.assertTrue(resource.get('active')) - now = datetime.utcnow() + now = opttime.utcnow() with freeze_time(now + timedelta(60)): self.resource_discovery_call(self.instances, create_resources=False) code, _ = self.client.observe_resources(self.org_id) @@ -161,7 +162,7 @@ def test_observe_newly_discovered_resources(self): self._create_mongo_resource('res1', applied_rules=[rule_1, rule_2]) self._create_mongo_resource('res2', applied_rules=[rule_1]) self._create_mongo_resource('res3', applied_rules=[rule_3, rule_2]) - old_times = int(datetime.utcnow().timestamp()) - 500 + old_times = opttime.utcnow_timestamp() - 500 self._create_mongo_resource('res_old', created_at=old_times, applied_rules=[rule_1, rule_2]) p_publish_activities = patch( @@ -195,7 +196,7 @@ def test_observe_newly_discovered_resources_clustered(self): code, ct = self.client.cluster_type_create( self.org_id, {'name': 'my_ct', 'tag_key': 'tn'}) self.assertEqual(code, 201) - now = datetime.utcnow() + now = opttime.utcnow() self.resource_discovery_call(self.instances) resources = list(self.resources_collection.find()) cluster = None @@ -241,7 +242,7 @@ def test_observe_newly_discovered_resources_clustered_active(self): code, ct = self.client.cluster_type_create( self.org_id, {'name': 'my_ct', 'tag_key': 'tn'}) self.assertEqual(code, 201) - now = datetime.utcnow() + now = opttime.utcnow() self.resource_discovery_call(self.instances) code, _ = self.client.observe_resources(self.org_id) @@ -278,7 +279,7 @@ def get_violated_resource_ids(): self.org['pool_id'], {'limit': 50, 'type': 'total_expense_limit'}) self.assertEqual(code, 201) employee_id = self.gen_id() - now = datetime.utcnow() + now = opttime.utcnow() res_ids = [] for r in [resources[0], resources[1], resources[2]]: res_ids.append(r['id']) @@ -293,7 +294,7 @@ def get_violated_resource_ids(): 'cost': 100, 'cloud_account_id': self.cloud_acc_id, 'resource_id': r['id'], - 'date': datetime.utcnow(), + 'date': opttime.utcnow(), 'sign': 1 }) self.update_resource_info_by_expenses(res_ids) @@ -313,7 +314,7 @@ def get_violated_resource_ids(): self.resources_collection.update_one( filter={'_id': resources[0]['id']}, update={'$set': { - 'last_seen': int(datetime.utcnow().timestamp()) - 2000, + 'last_seen': opttime.utcnow_timestamp() - 2000, 'active': False}}) code, _ = self.client.process_resource_violations(self.org_id) self.assertEqual(code, 204) @@ -336,7 +337,7 @@ def get_violated_resource_ids(): 'cost': 150, 'cloud_account_id': self.cloud_acc_id, 'resource_id': resources[1]['id'], - 'date': datetime.utcnow(), + 'date': opttime.utcnow(), 'sign': 1 }) self.update_resource_info_by_expenses([resources[1]['id']]) @@ -348,7 +349,7 @@ def get_violated_resource_ids(): 'cost': 150, 'cloud_account_id': self.cloud_acc_id, 'resource_id': resources[3]['id'], - 'date': datetime.utcnow(), + 'date': opttime.utcnow(), 'sign': 1 }) self.update_resource_info_by_expenses([resources[3]['id']]) @@ -405,7 +406,7 @@ def test_send_limit_hit_activities_task(self): 'cost': 100, 'cloud_account_id': self.cloud_acc_id, 
'resource_id': r['id'], - 'date': datetime.utcnow(), + 'date': opttime.utcnow(), 'sign': 1 }) self.update_resource_info_by_expenses(r_ids) @@ -463,7 +464,7 @@ def test_activities_task_with_employee_alert_contact(self): 'cost': 100, 'cloud_account_id': self.cloud_acc_id, 'resource_id': r['id'], - 'date': datetime.utcnow(), + 'date': opttime.utcnow(), 'sign': 1 }) self.update_resource_info_by_expenses([ @@ -506,7 +507,7 @@ def test_process_ttl_limit_hits(self, p_alerts): 'sign': 1 }) - new_now = datetime.utcnow() + new_now = opttime.utcnow() with freeze_time(new_now): self.resource_discovery_call(self.instances) code, _ = self.client.process_resource_violations(self.org_id) @@ -538,8 +539,7 @@ def test_process_daily_limit_hits(self, p_alerts): res['id'], {'limit': limit, 'type': 'daily_expense_limit'}) self.assertEqual(code, 201) employee_id = self.gen_id() - today = datetime.utcnow().replace(hour=0, minute=0, second=0, - microsecond=0) + today = opttime.startday(opttime.utcnow()) yesterday = today - timedelta(days=1) r_ids = [] for r in [resources[0], resources[1], resources[2]]: @@ -559,7 +559,7 @@ def test_process_daily_limit_hits(self, p_alerts): 'sign': 1, }) self.update_resource_info_by_expenses(r_ids) - new_now = datetime.utcnow() + new_now = opttime.utcnow() with freeze_time(new_now): self.resource_discovery_call(self.instances) code, _ = self.client.process_resource_violations(self.org_id) @@ -626,7 +626,7 @@ def test_released_booking_schedule(self): update={'$set': { 'shareable': True}} ) - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() released_at = now - 100 acquired_since = released_at - NEWLY_DISCOVERED_TIME schedule_book = { @@ -662,7 +662,7 @@ def test_released_booking_future(self): update={'$set': { 'shareable': True}} ) - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() released_at = now + 2 * NEWLY_DISCOVERED_TIME acquired_since = now schedule_book = { @@ -698,7 +698,7 @@ def test_released_booking_past(self): update={'$set': { 'shareable': True}} ) - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() released_at = now - 2 * NEWLY_DISCOVERED_TIME acquired_since = released_at - NEWLY_DISCOVERED_TIME schedule_book = { @@ -720,7 +720,7 @@ def test_send_env_changes_activities_task(self): 'BaseController.publish_activities_task').start() token_info = { 'user_id': self.user_id, - 'valid_until': datetime.utcnow().timestamp() * 2 + 'valid_until': opttime.utcnow_timestamp() * 2 } patch('rest_api.rest_api_server.handlers.v1.base.' 'BaseAuthHandler.get_meta_by_token', return_value=token_info @@ -767,7 +767,7 @@ def test_send_env_changes_activities_task(self): def test_send_env_changes_activities_task_without_alert(self): token_info = { 'user_id': self.user_id, - 'valid_until': datetime.utcnow().timestamp() * 2 + 'valid_until': opttime.utcnow_timestamp() * 2 } patch('rest_api.rest_api_server.handlers.v1.base.' 
'BaseAuthHandler.get_meta_by_token', return_value=token_info @@ -816,8 +816,7 @@ def test_policy_constraint_limit_hit(self): update={'$set': {'employee_id': self.employee['id'], 'pool_id': pool['id'], 'first_seen': 1}}) - today = datetime.utcnow().replace(hour=0, minute=0, second=0, - microsecond=0) + today = opttime.startday(opttime.utcnow()) self.expenses.append({ 'cost': 99.1234, 'cloud_account_id': self.cloud_acc_id, @@ -871,8 +870,7 @@ def test_no_green_hit_on_zero_exp(self): update={'$set': {'employee_id': self.employee['id'], 'pool_id': pool['id'], 'first_seen': 1}}) - today = datetime.utcnow().replace(hour=0, minute=0, second=0, - microsecond=0) + today = opttime.startday(opttime.utcnow()) self.expenses.append({ 'cost': 99.1234, 'cloud_account_id': self.cloud_acc_id, diff --git a/rest_api/rest_api_server/tests/unittests/test_schedule_imports.py b/rest_api/rest_api_server/tests/unittests/test_schedule_imports.py index 7f1acf503..13ecf714f 100644 --- a/rest_api/rest_api_server/tests/unittests/test_schedule_imports.py +++ b/rest_api/rest_api_server/tests/unittests/test_schedule_imports.py @@ -1,4 +1,3 @@ -import os import uuid from datetime import datetime, timedelta from unittest.mock import patch @@ -12,6 +11,7 @@ ) from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase from rest_api.rest_api_server.utils import MAX_32_INT, encode_config +import tools.optscale_time as opttime class TestScheduleImportsApi(TestApiBase): @@ -84,7 +84,7 @@ def _create_cloud_acc_object(self, import_period=None, auto_import=True, session = BaseDB.session(engine)() cloud_acc = CloudAccount( name=str(uuid.uuid4()), - created_at=int(datetime.utcnow().timestamp()), + created_at=opttime.utcnow_timestamp(), deleted_at=0, config=encode_config(config), organization_id=org_id, @@ -197,7 +197,7 @@ def test_create_scheduled_duplicate(self): self.assertEqual(len(ret['report_imports']), 1) code, ret = self.client.schedule_import(0) self.assertEqual(len(ret['report_imports']), 0) - with freeze_time(datetime.utcnow() + timedelta(hours=3)): + with freeze_time(opttime.utcnow() + timedelta(hours=3)): code, resp = self.client.schedule_import(0) self.assertEqual(len(resp['report_imports']), 1) code, ret = self.client.schedule_import(0) @@ -216,10 +216,10 @@ def test_create_active_duplicate(self): self.client.report_import_update(imp['id'], {'state': 'in_progress'}) code, ret = self.client.schedule_import(0) self.assertEqual(len(ret['report_imports']), 0) - with freeze_time(datetime.utcnow() + timedelta(hours=10)): + with freeze_time(opttime.utcnow() + timedelta(hours=10)): self.client.report_import_update(imp['id'], {}) code, ret = self.client.schedule_import(0) self.assertEqual(len(ret['report_imports']), 0) - with freeze_time(datetime.utcnow() + timedelta(hours=10, minutes=31)): + with freeze_time(opttime.utcnow() + timedelta(hours=10, minutes=31)): code, ret = self.client.schedule_import(0) self.assertEqual(len(ret['report_imports']), 1) diff --git a/rest_api/rest_api_server/tests/unittests/test_shareable_resources_api.py b/rest_api/rest_api_server/tests/unittests/test_shareable_resources_api.py index f2c2c663d..16c0659a1 100644 --- a/rest_api/rest_api_server/tests/unittests/test_shareable_resources_api.py +++ b/rest_api/rest_api_server/tests/unittests/test_shareable_resources_api.py @@ -9,6 +9,7 @@ from rest_api.google_calendar_client.client import CalendarException from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase from rest_api.rest_api_server.exceptions import Err 
+from tools.optscale_time import utcnow_timestamp DAY_SECONDS = 24 * 60 * 60 @@ -148,7 +149,7 @@ def test_not_existing_employee(self): def test_unexpected_params(self): resource_id = self._create_resource()['id'] - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() since = now - DAY_SECONDS schedule_book = { 'resource_id': resource_id, @@ -164,7 +165,7 @@ def test_unexpected_params(self): def test_invalid_int_param(self): resource_id = self._create_resource()['id'] - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() schedule_book = { 'resource_id': resource_id, 'acquired_by_id': self.employee_2['id'], @@ -207,7 +208,7 @@ def test_create_env_booking(self): self.organization_id, env_resource) self.assertEqual(code, 201) - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() since = now + DAY_SECONDS schedule_book = { 'resource_id': resource['id'], @@ -256,7 +257,7 @@ def test_make_cloud_resource_clustered(self): shareable_resource_1 = self._create_resource(tags={'type': 'val'}) shareable_resource_1_id = shareable_resource_1['id'] self.assertEqual(shareable_resource_1['shareable'], True) - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() since = now + DAY_SECONDS released_at = since + DAY_SECONDS schedule_book = { @@ -318,7 +319,7 @@ def test_make_cloud_resource_clustered(self): def test_invalid_dates(self): resource_id = self._create_resource()['id'] - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() since = now + DAY_SECONDS schedule_book = { 'resource_id': resource_id, @@ -428,7 +429,7 @@ def test_cluster_created(self): def test_create_duplicated_bookings(self): resource_id = self._create_resource()['id'] - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() released_at = now - DAY_SECONDS since = released_at - DAY_SECONDS schedule_book = { @@ -447,7 +448,7 @@ def test_create_duplicated_bookings(self): def test_date_invalid_slot_create(self): resource_id = self._create_resource()['id'] - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() released_at = now - DAY_SECONDS since = released_at - DAY_SECONDS schedule_book = { @@ -503,7 +504,7 @@ def test_date_invalid_slot_create(self): def test_patch_shareable(self): resource = self._create_resource() resource_id = resource['id'] - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() until = now + 2 * DAY_SECONDS schedule_book = { 'resource_id': resource_id, @@ -549,7 +550,7 @@ def test_patch_shareable(self): def test_patch_None_released_at(self): resource = self._create_resource() resource_id = resource['id'] - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() until = now + DAY_SECONDS schedule_book = { 'resource_id': resource_id, @@ -581,7 +582,7 @@ def test_patch_None_released_at(self): def test_patch_invalid_scope_period(self): resource = self._create_resource() resource_id = resource['id'] - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() since = now + DAY_SECONDS until = since + DAY_SECONDS schedule_book = { @@ -622,12 +623,12 @@ def test_release_not_shareable(self): ) code, response = self.client.shareable_book_release( book_1['id'], { - 'released_at': int(datetime.utcnow().timestamp()) + 10}) + 'released_at': utcnow_timestamp() + 10}) self.assertEqual(code, 400) self.assertEqual(response['error']['error_code'], 'OE0480') def test_shareable_resource_list(self): - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() old_since = now - DAY_SECONDS 
old_until = now - 1 since = now + DAY_SECONDS @@ -668,7 +669,7 @@ def test_shareable_resource_list(self): def test_get_booking(self): resource = self._create_resource() - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() resource_id = resource['id'] schedule_book = { 'resource_id': resource_id, @@ -697,7 +698,7 @@ def test_get_not_existing_booking(self): def test_get_deleted_booking_event(self): resource = self._create_resource() - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() resource_id = resource['id'] schedule_book = { 'resource_id': resource_id, @@ -725,7 +726,7 @@ def test_get_deleted_booking_event(self): def test_delete_booking_from_invalid_period(self): resource = self._create_resource(employee_id=self.employee_2['id']) - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() resource_id = resource['id'] schedule_book = { 'resource_id': resource_id, @@ -775,7 +776,7 @@ def side_eff(action, *args, **kwargs): def test_delete_booking_by_org_manager(self): resource = self._create_resource() - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() resource_id = resource['id'] schedule_book = { 'resource_id': resource_id, @@ -813,7 +814,7 @@ def test_delete_booking_by_org_manager(self): def test_create_booking_in_past(self): resource = self._create_resource() - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() since = now - DAY_SECONDS resource_id = resource['id'] schedule_book = { @@ -881,7 +882,7 @@ def test_calendar_events(self): p_update_event = patch( 'rest_api.google_calendar_client.client.GoogleCalendarClient.update_event' ).start() - dt = int(datetime.utcnow().timestamp()) + 1000 + dt = utcnow_timestamp() + 1000 code, response = self.client.shareable_book_release( booking['id'], {'released_at': dt}) self.assertEqual(code, 200) @@ -937,7 +938,7 @@ def test_calendar_events_negative(self): self.organization_id, schedule_book) self.assertEqual(code, 201) - dt = int(datetime.utcnow().timestamp()) + 1000 + dt = utcnow_timestamp() + 1000 patch( 'rest_api.google_calendar_client.client.GoogleCalendarClient.list_events', return_value=[event] @@ -1000,7 +1001,7 @@ def test_get_bookings_for_resource(self): self.assertEqual(response['bookings'], []) # only future and current bookings are returned - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() now_until = now + DAY_SECONDS - 1 old_since = now - DAY_SECONDS old_until = now - 1 @@ -1035,7 +1036,7 @@ def test_bookings_list_for_organization(self): self.assertEqual(code, 200) self.assertEqual(response['bookings'], []) - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() now_until = now + DAY_SECONDS - 1 old_since = now - DAY_SECONDS old_until = now - 1 @@ -1067,7 +1068,7 @@ def test_bookings_list_for_organization(self): def test_patch_current_booking(self): resource = self._create_resource() resource_id = resource['id'] - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() since = now - DAY_SECONDS until = now - 100 schedule_book = { @@ -1275,7 +1276,7 @@ def test_get_shareable_resource_details(self): def test_autorelease(self): for autorelease in [True, False]: resource = self._create_resource(employee_id=self.employee['id']) - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() resource_id = resource['id'] schedule_book = { 'resource_id': resource_id, diff --git a/rest_api/rest_api_server/tests/unittests/test_shareable_resources_bulk_api.py 
b/rest_api/rest_api_server/tests/unittests/test_shareable_resources_bulk_api.py index 76d95e7ba..2fa309e2f 100644 --- a/rest_api/rest_api_server/tests/unittests/test_shareable_resources_bulk_api.py +++ b/rest_api/rest_api_server/tests/unittests/test_shareable_resources_bulk_api.py @@ -1,8 +1,8 @@ -from datetime import datetime from unittest.mock import patch, ANY from tools.cloud_adapter.model import ResourceTypes from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase from rest_api.rest_api_server.exceptions import Err +from tools.optscale_time import utcnow_timestamp class TestShareableResourcesApi(TestApiBase): @@ -59,7 +59,7 @@ def create_cloud_resource(self, cloud_account_id, employee_id=None, name='test_resource', tags=None, last_seen=None, region=None, first_seen=None, shareable=False, use_resource_hash=False): - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() resource_key = 'cloud_resource_id' if use_resource_hash: resource_key = 'cloud_resource_hash' diff --git a/rest_api/rest_api_server/tests/unittests/test_shareable_resources_split_api.py b/rest_api/rest_api_server/tests/unittests/test_shareable_resources_split_api.py index c1667010c..45b916d26 100644 --- a/rest_api/rest_api_server/tests/unittests/test_shareable_resources_split_api.py +++ b/rest_api/rest_api_server/tests/unittests/test_shareable_resources_split_api.py @@ -1,7 +1,7 @@ -from datetime import datetime from unittest.mock import patch from tools.cloud_adapter.model import ResourceTypes from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase +from tools.optscale_time import utcnow_timestamp class TestSplitShareableResourcesApi(TestApiBase): @@ -54,7 +54,7 @@ def create_cloud_resource(self, cloud_account_id, employee_id=None, pool_id=None, resource_type='test_type', name='test_resource', tags=None, last_seen=None, region=None, first_seen=None): - now = int(datetime.utcnow().timestamp()) + now = utcnow_timestamp() resource = { 'cloud_resource_id': self.gen_id(), 'name': name, diff --git a/rest_api/rest_api_server/tests/unittests/test_traffic_expenses_api.py b/rest_api/rest_api_server/tests/unittests/test_traffic_expenses_api.py index 3f0866b9d..c73ca7fb2 100644 --- a/rest_api/rest_api_server/tests/unittests/test_traffic_expenses_api.py +++ b/rest_api/rest_api_server/tests/unittests/test_traffic_expenses_api.py @@ -2,6 +2,7 @@ from unittest.mock import patch, ANY from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase from rest_api.rest_api_server.utils import get_nil_uuid +from tools.optscale_time import utcnow_timestamp class TestTrafficExpensesApi(TestApiBase): @@ -123,7 +124,7 @@ def test_dates_values(self): self.verify_error_code(response, 'OE0224') def test_limit(self): - time = int(datetime.utcnow().timestamp()) + time = utcnow_timestamp() code, response = self.client.traffic_expenses_get( self.org_id, time, time + 1, {'limit': 1}) self.assertEqual(code, 400) diff --git a/rest_api/rest_api_server/tests/unittests/test_ttl_analysis.py b/rest_api/rest_api_server/tests/unittests/test_ttl_analysis.py index 22e691b07..90b91074d 100644 --- a/rest_api/rest_api_server/tests/unittests/test_ttl_analysis.py +++ b/rest_api/rest_api_server/tests/unittests/test_ttl_analysis.py @@ -4,6 +4,7 @@ from unittest.mock import patch from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase +import tools.optscale_time as opttime class TestTtlAnalysis(TestApiBase): @@ -49,8 +50,7 @@ def add_resource(self, owner_id=None, 
pool_id=None, active=False, del resource_dict['resources'][0]['name'] if active: resource_dict['resources'][0]['active'] = True - resource_dict['resources'][0]['last_seen'] = int( - datetime.utcnow().timestamp()) + resource_dict['resources'][0]['last_seen'] = opttime.utcnow_timestamp() if last_seen: resource_dict['resources'][0]['last_seen'] = int(last_seen) code, resources = self.cloud_resource_create_bulk( @@ -60,7 +60,7 @@ def add_resource(self, owner_id=None, pool_id=None, active=False, return resources['resources'][0] def add_expenses(self, resource, starting_date): - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() last_seen = resource.get('last_seen') if not last_seen: last_seen = now @@ -78,13 +78,13 @@ def add_expenses(self, resource, starting_date): self.update_resource_info_by_expenses([resource['id']]) def test_no_default_ttl_value(self): - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() code, resp = self.client.ttl_analysis_get(self.pool_id, 0, now) self.assertEqual(code, 424) self.verify_error_code(resp, 'OE0457') def test_invalid_pool(self): - now = int(datetime.utcnow().timestamp()) + now = opttime.utcnow_timestamp() code, resp = self.client.ttl_analysis_get(self.gen_id(), 0, now, 10) self.assertEqual(code, 404) self.verify_error_code(resp, 'OE0002') @@ -129,7 +129,7 @@ def test_ttl_analysis_no_data(self): @freeze_time("2021-01-12 16:14:00") def test_ttl_analysis(self): ttl = 30 - now = datetime.utcnow() + now = opttime.utcnow() expense_start_date = now - timedelta(days=3) code, sub_pool = self.client.pool_create( self.org_id, {"name": "sub", "parent_id": self.pool_id}) @@ -169,7 +169,7 @@ def test_ttl_analysis(self): @freeze_time("2021-01-12 16:14:00") def test_resources_created_deleted_in_range(self): ttl = 5 - now = datetime.utcnow() + now = opttime.utcnow() expense_start_date = now - timedelta(days=1) code, sub_pool = self.client.pool_create( self.org_id, {"name": "sub", "parent_id": self.pool_id}) @@ -214,7 +214,7 @@ def test_resources_created_deleted_in_range(self): @freeze_time("2021-01-12 16:14:00") def test_ttl_reached_before_date_range(self): ttl = 30 - now = datetime.utcnow() + now = opttime.utcnow() expense_start_date = now - timedelta(days=4) code, sub_pool = self.client.pool_create( self.org_id, {"name": "sub", "parent_id": self.pool_id}) @@ -258,7 +258,7 @@ def test_ttl_from_policy(self): self.pool_id, {'limit': ttl, 'type': 'ttl'}) self.assertEqual(code, 201) - now = datetime.utcnow() + now = opttime.utcnow() expense_start_date = now - timedelta(days=3) resource = self.add_resource(owner_id=self.employee_id, pool_id=self.pool_id, active=True) @@ -286,7 +286,7 @@ def test_ttl_from_policy(self): @freeze_time("2021-01-12 16:14:00") def test_resource_without_name(self): - now = datetime.utcnow() + now = opttime.utcnow() expense_start_date = now - timedelta(days=3) resource = self.add_resource(owner_id=self.employee_id, pool_id=self.pool_id, active=True, diff --git a/rest_api/rest_api_server/tests/unittests/test_webhooks_api.py b/rest_api/rest_api_server/tests/unittests/test_webhooks_api.py index a10f5cfb8..4f1d01b16 100644 --- a/rest_api/rest_api_server/tests/unittests/test_webhooks_api.py +++ b/rest_api/rest_api_server/tests/unittests/test_webhooks_api.py @@ -3,6 +3,7 @@ from datetime import datetime from unittest.mock import patch, ANY from rest_api.rest_api_server.tests.unittests.test_api_base import TestApiBase +from tools.optscale_time import utcnow_timestamp class TestWebhooksApi(TestApiBase): @@ 
-358,7 +359,7 @@ def test_publish_hook_task(self): schedule_book) self.assertEqual(code, 201) code, _ = self.client.shareable_book_release( - book['id'], {'released_at': int(datetime.utcnow().timestamp())}) + book['id'], {'released_at': utcnow_timestamp()}) self.assertEqual(code, 200) self.assertEqual(p_publish_task.call_count, 1) p_publish_task.assert_called_with({ diff --git a/rest_api/rest_api_server/utils.py b/rest_api/rest_api_server/utils.py index 80ce6d9c3..0a4befd80 100644 --- a/rest_api/rest_api_server/utils.py +++ b/rest_api/rest_api_server/utils.py @@ -31,6 +31,7 @@ from tools.optscale_exceptions.http_exc import OptHTTPError from pymongo.errors import BulkWriteError from tools.cloud_adapter.exceptions import CloudAdapterBaseException +from tools.optscale_time import utcfromtimestamp, utcnow from rest_api.rest_api_server.exceptions import Err from retrying import retry import unicodedata @@ -322,7 +323,7 @@ def gen_id(): def now_timestamp(): - return int(datetime.utcnow().timestamp()) + return int(utcnow().timestamp()) def safe_string(str_, length=20): @@ -602,5 +603,5 @@ def inner(*args, **kwargs): def timestamp_to_day_start(timestamp) -> datetime: - return datetime.utcfromtimestamp(timestamp).replace( + return utcfromtimestamp(timestamp).replace( hour=0, minute=0, second=0, microsecond=0) diff --git a/risp/risp_worker/migrator.py b/risp/risp_worker/migrator.py index add0f7c75..16eba53f1 100644 --- a/risp/risp_worker/migrator.py +++ b/risp/risp_worker/migrator.py @@ -2,7 +2,7 @@ import hashlib import importlib import logging -from datetime import datetime +from datetime import datetime, timezone from clickhouse_driver import Client as ClickHouseClient LOG = logging.getLogger(__name__) @@ -108,7 +108,7 @@ def update_versions_table(self, filename): 'version': self._get_version_from_name(filename), 'md5': self._get_script_from_name(filename), 'script': self._get_script_from_name(filename), - 'created_at': datetime.utcnow() + 'created_at': datetime.now(tz=timezone.utc).replace(tzinfo=None) }] self.clickhouse_client.execute( f"INSERT INTO {VERSIONS_TABLE} VALUES", version) diff --git a/slacker/Dockerfile b/slacker/Dockerfile index 178228d06..df46a2f3c 100644 --- a/slacker/Dockerfile +++ b/slacker/Dockerfile @@ -3,6 +3,7 @@ WORKDIR /src ENV PYTHONPATH=/src COPY tools/optscale_exceptions tools/optscale_exceptions +COPY tools/optscale_time tools/optscale_time COPY optscale_client/auth_client optscale_client/auth_client COPY optscale_client/config_client optscale_client/config_client COPY optscale_client/rest_api_client optscale_client/rest_api_client diff --git a/slacker/requirements.txt b/slacker/requirements.txt index efb0765cd..ceba3270c 100644 --- a/slacker/requirements.txt +++ b/slacker/requirements.txt @@ -9,6 +9,7 @@ pyyaml==6.0.1 currency-symbols==2.0.3 # OptScale packages -e tools/optscale_exceptions +-e tools/optscale_time -e optscale_client/auth_client -e optscale_client/config_client -e optscale_client/rest_api_client diff --git a/slacker/slacker_server/controllers/slack.py b/slacker/slacker_server/controllers/slack.py index 4fa440a7b..4843c8601 100644 --- a/slacker/slacker_server/controllers/slack.py +++ b/slacker/slacker_server/controllers/slack.py @@ -31,6 +31,7 @@ get_ca_not_connected_message, get_not_have_slack_permissions_message) from slacker.slacker_server.models.models import User from slacker.slacker_server.utils import gen_id +from tools.optscale_time import utcfromtimestamp, utcnow_timestamp LOG = logging.getLogger(__name__) TTL_LIMIT_TO_SHOW = 72 @@ -179,7 
diff --git a/risp/risp_worker/migrator.py b/risp/risp_worker/migrator.py
index add0f7c75..16eba53f1 100644
--- a/risp/risp_worker/migrator.py
+++ b/risp/risp_worker/migrator.py
@@ -2,7 +2,7 @@
 import hashlib
 import importlib
 import logging
-from datetime import datetime
+from datetime import datetime, timezone
 from clickhouse_driver import Client as ClickHouseClient

 LOG = logging.getLogger(__name__)
@@ -108,7 +108,7 @@ def update_versions_table(self, filename):
             'version': self._get_version_from_name(filename),
             'md5': self._get_script_from_name(filename),
             'script': self._get_script_from_name(filename),
-            'created_at': datetime.utcnow()
+            'created_at': datetime.now(tz=timezone.utc).replace(tzinfo=None)
         }]
         self.clickhouse_client.execute(
             f"INSERT INTO {VERSIONS_TABLE} VALUES", version)
diff --git a/slacker/Dockerfile b/slacker/Dockerfile
index 178228d06..df46a2f3c 100644
--- a/slacker/Dockerfile
+++ b/slacker/Dockerfile
@@ -3,6 +3,7 @@ WORKDIR /src
 ENV PYTHONPATH=/src

 COPY tools/optscale_exceptions tools/optscale_exceptions
+COPY tools/optscale_time tools/optscale_time
 COPY optscale_client/auth_client optscale_client/auth_client
 COPY optscale_client/config_client optscale_client/config_client
 COPY optscale_client/rest_api_client optscale_client/rest_api_client
diff --git a/slacker/requirements.txt b/slacker/requirements.txt
index efb0765cd..ceba3270c 100644
--- a/slacker/requirements.txt
+++ b/slacker/requirements.txt
@@ -9,6 +9,7 @@ pyyaml==6.0.1
 currency-symbols==2.0.3
 # OptScale packages
 -e tools/optscale_exceptions
+-e tools/optscale_time
 -e optscale_client/auth_client
 -e optscale_client/config_client
 -e optscale_client/rest_api_client
diff --git a/slacker/slacker_server/controllers/slack.py b/slacker/slacker_server/controllers/slack.py
index 4fa440a7b..4843c8601 100644
--- a/slacker/slacker_server/controllers/slack.py
+++ b/slacker/slacker_server/controllers/slack.py
@@ -31,6 +31,7 @@
     get_ca_not_connected_message, get_not_have_slack_permissions_message)
 from slacker.slacker_server.models.models import User
 from slacker.slacker_server.utils import gen_id
+from tools.optscale_time import utcfromtimestamp, utcnow_timestamp

 LOG = logging.getLogger(__name__)
 TTL_LIMIT_TO_SHOW = 72
@@ -179,7 +180,7 @@ def disconnect(self, ack, say, body, logger):
         auth_cl, _ = self.get_user_api_clients(user.auth_user_id)
         _, user_info = auth_cl.user_get(user.auth_user_id)

-        now_ts = int(datetime.utcnow().timestamp())
+        now_ts = utcnow_timestamp()
         user.deleted_at = now_ts
         self.session.add(user)
         try:
@@ -262,7 +263,7 @@ def message_resources(self, ack, body, say, logger):
         _, expenses_resp = rest_cl.clean_expenses_get(
             organization_id=user.organization_id,
             start_date=0,
-            end_date=int(datetime.utcnow().timestamp()),
+            end_date=utcnow_timestamp(),
             params={'owner_id': [user.employee_id],
                     'active': True}
         )
@@ -304,7 +305,7 @@ def _check_expense_limit(expense_constr):
                 EXPENSE_LIMIT_TO_SHOW)

             if ttl_constr:
-                hrs = (ttl_constr['limit'] - datetime.utcnow().timestamp()) / SEC_IN_HRS
+                hrs = (ttl_constr['limit'] - utcnow_timestamp()) / SEC_IN_HRS
                 if int(hrs) <= TTL_LIMIT_TO_SHOW:
                     shown_data[i]['ttl'] = hrs
             if (total_expense_constr and
@@ -401,7 +402,7 @@ def update_ttl_submit(self, ack, body, client, view, say, logger):
             resource_id, details=True)
         try:
             if view_value != -1:
-                limit = int(datetime.utcnow().timestamp()) + view_value * SEC_IN_HRS
+                limit = utcnow_timestamp() + view_value * SEC_IN_HRS
                 if resource["details"]["constraints"].get("ttl"):
                     rest_cl.resource_constraint_update(
                         resource["details"]["constraints"]["ttl"]["id"],
@@ -628,7 +629,7 @@ def message_slack_permissions(self, say):
         say(get_not_have_slack_permissions_message())

     def get_current_booking(self, booking_list):
-        now_ts = int(datetime.utcnow().timestamp())
+        now_ts = utcnow_timestamp()
         for booking in booking_list:
             if (booking['acquired_since'] <= now_ts < booking['released_at']
                     or (booking['acquired_since'] <= now_ts and
@@ -636,7 +637,7 @@ def get_current_booking(self, booking_list):
             return booking

     def _ts_to_string(self, date_ts):
-        date = datetime.utcfromtimestamp(date_ts)
+        date = utcfromtimestamp(date_ts)
         return datetime.strftime(date, "%m/%d/%Y %H:%M UTC")

     def get_booking_parameters(self, booking):
@@ -801,7 +802,7 @@ def booking_details(self, ack, say, action, body, logger):
             except HTTPError:
                 pass
         current_booking = self.get_current_booking(bookings)
-        now_ts = int(datetime.utcnow().timestamp())
+        now_ts = utcnow_timestamp()
         _, employee_list = rest_cl.employee_list(org['id'])
         employee_id_map = {x['id']: x for x in employee_list['employees']}
         for booking in bookings:
@@ -865,7 +866,7 @@ def release_booking(self, ack, action, body, say, logger):

         _, rest_cl = self.get_user_api_clients(user.auth_user_id)

-        now_ts = int(datetime.utcnow().timestamp())
+        now_ts = utcnow_timestamp()
         try:
             _, book = rest_cl.shareable_book_get(booking_id)
             _, resource = rest_cl.cloud_resource_get(book['resource_id'])
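
# Toy illustration (values are made up) of the remaining-TTL arithmetic the
# handlers above share: constraints store an absolute epoch limit, and the
# bot only renders a countdown when it falls within TTL_LIMIT_TO_SHOW hours.
from tools.optscale_time import utcnow_timestamp

SEC_IN_HRS = 3600
TTL_LIMIT_TO_SHOW = 72
limit = utcnow_timestamp() + 5 * SEC_IN_HRS        # expires in ~5 hours
hrs = (limit - utcnow_timestamp()) / SEC_IN_HRS
assert int(hrs) <= TTL_LIMIT_TO_SHOW               # close enough to show
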
diff --git a/slacker/slacker_server/generate_slack_messages.py b/slacker/slacker_server/generate_slack_messages.py
index 1be176433..0d9d8cf81 100644
--- a/slacker/slacker_server/generate_slack_messages.py
+++ b/slacker/slacker_server/generate_slack_messages.py
@@ -28,15 +28,16 @@
 from slacker.slacker_server.message_templates.resource_details import *
 from slacker.slacker_server.message_templates.warnings import *
 from slacker.slacker_server.models.db_factory import DBType, DBFactory
+from tools.optscale_time import utcfromtimestamp, utcnow_timestamp

-NOW_TS = int(datetime.utcnow().timestamp())
-ACQUIRED_SINCE = datetime.strftime(datetime.utcfromtimestamp(
+NOW_TS = utcnow_timestamp()
+ACQUIRED_SINCE = datetime.strftime(utcfromtimestamp(
     NOW_TS - 60), "%m/%d/%Y %H:%M UTC")
-RELEASED_AT = datetime.strftime(datetime.utcfromtimestamp(NOW_TS + 3600),
+RELEASED_AT = datetime.strftime(utcfromtimestamp(NOW_TS + 3600),
                                 "%m/%d/%Y %H:%M UTC")
-FUTURE_ACQUIRED_SINCE = datetime.strftime(datetime.utcfromtimestamp(
+FUTURE_ACQUIRED_SINCE = datetime.strftime(utcfromtimestamp(
     NOW_TS + 3660), "%m/%d/%Y %H:%M UTC")
-FUTURE_RELEASED_AT = datetime.strftime(datetime.utcfromtimestamp(
+FUTURE_RELEASED_AT = datetime.strftime(utcfromtimestamp(
     NOW_TS + 7200), "%m/%d/%Y %H:%M UTC")
 POOL = {"name": "My pool", "limit": 500, "id": str(uuid.uuid4())}
 CLOUD_ACCOUNT = {"name": "My super cloud", "id": str(uuid.uuid4())}
diff --git a/slacker/slacker_server/message_templates/alerts.py b/slacker/slacker_server/message_templates/alerts.py
index d23f5edcc..87272892d 100644
--- a/slacker/slacker_server/message_templates/alerts.py
+++ b/slacker/slacker_server/message_templates/alerts.py
@@ -1,5 +1,5 @@
-from datetime import datetime
 from currency_symbols.currency_symbols import CURRENCY_SYMBOLS_MAP as CURRENCY_MAP
+from tools.optscale_time import utcnow

 __all__ = ['get_alert_message', 'get_alert_added_message',
            'get_alert_removed_message', 'get_join_channel_message',
@@ -19,7 +19,7 @@ def get_alert_message(pool_name, organization_name, organization_id,
         exceed_str = f'*{c_sign}{threshold}* threshold'
     else:
         exceed_str = f'*{threshold}%*'
-    now = datetime.utcnow()
+    now = utcnow()
     month_start = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
     today_end = now.replace(hour=23, minute=59, second=59)
     start_ts = int(month_start.timestamp())
diff --git a/slacker/slacker_server/message_templates/bookings.py b/slacker/slacker_server/message_templates/bookings.py
index c2833cfb2..109615587 100644
--- a/slacker/slacker_server/message_templates/bookings.py
+++ b/slacker/slacker_server/message_templates/bookings.py
@@ -1,4 +1,5 @@
 from datetime import datetime, timedelta
+from tools.optscale_time import utcnow

 __all__ = ['get_time_options', 'get_add_bookings_form', 'get_booking_block',
            'get_booking_details_message']
@@ -7,7 +8,7 @@
 def get_time_options():
     options = []
     initial_option = None
-    now = datetime.utcnow()
+    now = utcnow()
     start = now.replace(hour=0, minute=0, second=0, microsecond=0)
     end = start + timedelta(days=1)
     minutes = 0
@@ -29,7 +30,7 @@


 def get_add_bookings_form(resource, public_ip):
-    now_date = datetime.strftime(datetime.utcnow(), "%Y-%m-%d")
+    now_date = datetime.strftime(utcnow(), "%Y-%m-%d")
     r_id = resource['id']
     r_name = resource.get('name')
     options, initial_option = get_time_options()
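
# Rough sketch (simplified, not the actual template code) of the day grid
# get_time_options() walks: naive-UTC midnight today, stepped in 30-minute
# slots until midnight tomorrow.
from datetime import timedelta
from tools.optscale_time import utcnow

start = utcnow().replace(hour=0, minute=0, second=0, microsecond=0)
end = start + timedelta(days=1)
slots, cur = [], start
while cur < end:
    slots.append(cur.strftime("%H:%M"))
    cur += timedelta(minutes=30)
assert len(slots) == 48
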
diff --git a/slacker/slacker_server/message_templates/constraints.py b/slacker/slacker_server/message_templates/constraints.py
index fcff58f50..235fa8cb9 100644
--- a/slacker/slacker_server/message_templates/constraints.py
+++ b/slacker/slacker_server/message_templates/constraints.py
@@ -1,5 +1,5 @@
-from datetime import datetime
 from slacker.slacker_server.message_templates.resource_details import get_resource_details_block
+from tools.optscale_time import utcnow_timestamp

 __all__ = ['get_ttl_constraint_message', 'get_constraint_block',
            'get_update_ttl_form', 'get_constraint_updated']
@@ -11,7 +11,7 @@
 def get_ttl_constraint_message(ttl_constr):
     ttl_msg = None
     if ttl_constr:
-        hrs = (ttl_constr['limit'] - datetime.utcnow().timestamp()) / SEC_IN_HRS
+        hrs = (ttl_constr['limit'] - utcnow_timestamp()) / SEC_IN_HRS
         if ttl_constr['limit'] == 0:
             ttl_msg = ":warning:No limit"
         elif hrs <= -1:
@@ -60,7 +60,7 @@ def get_update_ttl_form(resource, org_id, public_ip):
             },
         }
     ] + get_resource_details_block(resource, org_id, public_ip)
-    created = (datetime.utcnow().timestamp() - resource['created_at']) / SEC_IN_HRS
+    created = (utcnow_timestamp() - resource['created_at']) / SEC_IN_HRS
     if created < 1:
         created_msg = '< 1 hour ago'
     elif created == 1:
diff --git a/slacker/slacker_server/message_templates/resource_details.py b/slacker/slacker_server/message_templates/resource_details.py
index b14fe416a..29d32bfde 100644
--- a/slacker/slacker_server/message_templates/resource_details.py
+++ b/slacker/slacker_server/message_templates/resource_details.py
@@ -1,6 +1,6 @@
 from collections import OrderedDict
-from datetime import datetime
 from currency_symbols.currency_symbols import CURRENCY_SYMBOLS_MAP
+from tools.optscale_time import utcnow_timestamp

 __all__ = ['get_resource_details_block', 'get_resource_details_message']

@@ -96,7 +96,7 @@ def get_resource_details_message(

     ttl = constraints.get('ttl')
     if ttl:
-        hrs = (ttl['limit'] - datetime.utcnow().timestamp()) / SEC_IN_HRS
+        hrs = (ttl['limit'] - utcnow_timestamp()) / SEC_IN_HRS
         if ttl['limit'] == 0:
             ttl_msg = ":warning:No limit"
         elif hrs <= -1:
diff --git a/slacker/slacker_server/tests/test_send_message_api.py b/slacker/slacker_server/tests/test_send_message_api.py
index f00467cc5..e670c773e 100644
--- a/slacker/slacker_server/tests/test_send_message_api.py
+++ b/slacker/slacker_server/tests/test_send_message_api.py
@@ -1,8 +1,8 @@
 from unittest.mock import patch, Mock, ANY
-from datetime import datetime
 from slack_sdk.errors import SlackApiError
 from slacker.slacker_server.tests.test_api_base import TestApiBase
 from slacker.slacker_server.utils import gen_id
+from tools.optscale_time import utcnow


 class TestSendMessageAPI(TestApiBase):
@@ -24,7 +24,7 @@ def connect_user(self):
         patch('slacker.slacker_server.handlers.v2.base.'
               'BaseHandler.get_meta_by_token',
               return_value={'user_id': self.user_id,
-                            'valid_until': datetime.utcnow().timestamp()*2}
+                            'valid_until': utcnow().timestamp()*2}
               ).start()
         auth_cl_mock.user_update.return_value = (200, {'email': '1@1.com'})
         rest_cl_mock.organization_list.return_value = (
diff --git a/tools/cloud_adapter/clouds/alibaba.py b/tools/cloud_adapter/clouds/alibaba.py
index e58112c35..1055df8a8 100644
--- a/tools/cloud_adapter/clouds/alibaba.py
+++ b/tools/cloud_adapter/clouds/alibaba.py
@@ -668,7 +668,7 @@ def pod_discovery_calls(self):
         return []

     def configure_report(self):
-        now = datetime.utcnow()
+        now = datetime.now(tz=timezone.utc).replace(tzinfo=None)
         config_update = {}
         currency = self.DEFAULT_CURRENCY
         try:
diff --git a/tools/cloud_adapter/clouds/aws.py b/tools/cloud_adapter/clouds/aws.py
index 46a122ddc..6d4f30bf5 100644
--- a/tools/cloud_adapter/clouds/aws.py
+++ b/tools/cloud_adapter/clouds/aws.py
@@ -1,4 +1,4 @@
-import datetime
+from datetime import datetime, timezone
 import enum
 from concurrent.futures.thread import ThreadPoolExecutor
 from functools import wraps
@@ -1139,7 +1139,8 @@ def get_region_name_code_map(self):
     @_wrap_timeout_exception()
     def get_spot_history(self, region, flavors):
         return self.session.client('ec2', region).describe_spot_price_history(
-            InstanceTypes=flavors, StartTime=datetime.datetime.utcnow(),
+            InstanceTypes=flavors,
+            StartTime=datetime.now(tz=timezone.utc).replace(tzinfo=None),
         )

     @staticmethod
@@ -1186,7 +1187,7 @@ def discover_region_images(self, region, by_owner=True, filter_by=None):
                     }
                     for bdm in image.get('BlockDeviceMappings', [])
                 ],
-                cloud_created_at=int(datetime.strptime(
+                cloud_created_at=int(datetime.strptime(
                     image['CreationDate'], date_format).timestamp()),
                 tags=self._extract_tags(image),
             )
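
# Background sketch (editor's note, not from the patch): datetime.utcnow()
# is deprecated since Python 3.12; the tz-aware call stripped back to a
# naive value is the drop-in replacement used throughout these adapters.
from datetime import datetime, timezone

old_style = datetime.utcnow()                                   # deprecated
new_style = datetime.now(tz=timezone.utc).replace(tzinfo=None)  # equivalent
assert abs((new_style - old_style).total_seconds()) < 1
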
diff --git a/tools/cloud_adapter/clouds/azure.py b/tools/cloud_adapter/clouds/azure.py
index 4cbbd1d7f..04d745a62 100644
--- a/tools/cloud_adapter/clouds/azure.py
+++ b/tools/cloud_adapter/clouds/azure.py
@@ -1,4 +1,4 @@
-import datetime
+from datetime import datetime, timezone, timedelta
 import enum
 import logging
 import time
@@ -447,8 +447,8 @@ def _get_billing_info(self):
         warnings = []
         usage_detail = None
         try:
-            range_end = datetime.datetime.utcnow()
-            range_start = range_end - datetime.timedelta(days=DAYS_IN_MONTH)
+            range_end = datetime.now(tz=timezone.utc).replace(tzinfo=None)
+            range_start = range_end - timedelta(days=DAYS_IN_MONTH)
             usage = self.get_usage(range_start, range_end, 1)
             if usage is None:
                 raise StopIteration
@@ -825,7 +825,7 @@ def get_usage(self, start_date, range_end=None, limit=None):
         date_format = '%Y-%m-%dT%H:%M:%S.%fZ'
         filter_fmt = "properties/usageStart ge '{}' and properties/usageEnd lt '{}'"
         if range_end is None:
-            range_end = datetime.datetime.utcnow()
+            range_end = datetime.now(tz=timezone.utc).replace(tzinfo=None)
         start_str = start_date.strftime(date_format)
         end_str = range_end.strftime(date_format)
         # test request to check subscription type
diff --git a/tools/cloud_adapter/clouds/kubernetes.py b/tools/cloud_adapter/clouds/kubernetes.py
index bdc1991dd..d788e2dd1 100644
--- a/tools/cloud_adapter/clouds/kubernetes.py
+++ b/tools/cloud_adapter/clouds/kubernetes.py
@@ -5,7 +5,7 @@

 import requests

-from datetime import datetime
+from datetime import datetime, timezone
 from urllib.parse import quote

@@ -187,7 +187,7 @@ def bucket_discovery_calls(self):
         return []

     def discover_pod_resources(self):
-        now = int(datetime.utcnow().timestamp())
+        now = int(datetime.now(tz=timezone.utc).timestamp())
         pod_name_labels_map = self._get_pod_labels(now)
         pod_info_metrics = self._get_pods_info_metrics(now)
         service_selectors_map = self._get_service_selectors(now)
diff --git a/tools/optscale_time/__init__.py b/tools/optscale_time/__init__.py
new file mode 100644
index 000000000..535a8917c
--- /dev/null
+++ b/tools/optscale_time/__init__.py
@@ -0,0 +1 @@
+from tools.optscale_time.optscale_time import *
diff --git a/tools/optscale_time/optscale_time.py b/tools/optscale_time/optscale_time.py
new file mode 100644
index 000000000..d4a50b152
--- /dev/null
+++ b/tools/optscale_time/optscale_time.py
@@ -0,0 +1,18 @@
+from datetime import datetime, timezone
+
+
+def utcnow() -> datetime:
+    return datetime.now(tz=timezone.utc).replace(tzinfo=None)
+
+
+def utcnow_timestamp() -> int:
+    return int(datetime.now(tz=timezone.utc).timestamp())
+
+
+def utcfromtimestamp(timestamp) -> datetime:
+    return datetime.fromtimestamp(
+        timestamp, timezone.utc).replace(tzinfo=None)
+
+
+def startday(date) -> datetime:
+    return date.replace(hour=0, minute=0, second=0, microsecond=0)
diff --git a/tools/optscale_time/setup.py b/tools/optscale_time/setup.py
new file mode 100644
index 000000000..f4230e0af
--- /dev/null
+++ b/tools/optscale_time/setup.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import sys
+from setuptools import setup
+
+
+setup(name='optscale-time',
+      description='OptScale Time',
+      url='http://hystax.com',
+      author='Hystax',
+      author_email='info@hystax.com',
+      package_dir={'optscale_time': ''},
+      packages=['optscale_time'],
+      )
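
# A quick tour of the new helpers (illustrative, assuming the package is on
# PYTHONPATH as the Dockerfile and requirements changes arrange):
import tools.optscale_time as opttime

now = opttime.utcnow()              # naive datetime carrying UTC wall time
ts = opttime.utcnow_timestamp()     # int epoch seconds
dt = opttime.utcfromtimestamp(ts)   # naive UTC datetime from an epoch value
day = opttime.startday(now)         # midnight at the start of now's day
assert dt.tzinfo is None and day.hour == 0
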
diff --git a/trapper/trapper_worker/Dockerfile b/trapper/trapper_worker/Dockerfile
index 34fb7030d..fcc872d62 100644
--- a/trapper/trapper_worker/Dockerfile
+++ b/trapper/trapper_worker/Dockerfile
@@ -7,6 +7,7 @@ ENV PYTHONPATH=/usr/src/app/
 COPY optscale_client/config_client optscale_client/config_client
 COPY optscale_client/rest_api_client optscale_client/rest_api_client
 COPY tools/cloud_adapter tools/cloud_adapter
+COPY tools/optscale_time tools/optscale_time

 COPY trapper/trapper_worker/requirements.txt .
 RUN pip install --no-cache-dir -r requirements.txt
diff --git a/trapper/trapper_worker/processor.py b/trapper/trapper_worker/processor.py
index ee5fcae71..d56788365 100644
--- a/trapper/trapper_worker/processor.py
+++ b/trapper/trapper_worker/processor.py
@@ -7,6 +7,7 @@

 from tools.cloud_adapter.cloud import Cloud as CloudAdapter
 from optscale_client.rest_api_client.client_v2 import Client as RestClient
+from tools.optscale_time import utcfromtimestamp, utcnow

 LOG = get_logger(__name__)
 CHUNK_SIZE = 10000
@@ -98,8 +99,8 @@ def get_expenses_filters(self, cloud_account_id, tasks):
             '$or': [
                 {
                     'start_date': {
-                        '$gte': datetime.utcfromtimestamp(t['start_date']),
-                        '$lte': datetime.utcfromtimestamp(t['end_date'])
+                        '$gte': utcfromtimestamp(t['start_date']),
+                        '$lte': utcfromtimestamp(t['end_date'])
                     }
                 }
                 for t in tasks
@@ -336,8 +337,8 @@ def get_expenses_filters(self, cloud_account_id, tasks):
             '$or': [
                 {
                     'start_date': {
-                        '$gte': datetime.utcfromtimestamp(t['start_date']),
-                        '$lte': datetime.utcfromtimestamp(t['end_date'])
+                        '$gte': utcfromtimestamp(t['start_date']),
+                        '$lte': utcfromtimestamp(t['end_date'])
                     }
                 }
                 for t in tasks
diff --git a/trapper/trapper_worker/requirements.txt b/trapper/trapper_worker/requirements.txt
index 696912c7c..c2b192060 100644
--- a/trapper/trapper_worker/requirements.txt
+++ b/trapper/trapper_worker/requirements.txt
@@ -6,3 +6,4 @@ urllib3==1.26.18
 -e optscale_client/rest_api_client
 -e tools/cloud_adapter
 -e optscale_client/config_client
+-e tools/optscale_time
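
# Shape sketch (hypothetical task values) of the Mongo filter built above:
# per-task epoch bounds become naive-UTC datetimes on the 'start_date' field.
from tools.optscale_time import utcfromtimestamp

tasks = [{'start_date': 0, 'end_date': 86399}]
filters = {'$or': [
    {'start_date': {
        '$gte': utcfromtimestamp(t['start_date']),  # 1970-01-01 00:00:00
        '$lte': utcfromtimestamp(t['end_date']),    # 1970-01-01 23:59:59
    }}
    for t in tasks
]}
assert filters['$or'][0]['start_date']['$gte'].year == 1970
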