Skip to content

Commit

Permalink
Pull request update/240201
Browse files Browse the repository at this point in the history
d666233 OS-7228. Renamed migration
484fa91 OS-7228. Replace two indexes with one in raw_expenses
collection
58400e4 OS-7260. Fixed 409 on name / description change for application with goals used in leaderboard(s)
5e3ded8 OS-6770. Derive tooltip and slice state to decrease chart
tooltip updates amount
784afd9 OS-7253. Improve traffic_expenses performance
5d7ebe2 OS-7243. Fixed the katara hangup with a large number of tasks in the queue
179d8d9 OS-7267. Fix input end adornments for Primary and Secondary
metric fields
ab86cf8 OS-7248. Fixed issue when limit hits requested with wrong
entity id after changing an organization
d7fa6ea OS-7163. Fixed user index and removed legacy roles in auth
  • Loading branch information
maxb-hystax authored Feb 5, 2024
2 parents 8d9bebc + d666233 commit c4c8649
Show file tree
Hide file tree
Showing 28 changed files with 341 additions and 176 deletions.
15 changes: 8 additions & 7 deletions arcee/arcee_receiver/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ class Run(RunPostIn, RunPatchIn):


class ApplicationPatchIn(BaseModel):
goals: Optional[List[str]] = []
goals: Optional[List[str]] = None
name: Optional[str] = None
description: Optional[str] = None
owner_id: Optional[str] = None
Expand Down Expand Up @@ -304,12 +304,13 @@ async def update_application(request, body: ApplicationPatchIn, id_: str):
{"token": token, "_id": id_, "deleted_at": 0})
if not o:
raise SanicException("Not found", status_code=404)
await check_goals(body.goals)
goals_to_remove = set(o['goals']) - set(body.goals)
for goal_id in goals_to_remove:
if await _goal_used_in_lb(db, goal_id, application_id=id_):
raise SanicException(f"Goal is used in application leaderboard(s)",
status_code=409)
if body.goals is not None:
await check_goals(body.goals)
goals_to_remove = set(o['goals']) - set(body.goals)
for goal_id in goals_to_remove:
if await _goal_used_in_lb(db, goal_id, application_id=id_):
raise SanicException(f"Goal is used in application leaderboard(s)",
status_code=409)
d = body.model_dump(exclude_unset=True)
if d:
await db.application.update_one(
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
# pylint: disable=C0103
"""remove_legacy_users

Revision ID: 0321f4e3fe3f
Revises: cd08c646c952
Create Date: 2024-01-28 04:57:36.488047
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.orm import Session
from auth.auth_server.models.models import Action, Type, Role, RoleAction


# revision identifiers, used by Alembic.
revision = '0321f4e3fe3f'
down_revision = 'cd08c646c952'
branch_labels = None
depends_on = None

# Names of the legacy roles this migration removes.
DRPLAN_OPERATOR = 'Drplan Operator'
ADMIN = 'Super Admin'


def upgrade():
    """Delete the legacy 'Drplan Operator' and 'Super Admin' roles.

    Removes the role rows together with their 'assignment' and
    'role_action' child rows. Lightweight ``sa.table`` constructs are
    used instead of the ORM models so the migration does not depend on
    the current model definitions.
    """
    assignments = sa.table('assignment',
                           sa.column('role_id', sa.String()))
    roles = sa.table('role',
                     sa.column('id', sa.String()),
                     sa.column('name', sa.String()))
    role_actions = sa.table('role_action',
                            sa.column('role_id', sa.String()))
    session = Session(bind=op.get_bind())
    try:
        ids_query = sa.select([roles.c.id]).where(
            roles.c.name.in_([DRPLAN_OPERATOR, ADMIN]))
        legacy_ids = [row[0] for row in session.execute(ids_query)]
        # Delete child rows first, then the roles themselves.
        for statement in (
                assignments.delete().where(
                    assignments.c.role_id.in_(legacy_ids)),
                role_actions.delete().where(
                    role_actions.c.role_id.in_(legacy_ids)),
                roles.delete().where(roles.c.id.in_(legacy_ids))):
            session.execute(statement)
        session.commit()
    finally:
        session.close()


def downgrade():
    """Recreate the legacy 'Drplan Operator' and 'Super Admin' roles.

    'Super Admin' is re-granted every known action; 'Drplan Operator' is
    recreated without any actions (its original action set is not
    restored here — presumably intentional, confirm if a full rollback
    is required).
    """
    bind = op.get_bind()
    session = Session(bind=bind)
    # try/finally wraps the whole body so the session is closed even if
    # one of the queries (not only commit) raises.
    try:
        type_root = session.query(Type).filter_by(
            name='root').one_or_none()
        type_pool = session.query(Type).filter_by(
            name='pool').one_or_none()
        role_drplan_operator = Role(name=DRPLAN_OPERATOR, type_=type_pool,
                                    description='DR plan operator',
                                    lvl_id=type_pool.id, is_active=True)
        role_admin = Role(name=ADMIN, type_=type_root,
                          description='Hystax Admin', lvl_id=type_root.id,
                          is_active=True)
        # Query actions once (the original fetched them twice) and grant
        # all of them to the admin role.
        for action in session.query(Action).all():
            role_admin.assign_action(action)
        session.add(role_drplan_operator)
        session.add(role_admin)
        session.commit()
    finally:
        session.close()
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
"""fixed_users_unique_index

Revision ID: cd08c646c952
Revises: 86bb9ebc3c20
Create Date: 2024-01-28 05:01:58.248505
"""
from alembic import op
from sqlalchemy.exc import ProgrammingError


# revision identifiers, used by Alembic.
revision = 'cd08c646c952'
down_revision = '86bb9ebc3c20'
branch_labels = None
depends_on = None


def upgrade():
    """Create a unique index on user (email, deleted_at).

    Idempotent: a "Duplicate key name" error (index already exists) is
    swallowed; any other ProgrammingError propagates.
    """
    try:
        op.create_index('idx_user_email_unique', 'user',
                        ['email', 'deleted_at'],
                        unique=True)
    except ProgrammingError as exc:
        if "Duplicate key name" not in str(exc):
            # Bare raise keeps the original traceback intact.
            raise


def downgrade():
    """No-op: the unique index created in upgrade() is not dropped.

    NOTE(review): presumably the index is safe to keep on rollback —
    confirm before relying on a fully clean downgrade.
    """
    pass
2 changes: 1 addition & 1 deletion auth/auth_server/migrate.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ def save(self, host, username, password, db, file_name='alembic.ini'):
config.write(fh)


def execute(cmd, path='..'):
def execute(cmd, path='../..'):
LOG.debug('Executing command %s', ''.join(cmd))
myenv = os.environ.copy()
myenv['PYTHONPATH'] = path
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
import logging
from diworker.diworker.migrations.base import BaseMigration

"""
Replaced AWSBillingPeriodSearch and AWSRawSearch with one changed index
"""

LOG = logging.getLogger(__name__)
# Target layout: a single compound index ('AWSRawSearch') covering both
# billing-period and resource lookups on raw_expenses.
NEW_INDEXES = {
    'AWSRawSearch': (
        ['cloud_account_id', 'bill/BillingPeriodStartDate', 'resource_id'],
        {'bill/BillingPeriodStartDate': {'$exists': True}}
    )
}
# Indexes being replaced; kept here so downgrade() can restore them.
OLD_INDEXES = {
    'AWSBillingPeriodSearch': (
        ['cloud_account_id', 'bill/BillingPeriodStartDate'],
        {'bill/BillingPeriodStartDate': {'$exists': True}}
    ),
    'AWSRawSearch': (
        ['cloud_account_id', 'resource_id', 'bill/BillingPeriodStartDate'],
        {'bill/BillingPeriodStartDate': {'$exists': True}}
    )
}


class Migration(BaseMigration):
    """Swap the AWSBillingPeriodSearch / AWSRawSearch indexes on
    raw_expenses for a single compound index (and back on downgrade)."""

    @property
    def raw_collection(self):
        # Mongo collection holding the raw expense records.
        return self.db.raw_expenses

    def rebuild_indexes(self, old_indexes_map, new_indexes_map):
        """Create every index from new_indexes_map, then drop indexes
        that only appear in old_indexes_map.

        An existing index with the same name and the exact same key
        order is left untouched; a same-named index with different keys
        is dropped and recreated.
        """
        existing = {
            idx['name']: idx['key']
            for idx in self.raw_collection.list_indexes()
        }
        for name, (keys, partial_exp) in new_indexes_map.items():
            if name in existing:
                if list(existing[name]) == keys:
                    LOG.info('Skip index %s - already exists', name)
                    continue
                self.raw_collection.drop_index(name)
                LOG.info('Dropped index %s', name)
            self.raw_collection.create_index(
                [(field, 1) for field in keys],
                name=name,
                background=True,
                partialFilterExpression=partial_exp)
            LOG.info('Added index %s', name)
        for stale_name in old_indexes_map:
            if stale_name not in new_indexes_map:
                self.raw_collection.drop_index(stale_name)
                LOG.info('Dropped index %s', stale_name)

    def upgrade(self):
        self.rebuild_indexes(OLD_INDEXES, NEW_INDEXES)

    def downgrade(self):
        self.rebuild_indexes(NEW_INDEXES, OLD_INDEXES)
2 changes: 1 addition & 1 deletion docker_images/booking_observer/worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def mongo_cl(self):

def get_consumers(self, consumer, channel):
return [consumer(queues=[BOOKING_OBSERVER_TASK_QUEUE], accept=['json'],
callbacks=[self.process_task])]
callbacks=[self.process_task], prefetch_count=10)]

def process_booking_activities(self, task):
org_id = task.get('organization_id')
Expand Down
2 changes: 1 addition & 1 deletion docker_images/calendar_observer/worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ def rest_cl(self):

def get_consumers(self, consumer, channel):
return [consumer(queues=[TASK_QUEUE], accept=['json'],
callbacks=[self.process_task])]
callbacks=[self.process_task], prefetch_count=10)]

def observe_calendar(self, task):
org_id = task.get('organization_id')
Expand Down
2 changes: 1 addition & 1 deletion docker_images/herald_executor/worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,7 +288,7 @@ def auth_cl(self):

def get_consumers(self, consumer, channel):
return [consumer(queues=[TASK_QUEUE], accept=['json'],
callbacks=[self.process_task])]
callbacks=[self.process_task], prefetch_count=10)]

def get_auth_users(self, user_ids):
_, response = self.auth_cl.user_list(user_ids)
Expand Down
2 changes: 1 addition & 1 deletion docker_images/keeper_executor/worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -343,7 +343,7 @@ def action_func_details_map(self):

def get_consumers(self, consumer, channel):
return [consumer(queues=[TASK_QUEUE], accept=['json'],
callbacks=[self.process_task])]
callbacks=[self.process_task], prefetch_count=10)]

def get_user_id(self, token):
user_digest = hashlib.md5(token.encode('utf-8')).hexdigest()
Expand Down
2 changes: 1 addition & 1 deletion docker_images/organization_violations/worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def rest_cl(self):

def get_consumers(self, consumer, channel):
return [consumer(queues=[WORKER_TASK_QUEUE], accept=['json'],
callbacks=[self.process_task])]
callbacks=[self.process_task], prefetch_count=10)]

def publish_activities_tasks(self, tasks):
queue_conn = QConnection('amqp://{user}:{pass}@{host}:{port}'.format(
Expand Down
2 changes: 1 addition & 1 deletion docker_images/resource_observer/worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ def rest_cl(self):

def get_consumers(self, consumer, channel):
return [consumer(queues=[TASK_QUEUE], accept=['json'],
callbacks=[self.process_task])]
callbacks=[self.process_task], prefetch_count=10)]

def observe_resources(self, task):
org_id = task.get('organization_id')
Expand Down
2 changes: 1 addition & 1 deletion docker_images/resource_violations/worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ def rest_cl(self):

def get_consumers(self, consumer, channel):
return [consumer(queues=[TASK_QUEUE], accept=['json'],
callbacks=[self.process_task])]
callbacks=[self.process_task], prefetch_count=10)]

def process_violations(self, task):
org_id = task.get('organization_id')
Expand Down
2 changes: 1 addition & 1 deletion docker_images/slacker_executor/worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ def send(self, type_, params, channel_id=None, team_id=None,

def get_consumers(self, consumer, channel):
return [consumer(queues=[TASK_QUEUE], accept=['json'],
callbacks=[self.process_task])]
callbacks=[self.process_task], prefetch_count=10)]

@staticmethod
def ts_to_slacker_time_format(timestamp):
Expand Down
2 changes: 1 addition & 1 deletion docker_images/webhook_executor/worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ def mongo_cl(self):

def get_consumers(self, consumer, channel):
return [consumer(queues=[TASK_QUEUE], accept=['json'],
callbacks=[self.process_task])]
callbacks=[self.process_task], prefetch_count=10)]

def get_environment_meta(self, webhook, meta_info):
_, environment = self.rest_cl.cloud_resource_get(
Expand Down
2 changes: 1 addition & 1 deletion insider/insider_worker/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def discoveries(self):

def get_consumers(self, consumer, channel):
return [consumer(queues=[TASK_QUEUE], accept=['json'],
callbacks=[self.process_task])]
callbacks=[self.process_task], prefetch_count=10)]

def _process_task(self, task):
start_process_time = int(datetime.utcnow().timestamp())
Expand Down
2 changes: 1 addition & 1 deletion katara/katara_worker/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ def herald_routing_key(self):

def get_consumers(self, Consumer, channel):
return [Consumer(queues=[task_queue], accept=['json'],
callbacks=[self.process_task])]
callbacks=[self.process_task], prefetch_count=10)]

def put_herald_task(self, task_params):
exchange = Exchange(type='direct')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { FormattedMessage } from "react-intl";
import DetectedConstraintsHistoryTable from "components/DetectedConstraintsHistoryTable";
import SubTitle from "components/SubTitle";
import TableLoader from "components/TableLoader";
import OrganizationsLimitHitsService from "services/OrganizationsLimitHitsService";
import TypographyLoader from "components/TypographyLoader";
import { isEmpty as isEmptyArray } from "utils/arrays";
import {
EXPENSE_ANOMALY,
Expand All @@ -29,37 +29,28 @@ const mapConstraintTypeToTitleMessageId = (type) => {
);
};

const HistorySection = ({ children, type }) => (
<>
<SubTitle>
<FormattedMessage id={mapConstraintTypeToTitleMessageId(type)} />
</SubTitle>
{children}
</>
);

const DetectedConstraintsHistoryContainer = ({ constraint, isGetConstraintLoading = false }) => {
const { useGet } = OrganizationsLimitHitsService();

const { isLoading, data } = useGet(constraint.id);

if (isLoading || isGetConstraintLoading) {
const DetectedConstraintsHistory = ({ limitHits, constraint, isLoading = false }) => {
if (isLoading) {
return (
<HistorySection>
<>
<TypographyLoader />
<TableLoader columnsCounter={3} />
</HistorySection>
</>
);
}

if (isEmptyArray(data)) {
if (isEmptyArray(limitHits)) {
return null;
}

return (
<HistorySection type={constraint.type}>
<DetectedConstraintsHistoryTable limitHits={data} constraint={constraint} />
</HistorySection>
<>
<SubTitle>
<FormattedMessage id={mapConstraintTypeToTitleMessageId(constraint?.type)} />
</SubTitle>
<DetectedConstraintsHistoryTable limitHits={limitHits} constraint={constraint} />
</>
);
};

export default DetectedConstraintsHistoryContainer;
export default DetectedConstraintsHistory;
3 changes: 3 additions & 0 deletions ngui/ui/src/components/DetectedConstraintsHistory/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
// Barrel file: re-export the component so consumers can import it from
// the directory path ("components/DetectedConstraintsHistory").
import DetectedConstraintsHistory from "./DetectedConstraintsHistory";

export default DetectedConstraintsHistory;
Loading

0 comments on commit c4c8649

Please sign in to comment.