Skip to content

Commit

Permalink
Decreased page size and improved debug logging for jira mapping #277
Browse files Browse the repository at this point in the history
  • Loading branch information
SteveMcGrath committed Jul 9, 2024
1 parent 8828e56 commit 5e6cee0
Show file tree
Hide file tree
Showing 2 changed files with 21 additions and 12 deletions.
6 changes: 3 additions & 3 deletions tenb2jira/jira/api/iterator.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@


class JiraIterator(APIIterator):
limit = 1000
limit = 100

def _get_page(self):
params = self.params
Expand All @@ -24,7 +24,7 @@ def _get_page(self):
def search_generator(api: 'JiraAPI',
jql: dict,
fields: list[str],
limit: int = 1000
limit: int = 100
):
query = {
'jql': jql,
Expand All @@ -42,4 +42,4 @@ def search_generator(api: 'JiraAPI',
page = api.issues.search(**query)
page_counter += 1
max_results = page.total
yield page.issues
yield page.issues, max_results, page_counter
27 changes: 18 additions & 9 deletions tenb2jira/processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ def build_mapping_db_model(self,
model: Base,
fields: dict[str, str],
pk: str,
limit: int = 1000
limit: int = 100
):
"""
Queries Jira and builds the database cache based off of the results of
Expand All @@ -104,31 +104,40 @@ def build_mapping_db_model(self,
jql = (f'project = "{key}" AND issuetype = "{issuetype}" '
f'AND status not in ({cmap})'
)
total = 0
with Session(self.engine) as s:
for page in search_generator(api=self.jira.api,
jql=jql,
fields=list(fields.keys())
):
for ptuple in search_generator(api=self.jira.api,
jql=jql,
fields=list(fields.keys()),
limit=limit
):
page, total, pagenum = ptuple
log.debug(f'Processing page {pagenum}:{limit} of {total}')
issues = []
for issue in page:
item = {}
skip = False
missing = []
for key, value in issue.fields.items():
if value is None:
skip = True
missing.append(key)
if isinstance(value, list):
value = value[0]
item[fields[key]] = value
# item = {fields[k]: v for k, v in issue.fields.items()}
item['updated'] = self.start_time
item['jira_id'] = issue.key
if not skip:
if not missing:
log.debug(f'Adding {issue.key} to cache.')
issues.append(model(**item).asdict())
else:
log.debug(f'Skipping {issue.key} '
f'for missing {missing}')
if issues:
stmt = insert(model).values(issues)\
.prefix_with('OR IGNORE')
s.execute(stmt)
s.commit()
log.debug(f'SQLCache for {model} is {s.query(model).count()} of {total}')

def build_cache(self):
"""
Expand Down Expand Up @@ -445,7 +454,7 @@ def sync(self, cleanup: bool = True):
# log those exceptions and increment the exception counter.
for job in jobs:
if job.exception():
log.exception(job.exception())
log.exception(job.exception(), stack_info=True)
exc_count += 1

# If we have a non-zero value from the exception counter, then
Expand Down

0 comments on commit 5e6cee0

Please sign in to comment.