Skip to content

Commit

Permalink
extract aggregation queries to a new tag
Browse files Browse the repository at this point in the history
  • Loading branch information
svyatogor committed Jul 9, 2019
1 parent ea372b6 commit e987733
Show file tree
Hide file tree
Showing 4 changed files with 160 additions and 110 deletions.
171 changes: 64 additions & 107 deletions backend/renderers/tags/catalog.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,6 @@ import jsonic from 'jsonic'
import SearchService from '../../services/search'
const s = require('sugar')
const _ = require('lodash')
const {ObjectID} = require('mongodb')

const mapValuesDeep = (obj, iteree) =>
_.isArray(obj) ? _.map(obj, v => mapValuesDeep(v, iteree)) : (
_.isObject(obj) ? _.mapValues(obj, v => mapValuesDeep(v, iteree)) : iteree(obj)
)

export class catalog {
constructor(renderContext) {
Expand Down Expand Up @@ -51,122 +45,85 @@ export class catalog {
}
const key = opts.as
const originalValue = ctx[key]
let items
if (opts.aggregate) {
let aggregate = jsonic(opts.aggregate).map(step => mapValuesDeep(step, value => {
if (value && value.toString().startsWith('ID/')) return ObjectID(value.replace('ID/', ''))
else return value
}))
const countResult = await Item.collection.aggregate([...aggregate, {$count: 'count'}]).toArray()
ctx.itemsCount = countResult[0].count
if (opts.pageSize) {
let page = ctx.req.query.page
if (!page || isNaN(page)) {
page = 1
}
ctx.pageNumber = page
ctx.pagesCount = Math.ceil(ctx.itemsCount / opts.pageSize)
aggregate = [...aggregate, {$skip: (page - 1) * opts.pageSize}, {$limit: opts.pageSize}]
}

items = await Item.collection.aggregate(aggregate).toArray()

if (items.length === 0) {
callback(null, elseBody ? elseBody() : '')
return
}

const data = await Promise.all(map(items, async item => {
const currentItem = await item
ctx[key] = currentItem
return asyncBody()
}, {concurrency: 1})) // concurrency is crucial here as body() call reads current context

ctx[key] = originalValue
callback(null, data.join(''))
} else {
const filter = jsonic(opts.filter)
const rawFilter = opts.rawFilter ? eval(`(${opts.rawFilter})`) : {}

let criteria = {
site: ctx.site._id,
deleted: false,
catalog,
...filter,
...rawFilter,
}
const filter = jsonic(opts.filter)
const rawFilter = opts.rawFilter ? eval(`(${opts.rawFilter})`) : {}

let criteria = {
site: ctx.site._id,
deleted: false,
catalog,
...filter,
...rawFilter,
}

if (!isEmpty(opts.search) && opts.search.toString().length > 2) {
if (opts.fullText) {
const ids = await SearchService.simple_search(
opts.search.toString(),
`${catalog}-${ctx.site._id}`,
ctx.req.locale,
opts.search_fields ? opts.search_fields.split(',') : null,
)
criteria['_id'] = {$in: ids}
} else if (opts.search_fields) {
const searchFields = opts.search_fields.split(',')
const safeRegex = opts.search.match(/\w+|"[^"]+"/g).map(t => t.trim().replace(/"/g, '').replace(/[.*+?^${}()|[\]\\]/g, '\\$&'))
const searchExpressions = map(searchFields, field => (
{$or: safeRegex.map(t => ({[field]: {$regex: new RegExp(t), $options: 'i'}}))}
))
criteria = {
$and: [
criteria,
{$or: searchExpressions}
]
}
if (!isEmpty(opts.search) && opts.search.toString().length > 2) {
if (opts.fullText) {
const ids = await SearchService.simple_search(
opts.search.toString(),
`${catalog}-${ctx.site._id}`,
ctx.req.locale,
opts.search_fields ? opts.search_fields.split(',') : null,
)
criteria['_id'] = {$in: ids}
} else if (opts.search_fields) {
const searchFields = opts.search_fields.split(',')
const safeRegex = opts.search.match(/\w+|"[^"]+"/g).map(t => t.trim().replace(/"/g, '').replace(/[.*+?^${}()|[\]\\]/g, '\\$&'))
const searchExpressions = map(searchFields, field => (
{$or: safeRegex.map(t => ({[field]: {$regex: new RegExp(t), $options: 'i'}}))}
))
criteria = {
$and: [
criteria,
{$or: searchExpressions}
]
}
}
}

if (opts.random) {
const sampleRecords = await Item.aggregate().match(criteria).project({_id: 1}).sample(opts.limit)
const ids = sampleRecords.map(e => e._id)
criteria = {_id: {$in: ids}}
}
let itemsQuery = Item.find(criteria)

if (!opts.random) {
if (opts.limit) {
itemsQuery = itemsQuery.limit(opts.limit)
}
if (opts.random) {
const sampleRecords = await Item.aggregate().match(criteria).project({_id: 1}).sample(opts.limit)
const ids = sampleRecords.map(e => e._id)
criteria = {_id: {$in: ids}}
}
let itemsQuery = Item.find(criteria)

if (opts.sort) {
itemsQuery = itemsQuery.sort(opts.sort)
}
if (!opts.random) {
if (opts.limit) {
itemsQuery = itemsQuery.limit(opts.limit)
}

ctx.itemsCount = await Item.count(criteria)
if (opts.pageSize) {
let page = ctx.req.query.page
if (!page || isNaN(page)) {
page = 1
}
ctx.pageNumber = page
ctx.pagesCount = Math.ceil(ctx.itemsCount / opts.pageSize)
itemsQuery = itemsQuery.skip((page - 1) * opts.pageSize).limit(opts.pageSize)
if (opts.sort) {
itemsQuery = itemsQuery.sort(opts.sort)
}
}

items = await itemsQuery


if (items.length === 0) {
callback(null, elseBody ? elseBody() : '')
return
ctx.itemsCount = await Item.count(criteria)
if (opts.pageSize) {
let page = ctx.req.query.page
if (!page || isNaN(page)) {
page = 1
}
ctx.pageNumber = page
ctx.pagesCount = Math.ceil(ctx.itemsCount / opts.pageSize)
itemsQuery = itemsQuery.skip((page - 1) * opts.pageSize).limit(opts.pageSize)
}

const items = await itemsQuery

const data = await Promise.all(map(items, async item => {
const currentItem = await item.toContext(ctx.req)
ctx[key] = currentItem
return asyncBody()
}, {concurrency: 1})) // concurrency is crucial here as body() call reads current context

ctx[key] = originalValue
callback(null, data.join(''))
if (items.length === 0) {
callback(null, elseBody ? elseBody() : '')
return
}

const data = await Promise.all(map(items, async item => {
const currentItem = await item.toContext(ctx.req)
ctx[key] = currentItem
return asyncBody()
}, {concurrency: 1})) // concurrency is crucial here as body() call reads current context

ctx[key] = originalValue
callback(null, data.join(''))
} catch (e) {
console.log(e);
callback(e)
Expand Down
3 changes: 2 additions & 1 deletion backend/renderers/tags/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,5 @@ export * from './file_list'
export * from './paginate'
export * from './shopping_cart'
export * from './validate'
export * from './load'
export * from './load'
export * from './query'
3 changes: 1 addition & 2 deletions backend/renderers/tags/load.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import {defaults, map, isEmpty} from 'lodash'
import {defaults} from 'lodash'
import Promise from 'bluebird'
import {Item} from '../../models'
import jsonic from 'jsonic'

export class load {
constructor(renderContext) {
Expand Down
93 changes: 93 additions & 0 deletions backend/renderers/tags/query.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
import {defaults} from 'lodash'
import Promise from 'bluebird'
import {Item} from '../../models'
import jsonic from 'jsonic'
import _ from 'lodash'
const {ObjectID} = require('mongodb')

// Recursively applies `iteree` to every leaf value of a nested structure of
// arrays and plain objects, returning a new structure of the same shape.
// Inputs are not mutated. Uses native Array/Object iteration instead of
// lodash, which is sufficient here: values come from `jsonic` (JSON-like),
// so only arrays, plain objects and primitives occur — TODO confirm no
// callers pass functions or class instances.
const mapValuesDeep = (obj, iteree) => {
  if (Array.isArray(obj)) {
    return obj.map(v => mapValuesDeep(v, iteree))
  }
  if (obj !== null && typeof obj === 'object') {
    return Object.keys(obj).reduce((acc, k) => {
      acc[k] = mapValuesDeep(obj[k], iteree)
      return acc
    }, {})
  }
  return iteree(obj)
}

/**
 * Nunjucks extension implementing the `{% query %}…{% else %}…{% endquery %}`
 * block tag. Runs a MongoDB aggregation pipeline (taken from the tag's
 * `query` option, parsed with jsonic) against the `Item` collection, scoped
 * to the current site, and renders the tag body once per result document,
 * exposing each document to the template under the `as` key (default 'item').
 * Supports optional pagination via the `pageSize` option.
 */
export class query {
  constructor(renderContext) {
    this.tags = ['query']
  }

  /**
   * Parses the tag signature, the body up to `{% else %}` / `{% endquery %}`,
   * and the optional else-branch, returning an async call into `run`.
   */
  parse(parser, nodes, lexer) {
    const tok = parser.nextToken();

    const catalog = parser.parseSignature(null, true);
    parser.advanceAfterBlockEnd(tok.value);

    let body = parser.parseUntilBlocks('else', 'endquery');
    let elseBody = null;

    if (parser.skipSymbol('else')) {
      parser.skip(lexer.TOKEN_BLOCK_END);
      elseBody = parser.parseUntilBlocks('endquery');
    }

    parser.advanceAfterBlockEnd();

    return new nodes.CallExtensionAsync(this, 'run', catalog, [body, elseBody]);
  }

  /**
   * Executes the aggregation and renders the body per result.
   *
   * @param ctx       render context (destructured); mutated with itemsCount,
   *                  pageNumber, pagesCount and the `as`-keyed current item
   * @param collection  unused positional argument from the tag signature
   * @param options   tag options: query (required), as, pageSize
   * @param body      async body renderer (callback-style, promisified below)
   * @param elseBody  optional renderer used when no documents match
   * @param callback  nunjucks async-extension completion callback
   */
  async run({ctx}, collection, options, body, elseBody, callback) {
    // In inspect mode, render nothing and skip the database entirely.
    if (ctx.inspect) {
      return callback(null, '')
    }

    const opts = defaults(options, {as: 'item'})
    const key = opts.as
    // Preserve whatever the template already had under `key` so it can be
    // restored after the loop — body() reads the current context.
    const originalValue = ctx[key]
    const asyncBody = Promise.promisify(body)

    try {
      // Values prefixed with 'ID/' in the pipeline JSON are rehydrated into
      // ObjectIDs so they match against _id / reference fields.
      let aggregate = jsonic(opts.query).map(step => mapValuesDeep(step, value => {
        if (value && value.toString().startsWith('ID/')) return ObjectID(value.replace('ID/', ''))
        else return value
      }))
      // Always scope the pipeline to the current site.
      // NOTE(review): catalog.js scopes with ctx.site._id — confirm
      // ctx.req.site is the intended source here.
      aggregate = [
        {
          $match: {
            site: ObjectID(ctx.req.site._id)
          }
        },
        ...aggregate
      ]

      if (opts.pageSize) {
        const countResult = await Item.collection.aggregate([...aggregate, {$count: 'count'}]).toArray()
        // $count emits no document at all when the pipeline matches nothing,
        // so guard against an empty result instead of crashing on [0].count.
        ctx.itemsCount = countResult.length > 0 ? countResult[0].count : 0
        // Parse the page number explicitly; anything non-numeric or < 1
        // falls back to the first page.
        let page = parseInt(ctx.req.query.page, 10)
        if (!page || page < 1) {
          page = 1
        }
        ctx.pageNumber = page
        ctx.pagesCount = Math.ceil(ctx.itemsCount / opts.pageSize)
        aggregate = [...aggregate, {$skip: (page - 1) * opts.pageSize}, {$limit: opts.pageSize}]
      }

      const items = await Item.collection.aggregate(aggregate).toArray()

      if (items.length === 0) {
        callback(null, elseBody ? elseBody() : '')
        return
      }

      const data = await Promise.all(_.map(items, async item => {
        const currentItem = await item
        ctx[key] = currentItem
        return asyncBody()
      }, {concurrency: 1})) // concurrency is crucial here as body() call reads current context

      ctx[key] = originalValue
      callback(null, data.join(''))
    } catch (e) {
      console.log(e);
      callback(e)
    }
  }
}

0 comments on commit e987733

Please sign in to comment.