Spec Flexible Contributions Filtering (WICG#1290)
* Spec Flexible Contributions Filtering

* Update explainer

* update header-validator

* small fixes

* Remove post-parsing distinctions

* parse -> validate

* small fixes

* small fixes

* update contributions validator

* small fixes

* spec source registration time

* formatting

* small fixes

* Validate the size like in the spec

* Add filtering_id key

* Improve PR

* Merge branch main into spec-ara-flexbilit-contribution-filtering

* fix merge

* fix lint

* Use 0 filtering ID for debug reporting contributions

* remove duplicate test cases

* use constant instead of 0

* improve PR

* re-order description

* fix validator

* improve PR

* small improvements

* remove ctx

* Improve PR
agarant authored Jun 14, 2024
1 parent bf3f812 commit c5119ec
Showing 8 changed files with 442 additions and 69 deletions.
45 changes: 44 additions & 1 deletion AGGREGATE.md
@@ -315,7 +315,9 @@ encoded. The map will have the following structure:
"operation": "histogram", // Allows for the service to support other operations in the future
"data": [{
"bucket": <bucket, encoded as a 16-byte (i.e. 128-bit) big-endian bytestring>,
"value": <value, encoded as a 4-byte (i.e. 32-bit) big-endian bytestring>
"value": <value, encoded as a 4-byte (i.e. 32-bit) big-endian bytestring>,
// k is equal to the value `aggregatable_filtering_id_max_bytes`, defaults to 1 (i.e. 8-bit).
"id": <filtering ID, encoded as a k-byte big-endian bytestring, defaults to 0>
}, ...]
}
```
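
For illustration, the big-endian encoding above can be sketched in a few lines
of TypeScript. The helper below is hypothetical, not part of the spec or of any
browser implementation:

```ts
// Minimal sketch, assuming the value fits in the requested byte length.
function toBigEndianBytes(n: bigint, byteLength: number): Uint8Array {
  const out = new Uint8Array(byteLength)
  for (let i = byteLength - 1; i >= 0; i--) {
    out[i] = Number(n & 0xffn) // lowest byte goes last (big-endian)
    n >>= 8n
  }
  if (n !== 0n) throw new RangeError(`value does not fit in ${byteLength} bytes`)
  return out
}

// bucket: 16 bytes, value: 4 bytes, id: k bytes, where k equals
// aggregatable_filtering_id_max_bytes (1 by default).
const bucket = toBigEndianBytes(1369n, 16)
const value = toBigEndianBytes(32768n, 4)
const id = toBigEndianBytes(23n, 1)
```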
@@ -507,6 +509,47 @@ A similar design was proposed for the
[Private Aggregation API](https://github.com/patcg-individual-drafts/private-aggregation-api/blob/main/report_verification.md#shared-storage)
for the purpose of report verification.

### Optional: flexible contribution filtering with filtering IDs

The values in a trigger registration's `aggregatable_values` dictionary can be
either plain integers or dictionaries with an optional `filtering_id` field.

```jsonc
{
  ...,  // existing fields
  "aggregatable_filtering_id_max_bytes": 2,  // defaults to 1
  "aggregatable_values": {
    "campaignCounts": 32768,
    "geoValue": {
      "value": 1664,
      "filtering_id": "23"  // must fit within <aggregatable_filtering_id_max_bytes> bytes
    }
  }
}
```

These IDs will be included in the contributions of the encrypted aggregatable
report payload.

Queries to the aggregation service can provide a list of allowed filtering IDs;
all contributions whose IDs are not on that list are filtered out.
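
Conceptually, that filtering step behaves like the TypeScript sketch below. The
type and function names are illustrative; the actual service operates on
decrypted payloads inside its trusted environment:

```ts
type Contribution = { bucket: bigint; value: number; filteringId: bigint }

// Drop every contribution whose filtering ID is not in the query's allow-list.
function filterByAllowedIds(
  contributions: Contribution[],
  allowedIds: Set<bigint>
): Contribution[] {
  return contributions.filter((c) => allowedIds.has(c.filteringId))
}
```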

Filtering IDs must be unsigned integers that fit within a small number of
bytes, one byte (8 bits) by default. We limit the size of the ID space to
avoid unnecessarily increasing the payload size and, with it, storage and
processing costs.
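
The implied bounds check is simple: an ID must lie in [0, 256^maxBytes - 1].
A hypothetical helper, assuming the byte limit taken from
`aggregatable_filtering_id_max_bytes`:

```ts
function isValidFilteringId(id: bigint, maxBytes: number): boolean {
  return id >= 0n && id < 256n ** BigInt(maxBytes)
}

isValidFilteringId(23n, 1)  // true: fits in the default single byte
isValidFilteringId(300n, 1) // false: exceeds 255
isValidFilteringId(300n, 2) // true once the maximum is raised to 2 bytes
```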

The byte limit can be increased via the `aggregatable_filtering_id_max_bytes`
field. Because the resulting larger payload would otherwise make these reports
distinguishable and amplify a counting attack, the browser unconditionally
sends an aggregatable report on every trigger registration that uses a
non-default (greater than 1) max bytes; if the trigger registration did not
generate an attribution report, a null report is sent instead. The source
registration time is always excluded from aggregatable reports with a
non-default max bytes. This behavior is the same as when a trigger context ID
is set.
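
The scheduling rule described above can be summarized in a small sketch; this
is illustrative only, not the actual browser logic:

```ts
// A non-default max bytes (or a trigger context ID) forces a report to be
// sent, a null one if no attribution report was generated, so payload size
// cannot reveal whether attribution occurred.
function sendsReportUnconditionally(
  filteringIdMaxBytes: number,
  triggerContextId: string | undefined
): boolean {
  return filteringIdMaxBytes > 1 || triggerContextId !== undefined
}
```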

See [flexible_filtering.md](https://github.com/patcg-individual-drafts/private-aggregation-api/blob/main/flexible_filtering.md) for more details.

## Data processing through a Secure Aggregation Service

The exact design of the service is not specified here. We expect to have more
148 changes: 127 additions & 21 deletions index.bs

Large diffs are not rendered by default.

6 changes: 6 additions & 0 deletions ts/src/constants.ts
@@ -51,6 +51,12 @@ export const defaultTriggerDataCardinality: Readonly<
[SourceType.navigation]: 8n,
}

export const defaultAggregatableFilteringIdMaxBytes: number = 1

export const maxAggregatableFilteringIdMaxBytesValue: number = 8

export const defaultFilteringIdValue: bigint = 0n

export const sourceAggregatableDebugTypes: Readonly<[string, ...string[]]> = [
'source-channel-capacity-limit',
'source-destination-global-rate-limit',
30 changes: 16 additions & 14 deletions ts/src/header-validator/aggregatable-contributions.test.ts
@@ -87,8 +87,8 @@ void test('basic', () => {
const aggregatableValuesCfgs: AggregatableValuesConfiguration[] = [
{
values: new Map([
['key1', 32768],
['key2', 1664],
['key1', { value: 32768, filteringId: 25n }],
['key2', { value: 1664, filteringId: 0n }],
]),
positive: [],
negative: [],
@@ -109,10 +109,12 @@
{
key: 1369n,
value: 32768,
filteringId: 25n,
},
{
key: 2693n,
value: 1664,
filteringId: 0n,
},
])
})
@@ -151,7 +153,7 @@ void test('values-filtered', async (t) => {
assert.deepEqual(
createWith([
{
values: new Map([['key1', 32768]]),
values: new Map([['key1', { value: 32768, filteringId: 0n }]]),
positive: [
{
map: new Map([['product', new Set(['2'])]]),
@@ -169,7 +171,7 @@
assert.deepEqual(
createWith([
{
values: new Map([['key1', 32768]]),
values: new Map([['key1', { value: 32768, filteringId: 0n }]]),
positive: [
{
map: new Map([['product', new Set(['2'])]]),
@@ -179,7 +181,7 @@
negative: [],
},
{
values: new Map([['key2', 1664]]),
values: new Map([['key2', { value: 1664, filteringId: 0n }]]),
positive: [
{
map: new Map([['product', new Set(['1'])]]),
@@ -189,15 +191,15 @@
negative: [],
},
]),
[{ key: 1029n, value: 1664 }]
[{ key: 1029n, value: 1664, filteringId: 0n }]
)
)

await t.test('second-entry-ignored', () =>
assert.deepEqual(
createWith([
{
values: new Map([['key1', 32768]]),
values: new Map([['key1', { value: 32768, filteringId: 0n }]]),
positive: [
{
map: new Map([['product', new Set(['1'])]]),
@@ -207,7 +209,7 @@
negative: [],
},
{
values: new Map([['key2', 1664]]),
values: new Map([['key2', { value: 1664, filteringId: 0n }]]),
positive: [
{
map: new Map([['product', new Set(['1'])]]),
@@ -217,15 +219,15 @@
negative: [],
},
]),
[{ key: 1369n, value: 32768 }]
[{ key: 1369n, value: 32768, filteringId: 0n }]
)
)

await t.test('filters-matched-keys-mismatched-no-contributions', () =>
assert.deepEqual(
createWith([
{
values: new Map([['key3', 32768]]),
values: new Map([['key3', { value: 32768, filteringId: 0n }]]),
positive: [
{
map: new Map([['product', new Set(['1'])]]),
@@ -237,7 +239,7 @@
// Shouldn't contribute as only the first aggregatable values
// entry with matching filters is considered
{
values: new Map([['key2', 1664]]),
values: new Map([['key2', { value: 1664, filteringId: 0n }]]),
positive: [
{
map: new Map([['product', new Set(['1'])]]),
@@ -255,7 +257,7 @@
assert.deepEqual(
createWith([
{
values: new Map([['key1', 32768]]),
values: new Map([['key1', { value: 32768, filteringId: 0n }]]),
positive: [],
negative: [
{
@@ -265,7 +267,7 @@
],
},
{
values: new Map([['key2', 1664]]),
values: new Map([['key2', { value: 1664, filteringId: 0n }]]),
positive: [
{
map: new Map([['product', new Set(['1'])]]),
@@ -275,7 +277,7 @@
negative: [],
},
]),
[{ key: 1029n, value: 1664 }]
[{ key: 1029n, value: 1664, filteringId: 0n }]
)
)
})
3 changes: 2 additions & 1 deletion ts/src/header-validator/aggregatable-contributions.ts
@@ -11,6 +11,7 @@ import {
export type AggregatableContribution = {
key: bigint
value: number
filteringId: bigint
}

// https://wicg.github.io/attribution-reporting-api/#create-aggregatable-contributions-from-aggregation-keys-and-aggregatable-values
@@ -24,7 +25,7 @@ function createAggregatableContributionsFromKeysAndValues(
if (value === undefined) {
continue
}
contributions.push({ key, value })
contributions.push({ key, ...value })
}
return contributions
}
22 changes: 19 additions & 3 deletions ts/src/header-validator/to-json.ts
@@ -309,17 +309,30 @@ function serializeAggregatableTriggerDatum(
}
}

export type AggregatableValues = {
[key: string]: {
value: number
filtering_id: string
}
}

export type AggregatableValuesConfiguration = FilterPair & {
values: { [key: string]: number }
values: AggregatableValues
}

function serializeAggregatableValuesConfiguration(
c: parsed.AggregatableValuesConfiguration
): AggregatableValuesConfiguration {
const values: AggregatableValues = {}
for (const [key, value] of c.values.entries()) {
values[key] = {
value: value.value,
filtering_id: value.filteringId.toString(),
}
}
return {
...serializeFilterPair(c),

values: Object.fromEntries(c.values.entries()),
values,
}
}

@@ -328,6 +341,7 @@ export type Trigger = CommonDebug &
aggregatable_deduplication_keys: AggregatableDedupKey[]
aggregatable_source_registration_time: string
aggregatable_trigger_data: AggregatableTriggerDatum[]
aggregatable_filtering_id_max_bytes: number
aggregatable_values: AggregatableValuesConfiguration[]
aggregation_coordinator_origin: string
event_trigger_data: EventTriggerDatum[]
@@ -355,6 +369,8 @@ export function serializeTrigger(
serializeAggregatableTriggerDatum
),

aggregatable_filtering_id_max_bytes: t.aggregatableFilteringIdMaxBytes,

aggregatable_values: Array.from(
t.aggregatableValuesConfigurations,
serializeAggregatableValuesConfiguration
