diff --git a/package.json b/package.json
index ca9492ee..79f070d2 100644
--- a/package.json
+++ b/package.json
@@ -16,12 +16,14 @@
     "io-ts": "2.2.9",
     "knex": "0.21.1",
     "lodash": "^4.17.21",
+    "node-fetch": "~2.6.1",
     "pg": "^8.5.1"
   },
   "devDependencies": {
     "@types/hapi__hapi": "20.0.8",
     "@types/lodash": "^4.14.170",
     "@types/node": "^14.0.5",
+    "@types/node-fetch": "^2.5.7",
     "@types/restify": "8.4.2",
     "@unocha/hpc-repo-tools": "^0.1.3",
     "eslint": "^7.29.0",
diff --git a/src/auth/cache.ts b/src/auth/cache.ts
new file mode 100644
index 00000000..51fb3f36
--- /dev/null
+++ b/src/auth/cache.ts
@@ -0,0 +1,79 @@
+/**
+ * Lightweight in-memory cache to speed up authorization processes.
+ *
+ * TODO: extend this with some cross-container cache such as redis or memcached
+ */
+
+import { createHash } from 'crypto';
+
+const sha256 = (str: string) => createHash('sha256').update(str).digest('hex');
+
+export type HIDInfo = {
+  userId: string;
+  given_name: string;
+  family_name: string;
+  email: string;
+};
+
+export type HIDResponse =
+  | {
+      type: 'success';
+      info: HIDInfo;
+    }
+  | {
+      type: 'forbidden';
+      message: string;
+    };
+
+type CachedValue<T> = {
+  value: T;
+  time: Date;
+};
+
+export class HashTableCache<V> {
+  /**
+   * The number of milliseconds a value should remain valid for
+   */
+  private readonly cacheItemLifetimeMs: number;
+
+  private map = new Map<string, CachedValue<V>>();
+
+  constructor(opts: { cacheItemLifetimeMs: number }) {
+    this.cacheItemLifetimeMs = opts.cacheItemLifetimeMs;
+  }
+
+  public store = (key: string, value: V, cacheTime?: Date): void => {
+    this.map.set(sha256(key), {
+      value,
+      time: cacheTime || new Date(),
+    });
+    this.clearExpiredValues();
+  };
+
+  public get = (key: string): V | null => {
+    const item = this.map.get(sha256(key));
+
+    if (item && item.time.getTime() + this.cacheItemLifetimeMs > Date.now()) {
+      return item.value;
+    }
+
+    return null;
+  };
+
+  public clearExpiredValues = (): void => {
+    const now = Date.now();
+    for (const [key, item] of this.map.entries()) {
+      if (item.time.getTime() + this.cacheItemLifetimeMs < now) {
+        this.map.delete(key);
+      }
+    }
+  };
+
+  public clear = (): void => this.map.clear();
+
+  public size = (): number => this.map.size;
+}
+
+export const HID_CACHE = new HashTableCache<HIDResponse>({
+  cacheItemLifetimeMs: 5 * 60 * 1000, // 5 minutes
+});
diff --git a/src/auth/hid.ts b/src/auth/hid.ts
new file mode 100644
index 00000000..668c18e1
--- /dev/null
+++ b/src/auth/hid.ts
@@ -0,0 +1,92 @@
+import * as t from 'io-ts';
+import * as fetch from 'node-fetch';
+
+import { URL } from 'url';
+import Context from '../Context';
+import { Request } from '../Request';
+import { ForbiddenError } from '../util/error';
+import { HIDInfo, HID_CACHE } from './cache';
+
+// Export for mocking purposes inside test cases
+export { fetch };
+
+const HID_ACCOUNT_INFO = t.type({
+  given_name: t.string,
+  family_name: t.string,
+  user_id: t.string,
+  email: t.string,
+});
+
+/**
+ * Use the Authentication headers to get the HID token for the request,
+ * and use that to determine what HID user the user is logged in as by
+ * requesting account information from HID using the token.
+ *
+ * If this has already been done for the current request, the previous result
+ * will be returned and no requests will be made. Therefore it is safe and
+ * efficient to use this function multiple times.
+ */
+export const getHidInfo = async (
+  context: Pick<Context, 'token' | 'request' | 'config'>
+): Promise<(HIDInfo & Request['apiAuth']) | undefined> => {
+  const { token, request, config } = context;
+  if (!token || !request || !config) {
+    return undefined;
+  }
+  const existing = HID_CACHE.get(token);
+
+  if (existing) {
+    if (existing.type === 'success') {
+      request.apiAuth = {
+        ...(request.apiAuth || {}),
+        userId: existing.info.userId,
+      };
+
+      return existing.info;
+    }
+
+    throw new ForbiddenError(existing.message);
+  } else {
+    const accountUrl = new URL('/account.json', config.authBaseUrl);
+
+    const res = await fetch.default(accountUrl, {
+      headers: {
+        Authorization: `Bearer ${token}`,
+      },
+    });
+
+    if (!res.ok) {
+      if (res.status === 401) {
+        const r = await res.json();
+        const message = r.message || 'Invalid Token';
+        HID_CACHE.store(token, { type: 'forbidden', message });
+        throw new ForbiddenError(message);
+      } else {
+        throw new ForbiddenError(
+          `Unexpected error from HID: ${res.statusText}`
+        );
+      }
+    }
+
+    const data = await res.json();
+
+    if (!HID_ACCOUNT_INFO.is(data)) {
+      throw new ForbiddenError('Got invalid data from HID');
+    }
+
+    const info: HIDInfo = {
+      userId: data.user_id,
+      family_name: data.family_name,
+      given_name: data.given_name,
+      email: data.email,
+    };
+    HID_CACHE.store(token, { type: 'success', info });
+
+    request.apiAuth = {
+      ...(request.apiAuth || {}),
+      userId: info.userId,
+    };
+
+    return info;
+  }
+};
diff --git a/src/auth/index.ts b/src/auth/index.ts
new file mode 100644
index 00000000..45ea0cb5
--- /dev/null
+++ b/src/auth/index.ts
@@ -0,0 +1,655 @@
+import * as crypto from 'crypto';
+import { promisify } from 'util';
+import Context from '../Context';
+import authGrant from '../db/models/authGrant';
+import { default as authGrantee } from '../db/models/authGrantee';
+import authInvite from '../db/models/authInvite';
+import { AuthTargetId, default as authTarget } from '../db/models/authTarget';
+import { default as authToken } from '../db/models/authToken';
+import governingEntity from '../db/models/governingEntity';
+import operationCluster from '../db/models/operationCluster';
+import {
+  default as participant,
+  ParticipantId,
+} from '../db/models/participant';
+import participantRole from '../db/models/participantRole';
+import role from '../db/models/role';
+import rolePermittedAction from '../db/models/rolePermittedAction';
+import { organizeObjectsByUniqueProperty } from '../db/util';
+import { createDeferredFetcher } from '../db/util/deferred';
+import { InstanceOfModel } from '../db/util/types';
+import {
+  createRootContext,
+  createRootContextFromRequest,
+  LogContext,
+} from '../logging/logging';
+import { isDefined } from '../util';
+import { createBrandedValue } from '../util/types';
+import * as hid from './hid';
+import * as perms from './permissions';
+import { PermittedActionIdString } from './role.data';
+import {
+  calculatePermissionsFromRolesGrant,
+  filterValidRoleStrings,
+  RolesGrant,
+} from './roles';
+
+const randomBytes = promisify(crypto.randomBytes);
+
+export const P = perms.AUTH_PERMISSIONS;
+
+type ParticipantInstance = InstanceOfModel<ReturnType<typeof participant>>;
+type AuthGranteeInstance = InstanceOfModel<ReturnType<typeof authGrantee>>;
+type AuthTargetInstance = InstanceOfModel<ReturnType<typeof authTarget>>;
+type AuthTokenInstance = InstanceOfModel<ReturnType<typeof authToken>>;
+
+/**
+ * Extend the existing permitted actions with new actions.
+ *
+ * Right now, lists of permitted actions for each role are stored in
+ * the database using the tables permittedAction and rolePermittedAction.
+ * + * TODO: These tables should be removed, + * and the mapping should be stored in code instead. + * + * In the meantime, for new code requiring new actions, + * they can be added to this file and avoid the need for a migration or + * modifying roles in any way. + */ +type ExtendedPermittedAction = PermittedActionIdString | perms.GlobalPermission; + +type LegacySyncMethod = (param: { + actor: ParticipantId; + log: LogContext; + target: AuthTargetInstance; + grantee: AuthGranteeInstance; +}) => Promise; + +/** + * Mapping from role name to additional actions to grant in addition to those + * in the database. + * + * These additional permissions apply regardless as to whether or not a + * participantRole is tied to a specific object or not. + * + * @deprecated + */ +const ADDITIONAL_ACTIONS: { + [id: string]: ExtendedPermittedAction[] | undefined; +} = { + hpcadmin: Object.values(perms.AUTH_PERMISSIONS.global), + planlead: ['canCreateOrganizations', 'viewCategories'], +}; + +const activateInvitesForEmail = async ( + context: Pick, + participantId: ParticipantId, + email: string, + log: LogContext, + syncWithLegacy?: LegacySyncMethod +) => { + const invites = await authInvite(context.connection).find({ + where: { email }, + }); + + // Activate any invitations + if (invites.length > 0) { + const grantee = await authGrantee(context.connection).create({ + type: 'user', + granteeId: participantId, + }); + const targets = await authTarget(context.connection).find({ + where: (builder) => + builder.whereIn( + 'id', + invites.map((i) => i.target) + ), + }); + + const targetsMap = new Map(); + for (const obj of targets) { + targetsMap.set(obj.id, obj); + } + + for (const invite of invites) { + const target = targetsMap.get(invite.target); + /* istanbul ignore if - should not be possible due to sequelize constraints */ + if (!target) { + throw new Error('Missing target'); + } + + await authGrant(context.connection).create( + { + grantee: grantee.id, + target: invite.target, + roles: invite.roles, + }, + invite.actor, + invite.updatedAt + ); + + if (syncWithLegacy) { + await syncWithLegacy({ + grantee, + target, + actor: invite.actor, + log, + }); + } + } + + await authInvite(context.connection).destroy({ + where: { + email, + }, + }); + } +}; + +export const getLoggedInParticipant = async ( + context: Context, + log?: LogContext, + syncWithLegacy?: LegacySyncMethod +): Promise => { + // Check if we already have a participant for this request + if (context.request.apiAuth?.participant) { + return context.request.apiAuth.participant; + } + + if (!log) { + const path = + typeof context.request.path === 'string' + ? 
context.request.path + : context.request.path?.(); + + log = createRootContext( + { + hpc: { + req: { + method: context.request.method, + path, + origin: context.request.headers?.origin, + }, + }, + }, + context.config + ); + } + + if (!context.request) { + throw new Error('Request object not provided'); + } + + const tokenPromise = getParticipantFromToken(context); + + const hidPromise = hid + .getHidInfo(context) + .then((hidInfo) => ({ result: 'success' as const, hidInfo })) + .catch((error) => ({ result: 'error' as const, error })); + + // Check if we have our own token before we check HID + const tokenParticipant = await tokenPromise; + if (tokenParticipant) { + if (tokenParticipant.hidId) { + if (!context.request.apiAuth) { + context.request.apiAuth = { + userId: tokenParticipant.hidId, + }; + } + context.request.apiAuth.participant = tokenParticipant; + } + return tokenParticipant; + } + + // We don't have our own token, check HID + const hidInfoResult = await hidPromise; + if (hidInfoResult.result === 'error') { + throw hidInfoResult.error; + } + const { hidInfo } = hidInfoResult; + if (!hidInfo?.userId) { + return undefined; + } + hidInfo.participant = + (await participant(context.connection).findOne({ + where: { hidId: hidInfo.userId }, + })) || undefined; + if (!hidInfo.participant) { + // Create a new participant for this HID account + // and transfer over all invites + hidInfo.participant = await participant(context.connection).create({ + email: hidInfo.email, + hidId: hidInfo.userId, + name_given: hidInfo.given_name, + name_family: hidInfo.family_name, + }); + + await activateInvitesForEmail( + context, + hidInfo.participant.id, + hidInfo.email, + log, + syncWithLegacy + ); + } else if (hidInfo.participant.email !== hidInfo.email) { + // Update the user's email and activate any invites + hidInfo.participant = ( + await participant(context.connection).update({ + values: { + email: hidInfo.email, + }, + where: { + hidId: createBrandedValue(hidInfo.userId), + }, + }) + )[0]; + + await activateInvitesForEmail( + context, + hidInfo.participant.id, + hidInfo.email, + log, + syncWithLegacy + ); + } + + if (!context.request.apiAuth) { + context.request.apiAuth = { + userId: hidInfo.userId, + }; + } + + context.request.apiAuth.participant = hidInfo.participant; + + return hidInfo.participant; +}; + +/** + * Check whether the specified action is allowed to be performed by the actor + * behind the current request. + */ +export const actionIsPermitted = async ( + context: Context, + condition: perms.RequiredPermissionsCondition +): Promise => { + if (condition === 'anyone') { + return true; + } + + return perms.hasRequiredPermissions( + await calculatePermissions(context), + condition + ); +}; + +/** + * Validate the given target, and roles for the given target, + * returning a RolesGrant instance if valid, and throwing an error if not. 
+ */ +export const calculateRolesGrantFromTargetAndRoleStrings = ( + target: AuthTargetInstance, + roles: string[], + logContext: LogContext +): RolesGrant => { + if (target.type === 'global') { + return { + type: 'global', + roles: filterValidRoleStrings('global', roles, logContext), + }; + } else if (target.targetId) { + if (target.type === 'operation') { + return { + type: 'operation', + id: target.targetId, + roles: filterValidRoleStrings('operation', roles, logContext), + }; + } else if (target.type === 'operationCluster') { + return { + type: 'operationCluster', + id: target.targetId, + roles: filterValidRoleStrings('operationCluster', roles, logContext), + }; + } else if (target.type === 'plan') { + return { + type: 'plan', + id: target.targetId, + roles: filterValidRoleStrings('plan', roles, logContext), + }; + } else if (target.type === 'project') { + return { + type: 'project', + id: target.targetId, + roles: filterValidRoleStrings('project', roles, logContext), + }; + } else if (target.type === 'governingEntity') { + return { + type: 'governingEntity', + id: target.targetId, + roles: filterValidRoleStrings('governingEntity', roles, logContext), + }; + } + } + + throw new Error(`Invalid authTarget: ${target.type}:${target.targetId}`); +}; + +export const getRoleGrantsForUsers = async ({ + context, + users, + logContext, +}: { + context: Pick; + users: ParticipantId[]; + logContext: LogContext; +}): Promise> => { + // Get the grantees + const grantees = await authGrantee(context.connection).find({ + where: (builder) => + builder.where('type', 'user').whereIn('granteeId', users), + }); + + if (grantees.length === 0) { + return new Map(); + } + + const granteesById = organizeObjectsByUniqueProperty(grantees, 'id'); + const grants = await authGrant(context.connection).find({ + where: (builder) => + builder.whereIn( + 'grantee', + grantees.map((g) => g.id) + ), + }); + const targets = await authTarget(context.connection).find({ + where: (builder) => + builder.whereIn( + 'id', + grants.map((g) => g.target) + ), + }); + const targetsById = organizeObjectsByUniqueProperty(targets, 'id'); + + const result = new Map(); + + for (const grant of grants) { + const grantee = granteesById.get(grant.grantee); + if (!grantee) { + throw new Error(`Unexpected missing grantee`); + } + + const target = targetsById.get(grant.target); + if (!target) { + throw new Error('Unexpected missing target'); + } + + const participantId: ParticipantId = createBrandedValue(grantee.granteeId); + let list = result.get(participantId); + if (!list) { + list = []; + result.set(participantId, list); + } + + list.push( + calculateRolesGrantFromTargetAndRoleStrings( + target, + grant.roles, + logContext + ) + ); + } + + return result; +}; + +export const getRoleGrantsForUser = async ({ + context, + user, + logContext, +}: { + context: Pick; + user: ParticipantId; + logContext: LogContext; +}): Promise => + ( + await getRoleGrantsForUsers({ + context, + users: [user], + logContext, + }) + ).get(user) || []; + +/** + * Calculate the complete set of permissions for the actor of the current req + * + * TODO: cache this calculation, and also store it in the request object + */ +export const calculatePermissions = async ( + context: Context +): Promise => { + const allowed: perms.GrantedPermissions = {}; + + const logContext = await createRootContextFromRequest(context); + + // Both legacy and new permissions require us to know the current participant + // rather than require both simultaneously race to calculate this, + // 
calculate it once here (as it's cached) + const participant = await getLoggedInParticipant(context, logContext); + if (!participant) { + return allowed; + } + + const mergeAllowedPermissions = < + Type extends keyof Omit + >( + type: Type, + additions: Map>> + ) => { + let existingMap = allowed[type] as + | Map>> + | undefined; + if (!existingMap) { + existingMap = allowed[type] = new Map(); + } + for (const [key, permissions] of additions.entries()) { + let set = existingMap.get(key); + if (!set) { + existingMap.set(key, (set = new Set())); + } + for (const p of permissions) { + set.add(p); + } + } + }; + + const addLegacyPermissions = async () => { + const globallyPermitted = await getLegacyGloballyPermittedActions(context); + if (globallyPermitted.size === 0) { + return; + } + allowed.global = allowed.global || new Set(); + for (const permission of globallyPermitted) { + allowed.global.add(permission); + } + }; + + const addGrantedPermissions = async () => { + const grants = await getRoleGrantsForUser({ + context, + user: participant.id, + logContext, + }); + const fetchers = { + governingEntity: createDeferredFetcher( + governingEntity(context.connection) + ), + operationCluster: createDeferredFetcher( + operationCluster(context.connection) + ), + }; + // Calculate the allowed permissions for each of these grants + const allowedFromGrants = await Promise.all( + grants.map((grant) => calculatePermissionsFromRolesGrant(grant, fetchers)) + ); + // Merge these granted permissions + for (const granted of allowedFromGrants) { + if (!granted) { + continue; + } + if (granted.global) { + allowed.global = allowed.global || new Set(); + for (const p of granted.global) { + allowed.global.add(p); + } + } + for (const [key, obj] of Object.entries(granted) as [ + keyof typeof granted, + any + ][]) { + if (key !== 'global') { + mergeAllowedPermissions(key, obj); + } + } + } + }; + + await Promise.all([addLegacyPermissions(), addGrantedPermissions()]); + + return allowed; +}; + +/** + * TODO: implement cache (with auto-expiration) + * + * TODO: once all global roles have been migrated + * to the new authentication system, this should be updated + */ +export const getLegacyGloballyPermittedActions = async ( + context: Context +): Promise> => { + const participant = await getLoggedInParticipant(context); + if (!participant) { + return new Set(); + } + const permittedActionStrings: ExtendedPermittedAction[] = []; + const participantRoles = await participantRole(context.connection).find({ + where: { + participantId: participant.id, + }, + }); + + /** + * Role IDs that apply to the current user that may/may not be associated + * with a particular object. + */ + const anyRoleIDs = participantRoles.map((pr) => pr.roleId).filter(isDefined); + /** + * Role IDs that apply to the current user and are NOT associated with a + * particular object. 
+ */ + const globalRoleIDs = participantRoles + .filter((pr) => !pr.objectType && !pr.objectId) + .map((pr) => pr.roleId) + .filter(isDefined); + + // Add the extended / additional actions + // (these apply regardless of object association) + for (const r of await role(context.connection).find({ + where: (builder) => builder.whereIn('id', anyRoleIDs), + })) { + permittedActionStrings.push(...(ADDITIONAL_ACTIONS[r.name || ''] || [])); + } + + // Add the legacy permitted actions (only apply when not associated) + + // TODO this should be in code, and shouldn't require a DB query + // Add the permitted actions only for the roles that are not tied to any + // particular object (i.e. are global). + const permittedActions = await rolePermittedAction(context.connection).find({ + where: (builder) => builder.whereIn('roleId', globalRoleIDs), + }); + permittedActionStrings.push( + ...permittedActions.map((a) => a.permittedActionId).filter(isDefined) + ); + + return new Set(permittedActionStrings); +}; + +/** + * Check if the token for the current request is a custom HPC token that's + * stored in our database. + */ +export const getParticipantFromToken = async ( + context: Pick +): Promise => { + const { request, config, connection } = context; + if (!request) { + return null; + } + if ( + !request.authorization?.credentials || + (request.authorization?.scheme !== 'Bearer' && + request.authorization?.scheme !== 'Basic') + ) { + return null; + } + + if ( + request.authorization?.scheme === 'Basic' && + request.authorization.basic?.username !== config?.basicAuthUser + ) { + return null; + } + + const token = request.authorization?.basic + ? request.authorization.basic.password + : request.authorization?.credentials; + + const tokenHash = crypto + .createHash('sha256') + .update(token) + .digest() + .toString('hex'); + + const proxy = await authToken(connection).findOne({ + where: { tokenHash }, + }); + + if (proxy && (!proxy.expires || proxy.expires.getTime() > Date.now())) { + return participant(connection).findOne({ + where: { id: proxy.participant }, + }); + } + return null; +}; + +/** + * Create a new access token for the given user. + */ +export const createToken = async ({ + context, + participant, + expires, +}: { + context: Pick; + participant: ParticipantId; + expires?: Date; +}): Promise<{ + instance: AuthTokenInstance; + token: string; +}> => { + const token = (await randomBytes(48)).toString('hex'); + const tokenHash = crypto + .createHash('sha256') + .update(token) + .digest() + .toString('hex'); + + return { + instance: await authToken(context.connection).create({ + tokenHash, + participant, + expires, + }), + token, + }; +}; diff --git a/src/auth/permissions.ts b/src/auth/permissions.ts new file mode 100644 index 00000000..f6192bc2 --- /dev/null +++ b/src/auth/permissions.ts @@ -0,0 +1,273 @@ +import { PermittedActionIdString } from './role.data'; + +/** + * A mapping from target types to permissions that can apply to those targets + */ +export const AUTH_PERMISSIONS = { + global: { + RUN_ADMIN_COMMANDS: 'canRunAdminCommands', + MODIFY_ACCESS_AND_PERMISSIONS: 'canModifyAccessAndPermissions', + /** + * Can modify the access and permissions of operations, + * or any object nested underneath an operation + * (like an operation cluster). 
+ */ + MODIFY_OPERATION_ACCESS_AND_PERMISSIONS: + 'canModifyOperationAccessAndPermissions', + ADD_OPERATION: 'canAddOperation', + /** + * Can view ALL the data for operations or any nested information + * + * (equivalent to operation.VIEW_DATA for every operation) + */ + VIEW_OPERATION_DATA: 'viewOperationData', + /** + * Can list operations and view the metadata for any operation + * + * (equivalent to operation.VIEW_METADATA for every operation) + */ + VIEW_OPERATION_METADATA: 'viewOperationMetadata', + /** + * Can list operations and view the metadata for assigned operations + * + * This permission is expected to be present whenever a user has the + * `VIEW_METADATA` permission for an operation + */ + VIEW_ASSIGNED_OPERATION_METADATA: 'viewPermittedOperationMetadata', + CREATE_ORGANIZATIONS: 'canCreateOrganizations', + /** + * Can edit the raw data of any assignment that is a form + */ + EDIT_FORM_ASSIGNMENT_RAW_DATA: 'editFormAssignmentRawData', + /** + * Can edit the clean data of any assignment that is a form + */ + EDIT_FORM_ASSIGNMENT_CLEAN_DATA: 'editFormAssignmentCleanData', + /** + * Can view the data associated with any plan + */ + VIEW_ANY_PLAN_DATA: 'viewAnyPlanData', + /** + * Can edit the data associated with any plan (when that plan is editable) + */ + EDIT_ANY_PLAN_DATA: 'editAnyPlanData', + VIEW_ANY_FLOW: 'viewAnyFlow', + EDIT_ANY_FLOW: 'editAnyFlow', + VIEW_CATEGORIES: 'viewCategories', + EDIT_CATEGORIES: 'editCategories', + /** + * Can move projects associated with any plan to any step of the workflow. + */ + PROJECT_WORKFLOW_MOVE_TO_ANY_STEP: 'projectWorkflowMoveToAnyStep', + }, + operation: { + /** + * Can view ALL the data for the operation and underlying clusters + */ + VIEW_DATA: 'canViewData', + /** + * Can view the high-level metadata for the operation (e.g. name), + * but no underlying objects (e.g. clusters) + */ + VIEW_METADATA: 'canViewMetadata', + /** + * Can view the high-level metadata for any of the clusters under this op + */ + VIEW_CLUSTER_METADATA: 'canViewClusterMetadata', + /** + * Can modify the access and permissions of any of its clusters + */ + MODIFY_CLUSTER_ACCESS_AND_PERMISSIONS: + 'canModifyClusterAccessAndPermissions', + CREATE_CLUSTER: 'createCluster', + /** + * This also applies to any assignment for any object nested under this + * operation + */ + VIEW_ASSIGNMENT_DATA: 'viewAssignmentData', + /** + * This also applies to any assignment for any object nested under this + * operation + */ + EDIT_ASSIGNMENT_RAW_DATA: 'editAssignmentRawData', + /** + * This also applies to any assignment for any object nested under this + * operation + */ + EDIT_ASSIGNMENT_CLEAN_DATA: 'editAssignmentCleanData', + }, + operationCluster: { + /** + * Can view ALL the data for this cluster and nested objects + */ + VIEW_DATA: 'canViewData', + /** + * Can view the high-level metadata for the operation cluster (e.g. name), + * but no underlying objects + */ + VIEW_METADATA: 'canViewMetadata', + VIEW_ASSIGNMENT_DATA: 'viewAssignmentData', + EDIT_ASSIGNMENT_RAW_DATA: 'editAssignmentRawData', + EDIT_ASSIGNMENT_CLEAN_DATA: 'editAssignmentCleanData', + }, + plan: { + /** + * Can move projects associated with this plan to any step of the workflow. 
+ */ + PROJECT_WORKFLOW_MOVE_TO_ANY_STEP: 'projectWorkflowMoveToAnyStep', + /** + * Can view the data associated with the plan + */ + VIEW_DATA: 'viewPlanData', + /** + * Can edit the data associated with the plan (when that plan is editable) + */ + EDIT_DATA: 'editPlanData', + }, + project: { + MODIFY_ACCESS_AND_PERMISSIONS: 'canModifyAccessAndPermissions', + }, + governingEntity: { + /** + * Can edit the data associated with the governing Entity + * (when it's associated plan is editable) + */ + EDIT_DATA: 'editGoverningEntityData', + /** + * Can move projects associated with this cluster to any step of the workflow. + */ + PROJECT_WORKFLOW_MOVE_IF_PLAN_UNLOCKED: 'projectWorkflowMoveIfPlanUnlocked', + }, +} as const; + +/** + * Get the union type of string permissions allowed for a particular target type + */ +export type PermissionStrings = + typeof AUTH_PERMISSIONS[K][keyof typeof AUTH_PERMISSIONS[K]]; + +export type GlobalPermission = PermissionStrings<'global'>; + +/** + * Type that represents an object containing the complete computed permissions + * for a particular user or actor. + */ +export type GrantedPermissions = { + -readonly [key in keyof Partial< + Omit + >]: Map>>; +} & { + global?: Set; +}; + +/** + * A type that represents a requirement for a permission to be granted for a + * particular action to take place. + */ +export type RequiredPermission = + | { + type: 'global'; + /** + * TODO: move all global roles / permissions to new authentication system + */ + permission: GlobalPermission | PermittedActionIdString; + } + | { + type: 'operation'; + permission: PermissionStrings<'operation'>; + id: number; + } + | { + type: 'operationCluster'; + permission: PermissionStrings<'operationCluster'>; + id: number; + } + | { + type: 'plan'; + permission: PermissionStrings<'plan'>; + id: number; + } + | { + type: 'project'; + permission: PermissionStrings<'project'>; + id: number; + } + | { + type: 'governingEntity'; + permission: PermissionStrings<'governingEntity'>; + id: number; + }; + +/** + * A list of permissions that must all be granted for an access to be permitted + */ +type RequiredPermissionsConjunctionAnd = { + and: RequiredPermissionsCondition[]; +}; + +export type RequiredPermissionsConjunction = + | RequiredPermissionsConjunctionAnd + | RequiredPermission; + +export const isAnd = ( + conj: RequiredPermissionsCondition +): conj is RequiredPermissionsConjunctionAnd => + !!(conj as RequiredPermissionsConjunctionAnd).and; + +/** + * A condition expressing the permissions required for an action in + * Disjunctive Normal Form (https://en.wikipedia.org/wiki/Disjunctive_normal_form) + */ +export type RequiredPermissionsConditionOr = { + or: RequiredPermissionsCondition[]; +}; + +export type RequiredPermissionsCondition = + | RequiredPermissionsConditionOr + | RequiredPermissionsConjunction + | 'anyone' + | 'noone'; + +export const isOr = ( + conj: RequiredPermissionsCondition +): conj is RequiredPermissionsConditionOr => + !!(conj as RequiredPermissionsConditionOr).or; + +export const hasRequiredPermissions = ( + granted: GrantedPermissions, + condition: RequiredPermissionsCondition +): boolean => { + const isAllowed = (permission: RequiredPermission): boolean => { + if (permission.type === 'global') { + return !!granted.global?.has(permission.permission); + } + const map = granted[permission.type]; + const set: Set | undefined = map?.get(permission.id); + return !!set?.has(permission.permission); + }; + + const checkCondition = (cond: RequiredPermissionsCondition): boolean 
=> { + if (cond === 'anyone') { + return true; + } else if (cond === 'noone') { + return false; + } else if (isOr(cond)) { + for (const conjunction of cond.or) { + if (checkCondition(conjunction)) { + return true; + } + } + return false; + } else if (isAnd(cond)) { + for (const p of cond.and) { + if (!checkCondition(p)) { + return false; + } + } + return true; + } + return isAllowed(cond); + }; + + return checkCondition(condition); +}; diff --git a/src/auth/roles.ts b/src/auth/roles.ts new file mode 100644 index 00000000..edb0abe9 --- /dev/null +++ b/src/auth/roles.ts @@ -0,0 +1,380 @@ +import GoverningEntity from '../db/models/governingEntity'; +import OperationCluster from '../db/models/operationCluster'; +import { DeferredFetcherForModel } from '../db/util/deferred'; +import { LogContext } from '../logging/logging'; +import { getOrCreate } from '../util'; +import { createBrandedValue } from '../util/types'; +import { AUTH_PERMISSIONS as P, GrantedPermissions } from './permissions'; + +/** + * A breakdown of the different types of roles are available + * for different target types. + */ +export const AUTH_ROLES = { + global: { + HPC_ADMIN: 'hpc_admin', + RPM_ADMIN: 'rpmAdmin', + FTS_ADMIN: 'ftsAdmin', + PROJECTS_ADMIN: 'projectsAdmin', + OMNISCIENT: 'omniscient', + SWAPS: 'swaps', + }, + operation: { + OPERATION_LEAD: 'operationLead', + READ_ONLY: 'readonly', + }, + operationCluster: { + CLUSTER_LEAD: 'clusterLead', + }, + plan: { + PLAN_LEAD: 'planLead', + READ_ONLY: 'readonly', + }, + project: { + PROJECT_OWNER: 'projectOwner', + }, + governingEntity: { + CLUSTER_LEAD: 'clusterLead', + }, +} as const; + +export type RoleAuthTargetString = keyof typeof AUTH_ROLES; + +/** + * Get the union type of string permissions allowed for a particular target type + */ +export type RolesStrings = + typeof AUTH_ROLES[K][keyof typeof AUTH_ROLES[K]] & string; + +export const getValidRolesForTargetType = ( + targetType: K +): Array> => + Object.values(AUTH_ROLES[targetType]) as Array>; + +export const isValidRoleString = ( + targetType: K, + role: string +): role is RolesStrings => { + const validRoles = new Set(Object.values(AUTH_ROLES[targetType])); + if (!validRoles.has(role)) { + return false; + } + return true; +}; + +export const isValidRoleStringsArray = ( + targetType: K, + roles: string[] +): roles is Array> => { + for (const r of roles) { + const validRoles = new Set(Object.values(AUTH_ROLES[targetType])); + if (!validRoles.has(r)) { + return false; + } + } + return true; +}; + +/** + * Filter out any invalid roles for the given type, + * and report an error if an invalid role is found + */ +export const filterValidRoleStrings = ( + targetType: K, + roles: string[], + logContext: LogContext +): Array> => { + const valid = (role: string): role is RolesStrings => + isValidRoleString(targetType, role); + const validRoles = roles.filter(valid); + if (roles.length !== validRoles.length) { + const invalid = roles.filter( + (role) => !isValidRoleString(targetType, role) + ); + const error = new Error( + `Invalid roles found for user: ${invalid.join(', ')}` + ); + logContext.error(error.message, { + error, + }); + } + return validRoles; +}; + +/** + * A type that represents granted roles for a particular target + */ +export type RolesGrant = + | { + type: 'global'; + roles: RolesStrings<'global'>[]; + } + | { + type: 'operation'; + roles: RolesStrings<'operation'>[]; + id: number; + } + | { + type: 'operationCluster'; + roles: RolesStrings<'operationCluster'>[]; + id: number; + } + | { + type: 'plan'; 
+ roles: RolesStrings<'plan'>[]; + id: number; + } + | { + type: 'project'; + roles: RolesStrings<'project'>[]; + id: number; + } + | { + type: 'governingEntity'; + roles: RolesStrings<'governingEntity'>[]; + id: number; + }; + +/** + * Create a function that checks whether a given RolesGrant is for a specific + * target type and role + */ +export const getGrantValidator = + (type: K, role: RolesStrings) => + (grant: RolesGrant): grant is RolesGrant & { type: K } => + grant.type === type && (grant.roles as RolesStrings[]).includes(role); + +/** + * Calculate the effective permissions that are granted to a user based on a + * list of grants for a particular target; + * + * This is implemented as a function (rather than being based purely on data) + * as there are sometimes intricate behaviours / cascading permissions we want + * to implement (such as cluster leads having read access to their operations). + */ +export const calculatePermissionsFromRolesGrant = async ( + grant: RolesGrant, + fetchers: { + governingEntity: DeferredFetcherForModel< + ReturnType + >; + operationCluster: DeferredFetcherForModel< + ReturnType + >; + } +): Promise => { + const granted: GrantedPermissions = {}; + if (grant.type === 'global') { + const global = (granted.global = granted.global || new Set()); + for (const role of grant.roles) { + if (role === 'hpc_admin') { + // All new Permissions + for (const perm of Object.values(P.global)) { + global.add(perm); + } + // Legacy Permissions + global.add('accessAllFlows'); + global.add('accessAllPlans'); + global.add('createDisaggregationModel'); + global.add('createOperation'); + global.add('createPlan'); + global.add('createProcedure'); + global.add('createProject'); + global.add('deletePlan'); + global.add('deleteUploadedFile'); + global.add('editAnyParticipantCountry'); + global.add('editAnyParticipantOrganization'); + global.add('editAnyProject'); + global.add('editOrganizations'); + global.add('editParticipants'); + global.add('editPlanBlueprint'); + global.add('editPlanRevisionState'); + global.add('editRoleAuthenticationKeys'); + global.add('manageCategories'); + global.add('moveToAnyStep'); + global.add('updatePermittedActions'); + global.add('updateProject'); + global.add('viewRevisions'); + } else if (role === 'rpmAdmin') { + // New Permissions + global.add(P.global.VIEW_ANY_PLAN_DATA); + global.add(P.global.EDIT_ANY_PLAN_DATA); + // Legacy Permissions + global.add('accessAllPlans'); + global.add('createDisaggregationModel'); + global.add('createOperation'); + global.add('createPlan'); + global.add('deletePlan'); + global.add('deleteUploadedFile'); + global.add('editParticipants'); + global.add('editPlanBlueprint'); + global.add('editPlanRevisionState'); + global.add('updatePermittedActions'); + global.add('viewRevisions'); + } else if (role === 'ftsAdmin') { + // New Permissions + global.add(P.global.VIEW_ANY_FLOW); + global.add(P.global.EDIT_ANY_FLOW); + global.add(P.global.EDIT_CATEGORIES); + global.add(P.global.VIEW_CATEGORIES); + // Legacy Permissions + global.add('accessAllFlows'); + global.add('deleteUploadedFile'); + global.add('editOrganizations'); + global.add('manageCategories'); + } else if (role === 'projectsAdmin') { + // New Permissions + global.add(P.global.PROJECT_WORKFLOW_MOVE_TO_ANY_STEP); + // Legacy Permissions + global.add('deleteUploadedFile'); + global.add('editAnyParticipantCountry'); + global.add('editAnyParticipantOrganization'); + global.add('editAnyProject'); + global.add('moveToAnyStep'); + } else if (role === 'omniscient') { 
+ // Legacy Permissions + global.add('accessAllFlows'); + global.add('accessAllPlans'); + } else if (role === 'swaps') { + global.add(P.global.MODIFY_OPERATION_ACCESS_AND_PERMISSIONS); + global.add(P.global.ADD_OPERATION); + global.add(P.global.VIEW_OPERATION_DATA); + global.add(P.global.VIEW_OPERATION_METADATA); + global.add(P.global.EDIT_FORM_ASSIGNMENT_CLEAN_DATA); + global.add(P.global.EDIT_FORM_ASSIGNMENT_RAW_DATA); + } + } + } else if (grant.type === 'operation') { + const global = (granted.global = granted.global || new Set()); + if (!granted.operation) { + granted.operation = new Map(); + } + let operationSet = granted.operation.get(grant.id); + if (!operationSet) { + granted.operation.set(grant.id, (operationSet = new Set())); + } + for (const role of grant.roles) { + if (role === 'operationLead') { + global.add(P.global.VIEW_ASSIGNED_OPERATION_METADATA); + global.add(P.global.CREATE_ORGANIZATIONS); + operationSet.add(P.operation.CREATE_CLUSTER); + operationSet.add(P.operation.EDIT_ASSIGNMENT_RAW_DATA); + operationSet.add(P.operation.MODIFY_CLUSTER_ACCESS_AND_PERMISSIONS); + operationSet.add(P.operation.VIEW_ASSIGNMENT_DATA); + operationSet.add(P.operation.VIEW_CLUSTER_METADATA); + operationSet.add(P.operation.VIEW_DATA); + operationSet.add(P.operation.VIEW_METADATA); + } else if (role === 'readonly') { + global.add(P.global.VIEW_ASSIGNED_OPERATION_METADATA); + operationSet.add(P.operation.VIEW_ASSIGNMENT_DATA); + operationSet.add(P.operation.VIEW_CLUSTER_METADATA); + operationSet.add(P.operation.VIEW_DATA); + operationSet.add(P.operation.VIEW_METADATA); + } + } + } else if (grant.type === 'operationCluster') { + const cluster = await fetchers.operationCluster.get( + createBrandedValue(grant.id) + ); + if (cluster) { + const global = (granted.global = granted.global || new Set()); + if (!granted.operation) { + granted.operation = new Map(); + } + let operationSet = granted.operation.get(cluster.data.operation); + if (!operationSet) { + granted.operation.set( + cluster.data.operation, + (operationSet = new Set()) + ); + } + if (!granted.operationCluster) { + granted.operationCluster = new Map(); + } + let clusterSet = granted.operationCluster.get(cluster.id); + if (!clusterSet) { + granted.operationCluster.set(cluster.id, (clusterSet = new Set())); + } + for (const role of grant.roles) { + if (role === 'clusterLead') { + global.add(P.global.VIEW_ASSIGNED_OPERATION_METADATA); + operationSet.add(P.operation.VIEW_CLUSTER_METADATA); + operationSet.add(P.operation.VIEW_METADATA); + clusterSet.add(P.operationCluster.VIEW_METADATA); + clusterSet.add(P.operationCluster.VIEW_DATA); + clusterSet.add(P.operationCluster.VIEW_ASSIGNMENT_DATA); + clusterSet.add(P.operationCluster.EDIT_ASSIGNMENT_RAW_DATA); + } + } + } + } else if (grant.type === 'plan') { + const global = (granted.global = granted.global || new Set()); + if (!granted.plan) { + granted.plan = new Map(); + } + let planSet = granted.plan.get(grant.id); + if (!planSet) { + granted.plan.set(grant.id, (planSet = new Set())); + } + // TODO: we grant all plan leads access to move any project to any step + // this is probably incorrect, we should fix this if so + // See: https://humanitarian.atlassian.net/browse/HPC-7467 + for (const role of grant.roles) { + if (role === 'planLead') { + global.add('moveToAnyStep'); + planSet.add(P.plan.PROJECT_WORKFLOW_MOVE_TO_ANY_STEP); + planSet.add(P.plan.VIEW_DATA); + planSet.add(P.plan.EDIT_DATA); + } + } + } else if (grant.type === 'project') { + if (!granted.project) { + granted.project = new 
Map();
+    }
+    let projectSet = granted.project.get(grant.id);
+    if (!projectSet) {
+      granted.project.set(grant.id, (projectSet = new Set()));
+    }
+    for (const role of grant.roles) {
+      if (role === 'projectOwner') {
+        projectSet.add(P.project.MODIFY_ACCESS_AND_PERMISSIONS);
+      }
+    }
+  } else if (grant.type === 'governingEntity') {
+    if (!granted.governingEntity) {
+      granted.governingEntity = new Map();
+    }
+    let geSet = granted.governingEntity.get(grant.id);
+    if (!geSet) {
+      granted.governingEntity.set(grant.id, (geSet = new Set()));
+    }
+
+    const governingEntity = await fetchers.governingEntity.get(
+      createBrandedValue(grant.id)
+    );
+
+    if (!governingEntity) {
+      throw new Error(`Cannot find governing entity with ID ${grant.id}`);
+    }
+
+    if (!granted.plan) {
+      granted.plan = new Map();
+    }
+
+    const planSet = getOrCreate(
+      granted.plan,
+      governingEntity.planId,
+      () => new Set()
+    );
+
+    for (const role of grant.roles) {
+      if (role === 'clusterLead') {
+        geSet.add(P.governingEntity.EDIT_DATA);
+        geSet.add(P.governingEntity.PROJECT_WORKFLOW_MOVE_IF_PLAN_UNLOCKED);
+        planSet.add(P.plan.VIEW_DATA);
+      }
+    }
+  }
+
+  return granted;
+};
diff --git a/src/db/util/async.ts b/src/db/util/async.ts
new file mode 100644
index 00000000..33904317
--- /dev/null
+++ b/src/db/util/async.ts
@@ -0,0 +1,65 @@
+/**
+ * Create an async function that can be called multiple times with different
+ * arguments, and result in a single invocation of an async function that
+ * serves all the given calls at once.
+ *
+ * This function takes advantage of the NodeJS single-threaded concurrency model
+ * to group multiple requests together using `setImmediate`.
+ */
+export const createGroupableAsyncFunction = <
+  Args extends readonly any[],
+  Result
+>(opts: {
+  /**
+   * This function should asynchronously return an array of results where each
+   * item corresponds to the item in the `calls` array with the same index.
+   *
+   * An error will be thrown (and all requests will reject) when the returned
+   * array has a different length to the `calls` array.
+   */
+  run: (calls: Args[]) => Promise<Result[]>;
+}): ((...args: Args) => Promise<Result>) => {
+  type Call = {
+    args: Args;
+    resolve: (result: Result) => void;
+    reject: (err: Error) => void;
+  };
+
+  const calls = new Set<Call>();
+  let nextRun: NodeJS.Immediate | null = null;
+
+  const doNextRun = async () => {
+    // Copy + clear state so any later requests will use run
+    const cs = [...calls];
+    calls.clear();
+    nextRun = null;
+
+    try {
+      const result = await opts.run(cs.map((req) => req.args));
+      if (result.length !== cs.length) {
+        throw new Error(`Received unexpected number of results`);
+      }
+      for (let i = 0; i < cs.length; i++) {
+        cs[i].resolve(result[i]);
+      }
+    } catch (err) {
+      for (const call of cs) {
+        call.reject(err);
+      }
+    }
+  };
+
+  return async (...args: Args): Promise<Result> => {
+    if (!nextRun) {
+      nextRun = setImmediate(doNextRun);
+    }
+
+    return new Promise<Result>((resolve, reject) => {
+      calls.add({
+        args,
+        resolve,
+        reject,
+      });
+    });
+  };
+};
diff --git a/src/db/util/deferred.ts b/src/db/util/deferred.ts
new file mode 100644
index 00000000..2ec104fd
--- /dev/null
+++ b/src/db/util/deferred.ts
@@ -0,0 +1,53 @@
+import { createGroupableAsyncFunction } from './async';
+
+/**
+ * A model that allows for fetching of multiple items by their ID at once
+ */
+interface DeferrableModel<ID, Instance> {
+  getAll: (ids: ID[]) => Promise<Map<ID, Instance>>;
+}
+
+/**
+ * A utility that allows for grouping individual database get requests into a
+ * single query.
+ *
+ * See `createDeferredFetcher`
+ */
+export type DeferredFetcher<IDType, Instance> = {
+  get: (id: IDType) => Promise<Instance | null>;
+};
+
+/**
+ * Determine the type of a deferred fetcher for a particular versioned model.
+ *
+ * This is useful for specifying the parameters of a function that needs a
+ * deferred fetcher of a particular table
+ */
+export type DeferredFetcherForModel<M extends DeferrableModel<any, any>> =
+  M extends DeferrableModel<infer ID, infer Instance>
+    ? DeferredFetcher<ID, Instance>
+    : never;
+
+/**
+ * Create a DeferredFetcher for a specific VersionedModel.
+ *
+ * This function uses `createGroupableAsyncFunction`
+ */
+export const createDeferredFetcher = <
+  IDType extends number,
+  Instance extends { id: IDType }
+>(
+  model: DeferrableModel<IDType, Instance>
+): DeferredFetcher<IDType, Instance> => {
+  const get = createGroupableAsyncFunction({
+    run: async (calls: [IDType][]): Promise<(Instance | null)[]> => {
+      const ids = [...new Set(calls.map(([id]) => id))];
+      const result = await model.getAll(ids);
+      return calls.map(([id]) => result.get(id) || null);
+    },
+  });
+
+  return {
+    get,
+  };
+};
diff --git a/src/db/util/index.ts b/src/db/util/index.ts
new file mode 100644
index 00000000..20382e34
--- /dev/null
+++ b/src/db/util/index.ts
@@ -0,0 +1,20 @@
+/**
+ * Take a collection of objects,
+ * and create a map of them, using a particular property as the key
+ */
+export const organizeObjectsByUniqueProperty = <T, P extends keyof T>(
+  objects: Iterable<T>,
+  property: P
+): Map<T[P], T> => {
+  const result = new Map<T[P], T>();
+
+  for (const obj of objects) {
+    const existing = result.get(obj[property]);
+    if (existing) {
+      throw new Error(`Duplicate property value: ${obj[property]}`);
+    }
+    result.set(obj[property], obj);
+  }
+
+  return result;
+};
diff --git a/src/util/index.ts b/src/util/index.ts
new file mode 100644
index 00000000..d9585109
--- /dev/null
+++ b/src/util/index.ts
@@ -0,0 +1,10 @@
+export const getOrCreate = <K, V>(map: Map<K, V>, k: K, val: () => V): V => {
+  let v = map.get(k);
+
+  if (v === undefined) {
+    v = val();
+    map.set(k, v);
+  }
+
+  return v;
+};
diff --git a/src/util/types.ts b/src/util/types.ts
index 9a936898..b9b1c1df 100644
--- a/src/util/types.ts
+++ b/src/util/types.ts
@@ -18,3 +18,6 @@ export type Brand<T, S, Label> = T & {
   readonly __brand__: S;
   readonly __label__: Label;
 };
+
+export const createBrandedValue = <T, B extends Brand<T, any, any>>(v: T): B =>
+  v as B;
diff --git a/yarn.lock b/yarn.lock
index 7cf7ffb6..226ee3ee 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -230,6 +230,14 @@
   resolved "https://registry.yarnpkg.com/@types/mime-db/-/mime-db-1.43.1.tgz#c2a0522453bb9b6e84ee48b7eef765d19bcd519e"
   integrity sha512-kGZJY+R+WnR5Rk+RPHUMERtb2qBRViIHCBdtUrY+NmwuGb8pQdfTqQiCKPrxpdoycl8KWm2DLdkpoSdt479XoQ==
 
+"@types/node-fetch@^2.5.7":
+  version "2.5.12"
+  resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.5.12.tgz#8a6f779b1d4e60b7a57fb6fd48d84fb545b9cc66"
+  integrity sha512-MKgC4dlq4kKNa/mYrwpKfzQMB5X3ee5U6fSprkKpToBqBmX4nFZL9cW5jl6sWn+xpRJ7ypWh2yyqqr8UUCstSw==
+  dependencies:
+    "@types/node" "*"
+    form-data "^3.0.0"
+
 "@types/node@*":
   version "16.6.0"
   resolved "https://registry.yarnpkg.com/@types/node/-/node-16.6.0.tgz#0d5685f85066f94e97f19e8a67fe003c5fadacc4"
@@ -461,6 +469,11 @@ astral-regex@^2.0.0:
   resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31"
   integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==
 
+asynckit@^0.4.0:
+  version "0.4.0"
+  resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
+  integrity sha1-x57Zf380y48robyXkLzDZkdLS3k=
+
 atob@^2.1.2:
version "2.1.2" resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" @@ -629,6 +642,13 @@ colorette@^1.2.2: resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94" integrity sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w== +combined-stream@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + commander@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/commander/-/commander-5.1.0.tgz#46abbd1652f8e059bddaef99bbdcb2ad9cf179ae" @@ -732,6 +752,11 @@ define-property@^2.0.2: is-descriptor "^1.0.2" isobject "^3.0.1" +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= + detect-file@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/detect-file/-/detect-file-1.0.0.tgz#f0d66d03672a825cb1b73bdb3fe62310c8e552b7" @@ -1089,6 +1114,15 @@ for-own@^1.0.0: dependencies: for-in "^1.0.1" +form-data@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f" + integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.8" + mime-types "^2.1.12" + fp-ts@^2.6.6: version "2.10.5" resolved "https://registry.yarnpkg.com/fp-ts/-/fp-ts-2.10.5.tgz#7c77868fe8bd9b229743303c1bec505b959f631b" @@ -1746,6 +1780,18 @@ micromatch@^4.0.2, micromatch@^4.0.4: braces "^3.0.1" picomatch "^2.2.3" +mime-db@1.49.0: + version "1.49.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.49.0.tgz#f3dfde60c99e9cf3bc9701d687778f537001cbed" + integrity sha512-CIc8j9URtOVApSFCQIF+VBkX1RwXp/oMMOrqdyXSBXq5RWNEsRfyj1kiRnQgmNXmHxPoFIxOroKA3zcU9P+nAA== + +mime-types@^2.1.12: + version "2.1.32" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.32.tgz#1d00e89e7de7fe02008db61001d9e02852670fd5" + integrity sha512-hJGaVS4G4c9TSMYh2n6SQAGrC4RnfU+daP8G7cSCmaqNjiOoUY0VHCMS42pxnQmVF1GWwFhbHWn3RIxCqTmZ9A== + dependencies: + mime-db "1.49.0" + mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" @@ -1808,6 +1854,11 @@ natural-compare@^1.4.0: resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= +node-fetch@~2.6.1: + version "2.6.1" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052" + integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw== + normalize-path@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"