diff --git a/frontend/public/services/sql-azure.png b/frontend/public/services/sql-azure.png
new file mode 100644
index 0000000000000..87648c4078e30
Binary files /dev/null and b/frontend/public/services/sql-azure.png differ
diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx
index 054701ebbe769..ed731503746dd 100644
--- a/frontend/src/lib/constants.tsx
+++ b/frontend/src/lib/constants.tsx
@@ -208,6 +208,7 @@ export const FEATURE_FLAGS = {
WEB_ANALYTICS_REPLAY: 'web-analytics-replay', // owner: @robbie-c
BATCH_EXPORTS_POSTHOG_HTTP: 'posthog-http-batch-exports',
EXPERIMENT_MAKE_DECISION: 'experiment-make-decision', // owner: @jurajmajerik #team-feature-success
+ MSSQL_SOURCE: 'mssql_source', // owner: @Gilbert09 #team-data-warehouse
} as const
export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS]
diff --git a/frontend/src/scenes/data-warehouse/new/NewSourceWizard.tsx b/frontend/src/scenes/data-warehouse/new/NewSourceWizard.tsx
index e3c29b8afbb37..22e6e407ecb21 100644
--- a/frontend/src/scenes/data-warehouse/new/NewSourceWizard.tsx
+++ b/frontend/src/scenes/data-warehouse/new/NewSourceWizard.tsx
@@ -164,7 +164,11 @@ function FirstStep(): JSX.Element {
title: 'Name',
key: 'name',
render: function RenderName(_, sourceConfig) {
- return {sourceConfig.name}
+ return (
+
+ {sourceConfig.label ?? sourceConfig.name}
+
+ )
},
},
{
diff --git a/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx b/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx
index 6c16e79e369f5..d597b90b99f14 100644
--- a/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx
+++ b/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx
@@ -3,6 +3,8 @@ import { actions, connect, kea, listeners, path, props, reducers, selectors } fr
import { forms } from 'kea-forms'
import { router, urlToAction } from 'kea-router'
import api from 'lib/api'
+import { FEATURE_FLAGS } from 'lib/constants'
+import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
import posthog from 'posthog-js'
import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic'
import { Scene } from 'scenes/sceneTypes'
@@ -331,6 +333,138 @@ export const SOURCE_DETAILS: Record<ExternalDataSourceType, SourceConfig> = {
},
],
},
+ MSSQL: {
+ name: 'MSSQL',
+ label: 'Azure SQL Server',
+ caption: (
+ <>
+ Enter your MS SQL Server/Azure SQL Server credentials to automatically pull your SQL data into the
+ PostHog Data warehouse.
+ </>
+ ),
+ fields: [
+ {
+ name: 'host',
+ label: 'Host',
+ type: 'text',
+ required: true,
+ placeholder: 'localhost',
+ },
+ {
+ name: 'port',
+ label: 'Port',
+ type: 'number',
+ required: true,
+ placeholder: '1433',
+ },
+ {
+ name: 'dbname',
+ label: 'Database',
+ type: 'text',
+ required: true,
+ placeholder: 'msdb',
+ },
+ {
+ name: 'user',
+ label: 'User',
+ type: 'text',
+ required: true,
+ placeholder: 'sa',
+ },
+ {
+ name: 'password',
+ label: 'Password',
+ type: 'password',
+ required: true,
+ placeholder: '',
+ },
+ {
+ name: 'schema',
+ label: 'Schema',
+ type: 'text',
+ required: true,
+ placeholder: 'dbo',
+ },
+ {
+ name: 'ssh-tunnel',
+ label: 'Use SSH tunnel?',
+ type: 'switch-group',
+ default: false,
+ fields: [
+ {
+ name: 'host',
+ label: 'Tunnel host',
+ type: 'text',
+ required: true,
+ placeholder: 'localhost',
+ },
+ {
+ name: 'port',
+ label: 'Tunnel port',
+ type: 'number',
+ required: true,
+ placeholder: '22',
+ },
+ {
+ type: 'select',
+ name: 'auth_type',
+ label: 'Authentication type',
+ required: true,
+ defaultValue: 'password',
+ options: [
+ {
+ label: 'Password',
+ value: 'password',
+ fields: [
+ {
+ name: 'username',
+ label: 'Tunnel username',
+ type: 'text',
+ required: true,
+ placeholder: 'User1',
+ },
+ {
+ name: 'password',
+ label: 'Tunnel password',
+ type: 'password',
+ required: true,
+ placeholder: '',
+ },
+ ],
+ },
+ {
+ label: 'Key pair',
+ value: 'keypair',
+ fields: [
+ {
+ name: 'username',
+ label: 'Tunnel username',
+ type: 'text',
+ required: false,
+ placeholder: 'User1',
+ },
+ {
+ name: 'private_key',
+ label: 'Tunnel private key',
+ type: 'textarea',
+ required: true,
+ placeholder: '',
+ },
+ {
+ name: 'passphrase',
+ label: 'Tunnel passphrase',
+ type: 'password',
+ required: false,
+ placeholder: '',
+ },
+ ],
+ },
+ ],
+ },
+ ],
+ },
+ ],
+ },
Snowflake: {
name: 'Snowflake',
caption: (
@@ -534,6 +668,8 @@ export const sourceWizardLogic = kea<sourceWizardLogicType>([
['dataWarehouseSources'],
preflightLogic,
['preflight'],
+ featureFlagLogic,
+ ['featureFlags'],
],
actions: [
dataWarehouseTableLogic,
@@ -714,15 +850,21 @@ export const sourceWizardLogic = kea<sourceWizardLogicType>([
(selectedConnector, isManualLinkFormVisible) => selectedConnector || isManualLinkFormVisible,
],
connectors: [
- (s) => [s.dataWarehouseSources],
- (sources): SourceConfig[] => {
- return Object.values(SOURCE_DETAILS).map((connector) => ({
+ (s) => [s.dataWarehouseSources, s.featureFlags],
+ (sources, featureFlags): SourceConfig[] => {
+ const connectors = Object.values(SOURCE_DETAILS).map((connector) => ({
...connector,
disabledReason:
sources && sources.results.find((source) => source.source_type === connector.name)
? 'Already linked'
: null,
}))
+
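+ // Only expose the MSSQL connector to teams with the mssql_source feature flag enabled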
+ if (!featureFlags[FEATURE_FLAGS.MSSQL_SOURCE]) {
+ return connectors.filter((n) => n.name !== 'MSSQL')
+ }
+
+ return connectors
},
],
manualConnectors: [
diff --git a/frontend/src/scenes/data-warehouse/settings/DataWarehouseManagedSourcesTable.tsx b/frontend/src/scenes/data-warehouse/settings/DataWarehouseManagedSourcesTable.tsx
index 69c12250eea14..e983ce363bd95 100644
--- a/frontend/src/scenes/data-warehouse/settings/DataWarehouseManagedSourcesTable.tsx
+++ b/frontend/src/scenes/data-warehouse/settings/DataWarehouseManagedSourcesTable.tsx
@@ -12,12 +12,14 @@ import IconMySQL from 'public/services/mysql.png'
import IconPostgres from 'public/services/postgres.png'
import IconSalesforce from 'public/services/salesforce.png'
import IconSnowflake from 'public/services/snowflake.png'
+import IconMSSQL from 'public/services/sql-azure.png'
import IconStripe from 'public/services/stripe.png'
import IconZendesk from 'public/services/zendesk.png'
import { urls } from 'scenes/urls'
import { manualLinkSources, PipelineNodeTab, PipelineStage } from '~/types'
+import { SOURCE_DETAILS } from '../new/sourceWizardLogic'
import { dataWarehouseSettingsLogic } from './dataWarehouseSettingsLogic'
const StatusTagSetting = {
@@ -56,7 +58,7 @@ export function DataWarehouseManagedSourcesTable(): JSX.Element {
`managed-${source.id}`,
PipelineNodeTab.Schemas
)}
- title={source.source_type}
+ title={SOURCE_DETAILS[source.source_type]?.label ?? source.source_type}
description={source.prefix}
/>
)
@@ -185,6 +187,7 @@ export function RenderDataWarehouseSourceIcon({
'cloudflare-r2': IconCloudflare,
azure: Iconazure,
Salesforce: IconSalesforce,
+ MSSQL: IconMSSQL,
}[type]
return (
diff --git a/frontend/src/types.ts b/frontend/src/types.ts
index 7bdb1a9b8b306..d66b0beb1e788 100644
--- a/frontend/src/types.ts
+++ b/frontend/src/types.ts
@@ -3854,6 +3854,7 @@ export const externalDataSources = [
'Hubspot',
'Postgres',
'MySQL',
+ 'MSSQL',
'Zendesk',
'Snowflake',
'Salesforce',
@@ -3875,7 +3876,7 @@ export interface ExternalDataStripeSource {
source_id: string
connection_id: string
status: string
- source_type: string
+ source_type: ExternalDataSourceType
prefix: string
last_run_at?: Dayjs
schemas: ExternalDataSourceSchema[]
@@ -4249,6 +4250,7 @@ export type SourceFieldConfig =
export interface SourceConfig {
name: ExternalDataSourceType
+ label?: string
caption: string | React.ReactNode
fields: SourceFieldConfig[]
disabledReason?: string | null
diff --git a/latest_migrations.manifest b/latest_migrations.manifest
index af30ae6589b0d..5f32a0ea3f522 100644
--- a/latest_migrations.manifest
+++ b/latest_migrations.manifest
@@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name
ee: 0016_rolemembership_organization_member
otp_static: 0002_throttling
otp_totp: 0002_auto_20190420_0723
-posthog: 0460_alertconfiguration_threshold_alertsubscription_and_more
+posthog: 0461_alter_externaldatasource_source_type
sessions: 0001_initial
social_django: 0010_uid_db_index
two_factor: 0007_auto_20201201_1019
diff --git a/posthog/migrations/0461_alter_externaldatasource_source_type.py b/posthog/migrations/0461_alter_externaldatasource_source_type.py
new file mode 100644
index 0000000000000..8ede6c0fd01eb
--- /dev/null
+++ b/posthog/migrations/0461_alter_externaldatasource_source_type.py
@@ -0,0 +1,29 @@
+# Generated by Django 4.2.14 on 2024-08-23 09:54
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ("posthog", "0460_alertconfiguration_threshold_alertsubscription_and_more"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="externaldatasource",
+ name="source_type",
+ field=models.CharField(
+ choices=[
+ ("Stripe", "Stripe"),
+ ("Hubspot", "Hubspot"),
+ ("Postgres", "Postgres"),
+ ("Zendesk", "Zendesk"),
+ ("Snowflake", "Snowflake"),
+ ("Salesforce", "Salesforce"),
+ ("MySQL", "MySQL"),
+ ("MSSQL", "MSSQL"),
+ ],
+ max_length=128,
+ ),
+ ),
+ ]
diff --git a/posthog/temporal/data_imports/pipelines/pipeline.py b/posthog/temporal/data_imports/pipelines/pipeline.py
index b0715f952f854..0dbfd8f5c3fd7 100644
--- a/posthog/temporal/data_imports/pipelines/pipeline.py
+++ b/posthog/temporal/data_imports/pipelines/pipeline.py
@@ -54,6 +54,8 @@ def __init__(
self.should_chunk_pipeline = (
incremental
and inputs.job_type != ExternalDataSource.Type.POSTGRES
+ and inputs.job_type != ExternalDataSource.Type.MYSQL
+ and inputs.job_type != ExternalDataSource.Type.MSSQL
and inputs.job_type != ExternalDataSource.Type.SNOWFLAKE
)
diff --git a/posthog/temporal/data_imports/pipelines/schemas.py b/posthog/temporal/data_imports/pipelines/schemas.py
index c8c0d9729c2d8..0acd00e8bd6f3 100644
--- a/posthog/temporal/data_imports/pipelines/schemas.py
+++ b/posthog/temporal/data_imports/pipelines/schemas.py
@@ -28,6 +28,7 @@
ExternalDataSource.Type.SNOWFLAKE: (),
ExternalDataSource.Type.SALESFORCE: SALESFORCE_ENDPOINTS,
ExternalDataSource.Type.MYSQL: (),
+ ExternalDataSource.Type.MSSQL: (),
}
PIPELINE_TYPE_INCREMENTAL_ENDPOINTS_MAPPING = {
@@ -38,6 +39,7 @@
ExternalDataSource.Type.SNOWFLAKE: (),
ExternalDataSource.Type.SALESFORCE: SALESFORCE_INCREMENTAL_ENDPOINTS,
ExternalDataSource.Type.MYSQL: (),
+ ExternalDataSource.Type.MSSQL: (),
}
PIPELINE_TYPE_INCREMENTAL_FIELDS_MAPPING: dict[ExternalDataSource.Type, dict[str, list[IncrementalField]]] = {
@@ -48,4 +50,5 @@
ExternalDataSource.Type.SNOWFLAKE: {},
ExternalDataSource.Type.SALESFORCE: SALESFORCE_INCREMENTAL_FIELDS,
ExternalDataSource.Type.MYSQL: {},
+ ExternalDataSource.Type.MSSQL: {},
}
diff --git a/posthog/temporal/data_imports/pipelines/sql_database/__init__.py b/posthog/temporal/data_imports/pipelines/sql_database/__init__.py
index 65a4ca9527cd6..0fc7f7394b6ad 100644
--- a/posthog/temporal/data_imports/pipelines/sql_database/__init__.py
+++ b/posthog/temporal/data_imports/pipelines/sql_database/__init__.py
@@ -69,6 +69,10 @@ def sql_source_for_type(
)
elif source_type == ExternalDataSource.Type.MYSQL:
credentials = ConnectionStringCredentials(f"mysql+pymysql://{user}:{password}@{host}:{port}/{database}")
+ elif source_type == ExternalDataSource.Type.MSSQL:
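+ # SQL Server via pyodbc, using the msodbcsql18 driver installed in production.Dockerfile;
+ # TrustServerCertificate=yes accepts the server certificate without validation (e.g. self-signed certs)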
+ credentials = ConnectionStringCredentials(
+ f"mssql+pyodbc://{user}:{password}@{host}:{port}/{database}?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes"
+ )
else:
raise Exception("Unsupported source_type")
diff --git a/posthog/temporal/data_imports/workflow_activities/import_data.py b/posthog/temporal/data_imports/workflow_activities/import_data.py
index 974edc7ca3460..6ce4237f53711 100644
--- a/posthog/temporal/data_imports/workflow_activities/import_data.py
+++ b/posthog/temporal/data_imports/workflow_activities/import_data.py
@@ -102,7 +102,11 @@ async def import_data_activity(inputs: ImportDataActivityInputs):
schema=schema,
reset_pipeline=reset_pipeline,
)
- elif model.pipeline.source_type in [ExternalDataSource.Type.POSTGRES, ExternalDataSource.Type.MYSQL]:
+ elif model.pipeline.source_type in [
+ ExternalDataSource.Type.POSTGRES,
+ ExternalDataSource.Type.MYSQL,
+ ExternalDataSource.Type.MSSQL,
+ ]:
from posthog.temporal.data_imports.pipelines.sql_database import sql_source_for_type
host = model.pipeline.job_inputs.get("host")
diff --git a/posthog/warehouse/api/external_data_schema.py b/posthog/warehouse/api/external_data_schema.py
index 5b982e54b8434..6db5379a2fe96 100644
--- a/posthog/warehouse/api/external_data_schema.py
+++ b/posthog/warehouse/api/external_data_schema.py
@@ -23,6 +23,7 @@
cancel_external_data_workflow,
)
from posthog.warehouse.models.external_data_schema import (
+ filter_mssql_incremental_fields,
filter_mysql_incremental_fields,
filter_postgres_incremental_fields,
filter_snowflake_incremental_fields,
@@ -269,7 +270,11 @@ def incremental_fields(self, request: Request, *args: Any, **kwargs: Any):
source: ExternalDataSource = instance.source
incremental_columns: list[IncrementalField] = []
- if source.source_type in [ExternalDataSource.Type.POSTGRES, ExternalDataSource.Type.MYSQL]:
+ if source.source_type in [
+ ExternalDataSource.Type.POSTGRES,
+ ExternalDataSource.Type.MYSQL,
+ ExternalDataSource.Type.MSSQL,
+ ]:
# TODO(@Gilbert09): Move all this into a util and replace elsewhere
host = source.job_inputs.get("host")
port = source.job_inputs.get("port")
@@ -312,8 +317,10 @@ def incremental_fields(self, request: Request, *args: Any, **kwargs: Any):
columns = db_schemas.get(instance.name, [])
if source.source_type == ExternalDataSource.Type.POSTGRES:
incremental_fields_func = filter_postgres_incremental_fields
- else:
+ elif source.source_type == ExternalDataSource.Type.MYSQL:
incremental_fields_func = filter_mysql_incremental_fields
+ elif source.source_type == ExternalDataSource.Type.MSSQL:
+ incremental_fields_func = filter_mssql_incremental_fields
incremental_columns = [
{"field": name, "field_type": field_type, "label": name, "type": field_type}
diff --git a/posthog/warehouse/api/external_data_source.py b/posthog/warehouse/api/external_data_source.py
index 3fa01db7c1a9b..c3756e2fecddf 100644
--- a/posthog/warehouse/api/external_data_source.py
+++ b/posthog/warehouse/api/external_data_source.py
@@ -33,6 +33,8 @@
get_hubspot_access_token_from_code,
)
from posthog.warehouse.models.external_data_schema import (
+ filter_mssql_incremental_fields,
+ filter_mysql_incremental_fields,
filter_postgres_incremental_fields,
filter_snowflake_incremental_fields,
get_sql_schemas_for_source_type,
@@ -264,7 +266,11 @@ def create(self, request: Request, *args: Any, **kwargs: Any) -> Response:
new_source_model = self._handle_zendesk_source(request, *args, **kwargs)
elif source_type == ExternalDataSource.Type.SALESFORCE:
new_source_model = self._handle_salesforce_source(request, *args, **kwargs)
- elif source_type in [ExternalDataSource.Type.POSTGRES, ExternalDataSource.Type.MYSQL]:
+ elif source_type in [
+ ExternalDataSource.Type.POSTGRES,
+ ExternalDataSource.Type.MYSQL,
+ ExternalDataSource.Type.MSSQL,
+ ]:
try:
new_source_model, sql_schemas = self._handle_sql_source(request, *args, **kwargs)
except InternalPostgresError:
@@ -280,7 +286,11 @@ def create(self, request: Request, *args: Any, **kwargs: Any) -> Response:
payload = request.data["payload"]
schemas = payload.get("schemas", None)
- if source_type in [ExternalDataSource.Type.POSTGRES, ExternalDataSource.Type.MYSQL]:
+ if source_type in [
+ ExternalDataSource.Type.POSTGRES,
+ ExternalDataSource.Type.MYSQL,
+ ExternalDataSource.Type.MSSQL,
+ ]:
default_schemas = sql_schemas
elif source_type == ExternalDataSource.Type.SNOWFLAKE:
default_schemas = snowflake_schemas
@@ -668,7 +678,11 @@ def database_schema(self, request: Request, *arg: Any, **kwargs: Any):
)
# Get schemas and validate SQL credentials
- if source_type in [ExternalDataSource.Type.POSTGRES, ExternalDataSource.Type.MYSQL]:
+ if source_type in [
+ ExternalDataSource.Type.POSTGRES,
+ ExternalDataSource.Type.MYSQL,
+ ExternalDataSource.Type.MSSQL,
+ ]:
host = request.data.get("host", None)
port = request.data.get("port", None)
database = request.data.get("dbname", None)
@@ -775,9 +789,18 @@ def database_schema(self, request: Request, *arg: Any, **kwargs: Any):
data={"message": get_generic_sql_error(source_type)},
)
- filtered_results = [
- (table_name, filter_postgres_incremental_fields(columns)) for table_name, columns in result.items()
- ]
+ if source_type == ExternalDataSource.Type.POSTGRES:
+ filtered_results = [
+ (table_name, filter_postgres_incremental_fields(columns)) for table_name, columns in result.items()
+ ]
+ elif source_type == ExternalDataSource.Type.MYSQL:
+ filtered_results = [
+ (table_name, filter_mysql_incremental_fields(columns)) for table_name, columns in result.items()
+ ]
+ elif source_type == ExternalDataSource.Type.MSSQL:
+ filtered_results = [
+ (table_name, filter_mssql_incremental_fields(columns)) for table_name, columns in result.items()
+ ]
result_mapped_to_options = [
{
diff --git a/posthog/warehouse/models/external_data_schema.py b/posthog/warehouse/models/external_data_schema.py
index f42cf3248b8ad..a4e91d45c577f 100644
--- a/posthog/warehouse/models/external_data_schema.py
+++ b/posthog/warehouse/models/external_data_schema.py
@@ -9,6 +9,7 @@
import uuid
import psycopg2
import pymysql
+import pymssql
from .external_data_source import ExternalDataSource
from posthog.warehouse.data_load.service import (
external_data_workflow_exists,
@@ -339,6 +340,59 @@ def get_schemas(mysql_host: str, mysql_port: int):
return get_schemas(host, int(port))
+def filter_mssql_incremental_fields(columns: list[tuple[str, str]]) -> list[tuple[str, IncrementalFieldType]]:
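+ # Only date, datetime and integer columns can be used as incremental replication fields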
+ results: list[tuple[str, IncrementalFieldType]] = []
+ for column_name, type in columns:
+ type = type.lower()
+ if type == "date":
+ results.append((column_name, IncrementalFieldType.Date))
+ elif type == "datetime" or type == "datetime2" or type == "smalldatetime":
+ results.append((column_name, IncrementalFieldType.DateTime))
+ elif type == "tinyint" or type == "smallint" or type == "int" or type == "bigint":
+ results.append((column_name, IncrementalFieldType.Integer))
+
+ return results
+
+
+def get_mssql_schemas(
+ host: str, port: str, database: str, user: str, password: str, schema: str, ssh_tunnel: SSHTunnel
+) -> dict[str, list[tuple[str, str]]]:
+ def get_schemas(mssql_host: str, mssql_port: int):
+ connection = pymssql.connect(
+ server=mssql_host,
+ port=str(mssql_port),
+ database=database,
+ user=user,
+ password=password,
+ login_timeout=5,
+ )
+
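+ # information_schema.columns lists every column in the requested schema along with its data type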
+ with connection.cursor(as_dict=False) as cursor:
+ cursor.execute(
+ "SELECT table_name, column_name, data_type FROM information_schema.columns WHERE table_schema = %(schema)s ORDER BY table_name ASC",
+ {"schema": schema},
+ )
+
+ schema_list = defaultdict(list)
+
+ for row in cursor:
+ if row:
+ schema_list[row[0]].append((row[1], row[2]))
+
+ connection.close()
+
+ return schema_list
+
+ if ssh_tunnel.enabled:
+ with ssh_tunnel.get_tunnel(host, int(port)) as tunnel:
+ if tunnel is None:
+ raise Exception("Can't open tunnel to SSH server")
+
+ return get_schemas(tunnel.local_bind_host, tunnel.local_bind_port)
+
+ return get_schemas(host, int(port))
+
+
def get_sql_schemas_for_source_type(
source_type: ExternalDataSource.Type,
host: str,
@@ -353,6 +407,8 @@ def get_sql_schemas_for_source_type(
schemas = get_postgres_schemas(host, port, database, user, password, schema, ssh_tunnel)
elif source_type == ExternalDataSource.Type.MYSQL:
schemas = get_mysql_schemas(host, port, database, user, password, schema, ssh_tunnel)
+ elif source_type == ExternalDataSource.Type.MSSQL:
+ schemas = get_mssql_schemas(host, port, database, user, password, schema, ssh_tunnel)
else:
raise Exception("Unsupported source_type")
diff --git a/posthog/warehouse/models/external_data_source.py b/posthog/warehouse/models/external_data_source.py
index 49c91d7781764..6f9fe14e01dd9 100644
--- a/posthog/warehouse/models/external_data_source.py
+++ b/posthog/warehouse/models/external_data_source.py
@@ -22,6 +22,7 @@ class Type(models.TextChoices):
SNOWFLAKE = "Snowflake", "Snowflake"
SALESFORCE = "Salesforce", "Salesforce"
MYSQL = "MySQL", "MySQL"
+ MSSQL = "MSSQL", "MSSQL"
class Status(models.TextChoices):
RUNNING = "Running", "Running"
diff --git a/production.Dockerfile b/production.Dockerfile
index 07906afd9bc4c..ff7bd3d619b55 100644
--- a/production.Dockerfile
+++ b/production.Dockerfile
@@ -158,6 +158,12 @@ RUN apt-get update && \
"libxml2" \
"gettext-base"
+# Install MS SQL dependencies
+RUN curl https://packages.microsoft.com/keys/microsoft.asc | tee /etc/apt/trusted.gpg.d/microsoft.asc
+RUN curl https://packages.microsoft.com/config/debian/11/prod.list | tee /etc/apt/sources.list.d/mssql-release.list
+RUN apt-get update
+RUN ACCEPT_EULA=Y apt-get install -y msodbcsql18
+
# Install NodeJS 18.
RUN apt-get install -y --no-install-recommends \
"curl" \
diff --git a/requirements.in b/requirements.in
index ac315f2c5e719..806129962e009 100644
--- a/requirements.in
+++ b/requirements.in
@@ -57,11 +57,13 @@ Pillow==10.2.0
pdpyras==5.2.0
posthoganalytics==3.5.0
psycopg2-binary==2.9.7
+pymssql==2.3.0
PyMySQL==1.1.1
psycopg[binary]==3.1.20
pyarrow==17.0.0
pydantic==2.5.3
pyjwt==2.4.0
+pyodbc==5.1.0
python-dateutil>=2.8.2
python3-saml==1.12.0
pytz==2023.3
diff --git a/requirements.txt b/requirements.txt
index 8e64d768b0ec7..9b00b7283f7f3 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -258,8 +258,6 @@ googleapis-common-protos==1.60.0
# via
# google-api-core
# grpcio-status
-greenlet==3.0.3
- # via sqlalchemy
grpcio==1.57.0
# via
# google-api-core
@@ -456,10 +454,14 @@ pyjwt==2.4.0
# simple-salesforce
# snowflake-connector-python
# social-auth-core
+pymssql==2.3.0
+ # via -r requirements.in
pymysql==1.1.1
# via -r requirements.in
pynacl==1.5.0
# via paramiko
+pyodbc==5.1.0
+ # via -r requirements.in
pyopenssl==23.0.0
# via
# snowflake-connector-python