diff --git a/src/webnn/native/BUILD.gn b/src/webnn/native/BUILD.gn
index 5e682b374..f754b77a8 100644
--- a/src/webnn/native/BUILD.gn
+++ b/src/webnn/native/BUILD.gn
@@ -210,6 +210,17 @@ source_set("sources") {
     }
   }
 
+  if (webnn_enable_dml) {
+    sources += [
+      "dml/BackendDML.cpp",
+      "dml/BackendDML.h",
+      "dml/ContextDML.cpp",
+      "dml/ContextDML.h",
+      "dml/GraphDML.cpp",
+      "dml/GraphDML.h",
+    ]
+  }
+
   if (webnn_enable_dmlx) {
     if (webnn_enable_gpu_buffer == false) {
       sources += [
diff --git a/src/webnn/native/dml/BackendDML.cpp b/src/webnn/native/dml/BackendDML.cpp
new file mode 100644
index 000000000..d98816e81
--- /dev/null
+++ b/src/webnn/native/dml/BackendDML.cpp
@@ -0,0 +1,103 @@
+// Copyright 2019 The Dawn Authors
+// Copyright 2022 The WebNN-native Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "webnn/native/dml/BackendDML.h"
+
+#include "webnn/native/Instance.h"
+#include "webnn/native/dml/ContextDML.h"
+
+namespace webnn::native::dml {
+
+    namespace {
+        HRESULT EnumAdapter(DXGI_GPU_PREFERENCE gpuPreference,
+                            bool useGpu,
+                            ComPtr<IDXGIAdapter1>& adapter) {
+            ComPtr<IDXGIFactory6> dxgiFactory;
+            WEBNN_RETURN_IF_FAILED(CreateDXGIFactory1(IID_PPV_ARGS(&dxgiFactory)));
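+            // Prefer a hardware adapter matching the requested GPU preference; otherwise
+            // fall back to the WARP software adapter for CPU execution.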
+            if (useGpu) {
+                UINT adapterIndex = 0;
+                while (dxgiFactory->EnumAdapterByGpuPreference(adapterIndex++, gpuPreference,
+                                                               IID_PPV_ARGS(&adapter)) !=
+                       DXGI_ERROR_NOT_FOUND) {
+                    DXGI_ADAPTER_DESC1 pDesc;
+                    adapter->GetDesc1(&pDesc);
+                    // An adapter called the "Microsoft Basic Render Driver" is always present.
+                    // This adapter is a render-only device that has no display outputs. See here
+                    // for documentation on filtering the WARP adapter:
+                    // https://docs.microsoft.com/en-us/windows/desktop/direct3ddxgi/d3d10-graphics-programming-guide-dxgi#new-info-about-enumerating-adapters-for-windows-8
+                    bool isSoftwareAdapter = (pDesc.Flags & DXGI_ADAPTER_FLAG_SOFTWARE) ||
+                                             (pDesc.VendorId == 0x1414 && pDesc.DeviceId == 0x8c);
+                    if (!isSoftwareAdapter) {
+                        break;
+                    }
+                }
+            } else {
+                WEBNN_RETURN_IF_FAILED(dxgiFactory->EnumWarpAdapter(IID_PPV_ARGS(&adapter)));
+            }
+            return S_OK;
+        }
+
+    }  // namespace
+
+    Backend::Backend(InstanceBase* instance)
+        : BackendConnection(instance, wnn::BackendType::DirectML) {
+    }
+
+    MaybeError Backend::Initialize() {
+        return {};
+    }
+
+    ContextBase* Backend::CreateContext(ContextOptions const* options) {
+        wnn::DevicePreference devicePreference =
+            options == nullptr ? wnn::DevicePreference::Default : options->devicePreference;
+        bool useGpu = devicePreference != wnn::DevicePreference::Cpu;
+        DXGI_GPU_PREFERENCE gpuPreference = DXGI_GPU_PREFERENCE_UNSPECIFIED;
+        wnn::PowerPreference powerPreference =
+            options == nullptr ? wnn::PowerPreference::Default : options->powerPreference;
+        switch (powerPreference) {
+            case wnn::PowerPreference::High_performance:
+                gpuPreference = DXGI_GPU_PREFERENCE::DXGI_GPU_PREFERENCE_HIGH_PERFORMANCE;
+                break;
+            case wnn::PowerPreference::Low_power:
+                gpuPreference = DXGI_GPU_PREFERENCE::DXGI_GPU_PREFERENCE_MINIMUM_POWER;
+                break;
+            default:
+                break;
+        }
+
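+        // The D3D12 and DML debug layers are only enabled in debug builds.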
+        bool useDebugLayer = false;
+#ifdef _DEBUG
+        useDebugLayer = true;
+#endif
+        ComPtr<IDXGIAdapter1> adapter;
+        if (FAILED(EnumAdapter(gpuPreference, useGpu, adapter))) {
+            dawn::ErrorLog() << "Failed to enumerate adapters for creating the context.";
+            return nullptr;
+        }
+        return Context::Create(adapter, useDebugLayer);
+    }
+
+    BackendConnection* Connect(InstanceBase* instance) {
+        Backend* backend = new Backend(instance);
+
+        if (instance->ConsumedError(backend->Initialize())) {
+            delete backend;
+            return nullptr;
+        }
+
+        return backend;
+    }
+
+}  // namespace webnn::native::dml
diff --git a/src/webnn/native/dml/BackendDML.h b/src/webnn/native/dml/BackendDML.h
new file mode 100644
index 000000000..25b33fe62
--- /dev/null
+++ b/src/webnn/native/dml/BackendDML.h
@@ -0,0 +1,34 @@
+// Copyright 2019 The Dawn Authors
+// Copyright 2022 The WebNN-native Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef WEBNN_NATIVE_DML_BACKEND_DML_H_
+#define WEBNN_NATIVE_DML_BACKEND_DML_H_
+
+#include "webnn/native/BackendConnection.h"
+#include "webnn/native/Context.h"
+
+namespace webnn::native::dml {
+
+    class Backend : public BackendConnection {
+      public:
+        explicit Backend(InstanceBase* instance);
+
+        MaybeError Initialize();
+        ContextBase* CreateContext(ContextOptions const* options = nullptr) override;
+    };
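+
+    // Usage sketch, assuming the instance discovers this backend via Connect() in
+    // BackendDML.cpp:
+    //   BackendConnection* backend = Connect(instance);
+    //   ContextBase* context = backend->CreateContext(/*options=*/nullptr);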
+
+}  // namespace webnn::native::dml
+
+#endif  // WEBNN_NATIVE_DML_BACKEND_DML_H_
diff --git a/src/webnn/native/dml/ContextDML.cpp b/src/webnn/native/dml/ContextDML.cpp
new file mode 100644
index 000000000..2480d69e3
--- /dev/null
+++ b/src/webnn/native/dml/ContextDML.cpp
@@ -0,0 +1,74 @@
+// Copyright 2022 The WebNN-native Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "webnn/native/dml/ContextDML.h"
+
+#include "webnn/native/dml/GraphDML.h"
+
+namespace webnn::native::dml {
+
+    HRESULT Context::Initialize() {
+        if (mUseDebugLayer) {
+            ComPtr<ID3D12Debug> debug;
+            if (SUCCEEDED(D3D12GetDebugInterface(IID_PPV_ARGS(&debug)))) {
+                debug->EnableDebugLayer();
+            }
+        }
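+        // Create the D3D12 device on the selected adapter, plus the command queue,
+        // allocator and command list that DirectML work is recorded into.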
+        WEBNN_RETURN_IF_FAILED(D3D12CreateDevice(mAdapter.Get(), D3D_FEATURE_LEVEL_11_0,
+                                                 IID_PPV_ARGS(&mCommandRecorderDML.D3D12Device)));
+        D3D12_COMMAND_QUEUE_DESC commandQueueDesc{};
+        commandQueueDesc.Type = D3D12_COMMAND_LIST_TYPE_DIRECT;
+        commandQueueDesc.Flags = D3D12_COMMAND_QUEUE_FLAG_NONE;
+        WEBNN_RETURN_IF_FAILED(mCommandRecorderDML.D3D12Device->CreateCommandQueue(
+            &commandQueueDesc, IID_PPV_ARGS(&mCommandRecorderDML.commandQueue)));
+        WEBNN_RETURN_IF_FAILED(mCommandRecorderDML.D3D12Device->CreateCommandAllocator(
+            D3D12_COMMAND_LIST_TYPE_DIRECT, IID_PPV_ARGS(&mCommandRecorderDML.commandAllocator)));
+        WEBNN_RETURN_IF_FAILED(mCommandRecorderDML.D3D12Device->CreateCommandList(
+            0, D3D12_COMMAND_LIST_TYPE_DIRECT, mCommandRecorderDML.commandAllocator.Get(), nullptr,
+            IID_PPV_ARGS(&mCommandRecorderDML.commandList)));
+
+        // Create the DirectML device, enabling the DML debug layer when the D3D12 debug
+        // layer is in use.
+        ComPtr<ID3D12DebugDevice> debugDevice;
+        if (mUseDebugLayer && SUCCEEDED(mCommandRecorderDML.D3D12Device.As(&debugDevice))) {
+            WEBNN_RETURN_IF_FAILED(DMLCreateDevice(mCommandRecorderDML.D3D12Device.Get(),
+                                                   DML_CREATE_DEVICE_FLAG_DEBUG,
+                                                   IID_PPV_ARGS(&mCommandRecorderDML.device)));
+        } else {
+            WEBNN_RETURN_IF_FAILED(DMLCreateDevice(mCommandRecorderDML.D3D12Device.Get(),
+                                                   DML_CREATE_DEVICE_FLAG_NONE,
+                                                   IID_PPV_ARGS(&mCommandRecorderDML.device)));
+        }
+        return S_OK;
+    }
+
+    // static
+    ContextBase* Context::Create(ComPtr<IDXGIAdapter1> adapter, bool useDebugLayer) {
+        Context* context = new Context(adapter, useDebugLayer);
+        if (FAILED(context->Initialize())) {
+            dawn::ErrorLog() << "Failed to initialize the DML context.";
+            delete context;
+            return nullptr;
+        }
+        return context;
+    }
+
+    Context::Context(ComPtr<IDXGIAdapter1> adapter, bool useDebugLayer)
+        : mAdapter(std::move(adapter)), mUseDebugLayer(useDebugLayer) {
+    }
+
+    GraphBase* Context::CreateGraphImpl() {
+        return new Graph(this);
+    }
+
+}  // namespace webnn::native::dml
diff --git a/src/webnn/native/dml/ContextDML.h b/src/webnn/native/dml/ContextDML.h
new file mode 100644
index 000000000..cf4aefac5
--- /dev/null
+++ b/src/webnn/native/dml/ContextDML.h
@@ -0,0 +1,57 @@
+// Copyright 2022 The WebNN-native Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef WEBNN_NATIVE_DML_CONTEXT_DML_H_
+#define WEBNN_NATIVE_DML_CONTEXT_DML_H_
+
+#include "webnn/native/Context.h"
+
+#include "common/Log.h"
+#include "dml_platform.h"
+#include "webnn/native/Graph.h"
+
+namespace webnn::native::dml {
+
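+    // Groups the D3D12 and DirectML objects used to record and submit DML workloads.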
+    struct CommandRecorderDML {
+        ComPtr<IDMLDevice> device;
+        ComPtr<ID3D12Device> D3D12Device;
+        ComPtr<IDMLCommandRecorder> commandRecorder;
+        ComPtr<ID3D12CommandQueue> commandQueue;
+        ComPtr<ID3D12CommandAllocator> commandAllocator;
+        ComPtr<ID3D12GraphicsCommandList> commandList;
+    };
+
+    class Context : public ContextBase {
+      public:
+        static ContextBase* Create(ComPtr<IDXGIAdapter1> adapter, bool useDebugLayer);
+        ~Context() override = default;
+
+        const CommandRecorderDML& GetCommandRecorderDML() const {
+            return mCommandRecorderDML;
+        }
+
+      private:
+        Context(ComPtr<IDXGIAdapter1> adapter, bool useDebugLayer);
+        HRESULT Initialize();
+
+        GraphBase* CreateGraphImpl() override;
+
+        CommandRecorderDML mCommandRecorderDML;
+        ComPtr<IDXGIAdapter1> mAdapter;
+        bool mUseDebugLayer = false;
+    };
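+
+    // Usage sketch (hypothetical caller holding a Context* named context):
+    //   const CommandRecorderDML& recorder = context->GetCommandRecorderDML();
+    //   recorder.commandList->Close();
+    //   ID3D12CommandList* lists[] = {recorder.commandList.Get()};
+    //   recorder.commandQueue->ExecuteCommandLists(1, lists);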
+
+}  // namespace webnn::native::dml
+
+#endif  // WEBNN_NATIVE_DML_CONTEXT_DML_H_
diff --git a/src/webnn/native/dml/GraphDML.cpp b/src/webnn/native/dml/GraphDML.cpp
new file mode 100644
index 000000000..cb64268ce
--- /dev/null
+++ b/src/webnn/native/dml/GraphDML.cpp
@@ -0,0 +1,33 @@
+// Copyright 2022 The WebNN-native Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "webnn/native/dml/GraphDML.h"
+
+#include "webnn/native/NamedInputs.h"
+#include "webnn/native/NamedOutputs.h"
+
+namespace webnn::native::dml {
+
+    Graph::Graph(Context* context) : GraphBase(context) {
+    }
+
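+    // Compile and compute are stubs for now; this change only adds the DML backend
+    // scaffolding.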
+    MaybeError Graph::CompileImpl() {
+        return {};
+    }
+
+    MaybeError Graph::ComputeImpl(NamedInputsBase* inputs, NamedOutputsBase* outputs) {
+        return {};
+    }
+
+}  // namespace webnn::native::dml
diff --git a/src/webnn/native/dml/GraphDML.h b/src/webnn/native/dml/GraphDML.h
new file mode 100644
index 000000000..06a30dfbb
--- /dev/null
+++ b/src/webnn/native/dml/GraphDML.h
@@ -0,0 +1,58 @@
+// Copyright 2022 The WebNN-native Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef WEBNN_NATIVE_DML_GRAPH_DML_H_
+#define WEBNN_NATIVE_DML_GRAPH_DML_H_
+
+#include "webnn/native/Graph.h"
+#include "webnn/native/Operand.h"
+#include "webnn/native/Operator.h"
+#include "webnn/native/dml/ContextDML.h"
+#include "webnn/native/ops/BatchNorm.h"
+#include "webnn/native/ops/Binary.h"
+#include "webnn/native/ops/Clamp.h"
+#include "webnn/native/ops/Concat.h"
+#include "webnn/native/ops/Constant.h"
+#include "webnn/native/ops/Conv2d.h"
+#include "webnn/native/ops/Gemm.h"
+#include "webnn/native/ops/Gru.h"
+#include "webnn/native/ops/Input.h"
+#include "webnn/native/ops/InstanceNorm.h"
+#include "webnn/native/ops/LeakyRelu.h"
+#include "webnn/native/ops/Pad.h"
+#include "webnn/native/ops/Pool2d.h"
+#include "webnn/native/ops/Reduce.h"
+#include "webnn/native/ops/Resample2d.h"
+#include "webnn/native/ops/Reshape.h"
+#include "webnn/native/ops/Slice.h"
+#include "webnn/native/ops/Split.h"
+#include "webnn/native/ops/Squeeze.h"
+#include "webnn/native/ops/Transpose.h"
+#include "webnn/native/ops/Unary.h"
+
+namespace webnn::native::dml {
+
+    class Graph : public GraphBase {
+      public:
+        explicit Graph(Context* context);
+        ~Graph() override = default;
+
+      private:
+        MaybeError CompileImpl() override;
+        MaybeError ComputeImpl(NamedInputsBase* inputs, NamedOutputsBase* outputs) override;
+    };
+
+}  // namespace webnn::native::dml
+
+#endif  // WEBNN_NATIVE_DML_GRAPH_DML_H_
diff --git a/src/webnn/native/dml/dml_platform.h b/src/webnn/native/dml/dml_platform.h
new file mode 100644
index 000000000..3762cb56a
--- /dev/null
+++ b/src/webnn/native/dml/dml_platform.h
@@ -0,0 +1,38 @@
+// Copyright 2022 The WebNN-native Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef WEBNN_NATIVE_DML_DMLPLATFORM_H_
+#define WEBNN_NATIVE_DML_DMLPLATFORM_H_
+
+// This macro must be defined before including "DirectML.h" so that the latest
+// DML target version is used.
+#define DML_TARGET_VERSION_USE_LATEST 1
+
+#include <dxgi1_6.h>
+#include <wrl/client.h>
+
+#include "DirectML.h"
+
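+// Evaluates EXPR and, if it returns a failed HRESULT, logs the expression and
+// propagates the error code to the caller.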
+#define WEBNN_RETURN_IF_FAILED(EXPR)                                                  \
+    do {                                                                              \
+        HRESULT hr = (EXPR);                                                          \
+        if (FAILED(hr)) {                                                             \
+            dawn::ErrorLog() << #EXPR << " failed with HRESULT 0x" << std::hex << hr; \
+            return hr;                                                                \
+        }                                                                             \
+    } while (0)
+
+using Microsoft::WRL::ComPtr;
+
+#endif  // WEBNN_NATIVE_DML_DMLPLATFORM_H_