Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Windows x64 Build with support for xnnpack and llama example #6979

Open
wants to merge 16 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions backends/xnnpack/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,20 @@ foreach(fbs_file ${_xnnpack_schema__srcs})
endforeach()

# Generate the headers from the .fbs files.

# On Windows, flatc drops its generated headers next to the schema sources,
# so a second command moves them into the expected include tree afterwards.
if (WIN32)
add_custom_command(
OUTPUT ${_xnnpack_schema__outputs}
COMMAND
${FLATC_EXECUTABLE} --cpp --cpp-std c++11 --scoped-enums -o
"${_xnnpack_schema__include_dir}/executorch/backends/xnnpack/serialization"
${_xnnpack_schema__srcs}
# NOTE(review): _xnnpack_flatbuffer__outputs / _xnnpack_schema__outputs are
# CMake lists; inside this quoted string they expand semicolon-joined, which
# Move-Item will see as a single mangled path -- verify this works when more
# than one .fbs file is present.
COMMAND powershell -Command "Move-Item -Path ${_xnnpack_flatbuffer__outputs} -Destination ${_xnnpack_schema__outputs}"
WORKING_DIRECTORY ${EXECUTORCH_ROOT}
COMMENT "Generating xnnpack_schema headers"
VERBATIM
)
else()
add_custom_command(
OUTPUT ${_xnnpack_schema__outputs}
COMMAND
Expand All @@ -84,6 +98,7 @@ add_custom_command(
COMMENT "Generating xnnpack_schema headers"
VERBATIM
)
endif()

add_library(xnnpack_schema INTERFACE ${_xnnpack_schema__outputs})
set_target_properties(xnnpack_schema PROPERTIES LINKER_LANGUAGE CXX)
Expand Down
17 changes: 13 additions & 4 deletions build/resolve_buck.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@
import platform
import stat
import sys
import tempfile
import urllib.request

from dataclasses import dataclass
Expand Down Expand Up @@ -85,6 +84,12 @@ class BuckInfo:
archive_name="buck2-x86_64-apple-darwin.zst",
target_versions=["3eb1ae97ea963086866b4d2d9ffa966d"],
),
("windows", "x86_64"): BuckInfo(
archive_name="buck2-x86_64-pc-windows-msvc.exe.zst",
target_versions=[
"bf1685c4c4ddd9de4592b5a955cb7326fd01e6c4d5f561643422bed961a17401"
],
),
}


Expand Down Expand Up @@ -135,6 +140,8 @@ def resolve_buck2(args: argparse.Namespace) -> Union[str, int]:
os_family = "linux"
elif sys.platform.startswith("darwin"):
os_family = "darwin"
elif sys.platform.startswith("win"):
os_family = "windows"

platform_key = (os_family, arch)
if platform_key not in BUCK_PLATFORM_MAP:
Expand Down Expand Up @@ -193,12 +200,12 @@ def resolve_buck2(args: argparse.Namespace) -> Union[str, int]:

buck2_archive_url = f"https://github.com/facebook/buck2/releases/download/{target_buck_version}/{buck_info.archive_name}"

with tempfile.NamedTemporaryFile() as archive_file:
try:
print(f"Downloading buck2 from {buck2_archive_url}...", file=sys.stderr)
urllib.request.urlretrieve(buck2_archive_url, archive_file.name)
archive_file, _ = urllib.request.urlretrieve(buck2_archive_url)

# Extract and chmod.
with open(archive_file.name, "rb") as f:
with open(archive_file, "rb") as f:
data = f.read()
decompressed_bytes = zstd.decompress(data)

Expand All @@ -207,6 +214,8 @@ def resolve_buck2(args: argparse.Namespace) -> Union[str, int]:

file_stat = os.stat(buck2_local_path)
os.chmod(buck2_local_path, file_stat.st_mode | stat.S_IEXEC)
finally:
os.remove(archive_file)

return buck2_local_path

Expand Down
28 changes: 27 additions & 1 deletion devtools/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -157,19 +157,45 @@ file(MAKE_DIRECTORY
${_program_schema__include_dir}/executorch/devtools/bundled_program
)

# Note that the flatcc project actually writes its outputs into the source
# tree instead of under the binary directory, and there's no way to change
# that behavior.
# On Windows the flatcc executable is placed in a per-configuration
# subdirectory (e.g. bin/Release/flatcc).
# NOTE(review): ${CMAKE_BUILD_TYPE} is empty under multi-config generators
# (Visual Studio) unless the user sets it explicitly, which would yield
# ".../bin//flatcc" -- confirm the Windows build requires CMAKE_BUILD_TYPE.
if(WIN32)
set(_flatcc_bin_path ${_flatcc_source_dir}/bin/${CMAKE_BUILD_TYPE}/flatcc)
else()
set(_flatcc_bin_path ${_flatcc_source_dir}/bin/flatcc)
endif()

# Generate the etdump headers from the flatcc schema. The flatcc invocation
# is identical on all platforms (via _flatcc_bin_path); only the cleanup of
# the stray files flatcc writes next to its inputs is platform-specific.
if(WIN32)
  add_custom_command(
    OUTPUT ${_etdump_schema__outputs}
    COMMAND
      ${_flatcc_bin_path} -cwr -o
      ${_program_schema__include_dir}/executorch/devtools/etdump
      ${_etdump_schema__srcs}
    # Best-effort cleanup; the guard keeps powershell from erroring when the
    # cleanup list is empty.
    # NOTE(review): _etdump_schema_cleanup_paths is a CMake list and expands
    # semicolon-joined inside the quoted string, so Remove-Item sees a single
    # path -- verify with more than one cleanup file.
    COMMAND
      powershell -Command
      "if ('${_etdump_schema_cleanup_paths}' -ne '') { Remove-Item -Path '${_etdump_schema_cleanup_paths}' -Force -ErrorAction SilentlyContinue }"
    DEPENDS ${_etdump_schema_gen_dep}
    COMMENT "Generating etdump headers"
  )
else()
  add_custom_command(
    OUTPUT ${_etdump_schema__outputs}
    COMMAND
      ${_flatcc_bin_path} -cwr -o
      ${_program_schema__include_dir}/executorch/devtools/etdump
      ${_etdump_schema__srcs}
    # Best-effort cleanup of the files flatcc drops next to its inputs.
    COMMAND rm -f ${_etdump_schema_cleanup_paths}
    DEPENDS ${_etdump_schema_gen_dep}
    COMMENT "Generating etdump headers"
  )
endif()

add_library(
etdump ${CMAKE_CURRENT_SOURCE_DIR}/etdump/etdump_flatcc.cpp
Expand Down
26 changes: 19 additions & 7 deletions examples/models/llama/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ find_package(gflags REQUIRED)
# find `executorch` libraries Same as for gflags
set(executorch_DIR ${CMAKE_CURRENT_BINARY_DIR}/../../../lib/cmake/ExecuTorch)
find_package(executorch CONFIG REQUIRED)
if(CMAKE_TOOLCHAIN_IOS OR ANDROID)
if(CMAKE_TOOLCHAIN_IOS OR ANDROID OR WIN32 OR APPLE)
target_link_options_shared_lib(executorch)
endif()

Expand All @@ -98,14 +98,21 @@ add_subdirectory(runner)
set(link_libraries gflags)
set(_srcs main.cpp)

# On Windows, cpublas and extension_threadpool define duplicated symbols, so
# when the xnnpack backend is in the build, drop cpublas from the link line.
# CPUBLAS is expanded into link_libraries below; an empty string makes it a
# no-op there.
if(WIN32 AND TARGET xnnpack_backend)
set(CPUBLAS "")
else()
set(CPUBLAS "cpublas")
endif()

if(EXECUTORCH_BUILD_KERNELS_OPTIMIZED)
list(
APPEND
link_libraries
optimized_native_cpu_ops_lib
optimized_kernels
portable_kernels
cpublas
${CPUBLAS}
eigen_blas
)
target_link_options_shared_lib(optimized_native_cpu_ops_lib)
Expand All @@ -131,18 +138,26 @@ if(EXECUTORCH_BUILD_TORCHAO)
endif()

set(XNNPACK_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/../../../backends/xnnpack)

# On Windows, xnnpack_backend and extension_threadpool define duplicated
# symbols, so when the xnnpack backend is in the build, drop
# extension_threadpool from the link line. EXTENSION_THREADPOOL is expanded
# into link_libraries below; an empty string makes it a no-op there.
if(WIN32 AND TARGET xnnpack_backend)
set(EXTENSION_THREADPOOL "")
else()
set(EXTENSION_THREADPOOL "extension_threadpool")
endif()

# Extra compile option and include dir for pthreadpool.
# EXTENSION_THREADPOOL expands to empty on Windows+xnnpack builds (set
# earlier in this file) to avoid duplicate-symbol link errors.
if(EXECUTORCH_BUILD_PTHREADPOOL)
list(APPEND _common_compile_options -DET_USE_THREADPOOL)
list(APPEND link_libraries ${EXTENSION_THREADPOOL} pthreadpool)
list(APPEND _common_include_directories
${XNNPACK_ROOT}/third-party/pthreadpool/include
)
endif()

# Extra sources for cpuinfo
if(EXECUTORCH_BUILD_CPUINFO)
list(APPEND link_libraries extension_threadpool cpuinfo)
list(APPEND link_libraries ${EXTENSION_THREADPOOL} cpuinfo)
list(APPEND _common_include_directories
${XNNPACK_ROOT}/third-party/cpuinfo/include
)
Expand Down Expand Up @@ -216,9 +231,6 @@ target_include_directories(llama_main PUBLIC ${_common_include_directories})
target_link_libraries(llama_main PUBLIC llama_runner ${link_libraries})
target_compile_options(llama_main PUBLIC ${_common_compile_options})

if(APPLE)
target_link_options_shared_lib(executorch)
endif()

# Print all summary
executorch_print_configuration_summary()
16 changes: 16 additions & 0 deletions examples/models/llama/install_requirements.bat
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
rem Install snakeviz for cProfile flamegraph
rem Install sentencepiece for llama tokenizer
pip install snakeviz sentencepiece

rem Install torchao from the in-tree checkout. %~dp0 expands to the directory
rem containing this script, so this works from any current directory.
pip install "%~dp0/../../../third-party/ao"

rem Install lm-eval for Model Evaluation with lm-evaluation-harness
rem Install tiktoken for tokenizer
pip install lm_eval==0.4.5
pip install tiktoken blobfile
rem Restore numpy if >= 2.0
pip install "numpy<2.0"

rem Call the install helper for further setup. Anchor the path to this
rem script's directory (like the torchao install above) so the script does
rem not have to be run from the repository root.
python "%~dp0install_requirement_helper.py"
8 changes: 7 additions & 1 deletion examples/models/llama/runner/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,11 @@ include(${EXECUTORCH_SRCS_FILE})

# build llama_runner library
list(TRANSFORM _llama_runner__srcs PREPEND "${EXECUTORCH_ROOT}/")
# On Windows, point the imported extension_module target at its import
# library so consumers can link against the DLL.
# NOTE(review): this mutates a target defined elsewhere and hardcodes a path
# under "cmake-out/", which breaks if the build directory has a different
# name -- consider deriving the path from the target or a cache variable.
if (WIN32)
set_property(TARGET extension_module PROPERTY
IMPORTED_IMPLIB "${EXECUTORCH_ROOT}/cmake-out/lib/extension_module.lib")

endif()

target_include_directories(
extension_module INTERFACE ${_common_include_directories}
Expand All @@ -52,6 +57,7 @@ list(APPEND _llama_runner__srcs
if(CMAKE_TOOLCHAIN_IOS
OR ANDROID
OR APPLE
OR WIN32
)
# Building a share library on iOS requires code signing On Android we see
# duplicated registration when using shared lib
Expand Down Expand Up @@ -84,4 +90,4 @@ target_link_libraries(llama_runner PUBLIC ${llama_runner_deps})
target_include_directories(
llama_runner INTERFACE ${_common_include_directories} ${EXECUTORCH_ROOT}
)
target_compile_options(llama_runner PUBLIC ${_preprocessor_flag})
target_compile_options(llama_runner PUBLIC ${_preprocessor_flag})
5 changes: 4 additions & 1 deletion extension/data_loader/file_data_loader.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,10 @@
#include <cstring>
#include <limits>

#include <executorch/runtime/platform/unistd.h>
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>

#include <executorch/runtime/core/error.h>
#include <executorch/runtime/core/result.h>
Expand Down Expand Up @@ -71,6 +71,9 @@ FileDataLoader::~FileDataLoader() {
std::free(const_cast<char*>(file_name_));
// fd_ can be -1 if this instance was moved from, but closing a negative fd is
// safe (though it will return an error).
if (fd_ == -1) {
return;
}
::close(fd_);
}

Expand Down
41 changes: 41 additions & 0 deletions extension/data_loader/mman.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree.
 */

// This file ensures that mman.h compatible functions are defined in the global
// namespace for windows and posix environments.

#pragma once

#include <executorch/runtime/platform/compiler.h>

#ifndef _WIN32

#include <sys/mman.h>
#include <unistd.h>

// Returns the OS page size in bytes (POSIX).
ET_INLINE long get_os_page_size() {
  return sysconf(_SC_PAGESIZE);
}

#else

// Prevent <windows.h> from defining the min/max macros (which break
// std::min/std::max), but restore the caller's NOMINMAX state afterwards so
// including this header has no observable effect on the translation unit.
// (The previous unconditional #define/#undef pair clobbered a NOMINMAX that
// the including code had defined itself.)
#ifndef NOMINMAX
#define NOMINMAX
#define ET_MMAN_H_UNDEF_NOMINMAX
#endif
#include <windows.h>
#ifdef ET_MMAN_H_UNDEF_NOMINMAX
#undef NOMINMAX
#undef ET_MMAN_H_UNDEF_NOMINMAX
#endif
#include <io.h>

#include <executorch/extension/data_loader/mman_windows.h>

// Returns the effective "page size" to use for mapping on Windows.
// MapViewOfFile offsets must be aligned to dwAllocationGranularity (which is
// >= dwPageSize), so return the larger of the two values.
ET_INLINE long get_os_page_size() {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  long pagesize = si.dwAllocationGranularity > si.dwPageSize
      ? si.dwAllocationGranularity
      : si.dwPageSize;
  return pagesize;
}

#endif
Loading