Merge "Revert "Build tests even if we --skip-deps."" into main
diff --git a/build/cmake/adjust_api_level.cmake b/build/cmake/adjust_api_level.cmake
index 780d76c..51e31ab 100644
--- a/build/cmake/adjust_api_level.cmake
+++ b/build/cmake/adjust_api_level.cmake
@@ -1,3 +1,4 @@
+include(${CMAKE_ANDROID_NDK}/build/cmake/abis.cmake)
include(${CMAKE_ANDROID_NDK}/build/cmake/platforms.cmake)
function(adjust_api_level api_level result_name)
@@ -39,15 +40,15 @@
string(REPLACE "android-" "" result ${api_level})
endif()
- # And for LP64 we need to pull up to 21. No diagnostic is provided here
- # because minSdkVersion < 21 is valid for the project even though it may not
- # be for this ABI.
- if(ANDROID_ABI MATCHES "64(-v8a)?$" AND result LESS 21)
+ # Pull up any ABI-specific minimum API levels.
+ set(min_for_abi ${NDK_ABI_${ANDROID_ABI}_MIN_OS_VERSION})
+
+ if(result LESS min_for_abi)
message(STATUS
"android-${result} is not supported for ${ANDROID_ABI}. Using minimum "
- "supported LP64 version 21.")
- set(api_level android-21)
- set(result 21)
+ "supported ${ANDROID_ABI} version ${min_for_abi}.")
+ set(api_level android-${min_for_abi})
+ set(result ${min_for_abi})
endif()
# ANDROID_PLATFORM beyond the maximum is an error. The correct way to specify
diff --git a/build/cmake/android-legacy.toolchain.cmake b/build/cmake/android-legacy.toolchain.cmake
index 0d81f38..a84d350 100644
--- a/build/cmake/android-legacy.toolchain.cmake
+++ b/build/cmake/android-legacy.toolchain.cmake
@@ -30,6 +30,7 @@
# ANDROID_ARM_MODE
# ANDROID_DISABLE_FORMAT_STRING_CHECKS
# ANDROID_CCACHE
+# ANDROID_SANITIZE
cmake_minimum_required(VERSION 3.6.0)
@@ -111,6 +112,8 @@
set(ANDROID_ABI mips)
elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^mips64el-linux-android-")
set(ANDROID_ABI mips64)
+ elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^riscv64-")
+ set(ANDROID_ABI riscv64)
endif()
endif()
if(ANDROID_NATIVE_API_LEVEL AND NOT ANDROID_PLATFORM)
@@ -172,6 +175,7 @@
set(ANDROID_ARM_NEON TRUE)
endif()
+include(${ANDROID_NDK}/build/cmake/abis.cmake)
include(${ANDROID_NDK}/build/cmake/platforms.cmake)
# If no platform version was chosen by the user, default to the minimum version
@@ -214,12 +218,15 @@
string(REPLACE "android-" "" ANDROID_PLATFORM_LEVEL ${ANDROID_PLATFORM})
endif()
-# And for LP64 we need to pull up to 21. No diagnostic is provided here because
-# minSdkVersion < 21 is valid for the project even though it may not be for this
-# ABI.
-if(ANDROID_ABI MATCHES "64(-v8a)?$" AND ANDROID_PLATFORM_LEVEL LESS 21)
- set(ANDROID_PLATFORM android-21)
- set(ANDROID_PLATFORM_LEVEL 21)
+# Pull up any ABI-specific minimum API levels.
+set(min_for_abi ${NDK_ABI_${ANDROID_ABI}_MIN_OS_VERSION})
+
+if(ANDROID_PLATFORM_LEVEL LESS min_for_abi)
+ message(STATUS
+ "${ANDROID_PLATFORM} is not supported for ${ANDROID_ABI}. Using minimum "
+ "supported ${ANDROID_ABI} version ${min_for_abi}.")
+ set(ANDROID_PLATFORM android-${min_for_abi})
+ set(ANDROID_PLATFORM_LEVEL ${min_for_abi})
endif()
# ANDROID_PLATFORM beyond the maximum is an error. The correct way to specify
@@ -245,6 +252,11 @@
for more information.")
endif()
+if("hwaddress" IN_LIST ANDROID_SANITIZE AND "${CMAKE_ANDROID_STL_TYPE}" STREQUAL "c++_static")
+ message(FATAL_ERROR "\
+ hwaddress does not support c++_static. Use system or c++_shared.")
+endif()
+
set(ANDROID_PIE TRUE)
if(NOT ANDROID_ARM_MODE)
set(ANDROID_ARM_MODE thumb)
@@ -263,6 +275,7 @@
ANDROID_PLATFORM
ANDROID_STL
ANDROID_TOOLCHAIN
+ ANDROID_USE_LEGACY_TOOLCHAIN_FILE
)
# Standard cross-compiling stuff.
@@ -320,6 +333,11 @@
set(CMAKE_SYSTEM_PROCESSOR x86_64)
set(ANDROID_TOOLCHAIN_NAME x86_64-linux-android)
set(ANDROID_LLVM_TRIPLE x86_64-none-linux-android)
+elseif(ANDROID_ABI STREQUAL riscv64)
+ set(ANDROID_SYSROOT_ABI riscv64)
+ set(CMAKE_SYSTEM_PROCESSOR riscv64)
+ set(ANDROID_TOOLCHAIN_NAME riscv64-linux-android)
+ set(ANDROID_LLVM_TRIPLE riscv64-none-linux-android)
else()
message(FATAL_ERROR "Invalid Android ABI: ${ANDROID_ABI}.")
endif()
@@ -332,6 +350,9 @@
set(ANDROID_COMPILER_FLAGS_RELEASE)
set(ANDROID_LINKER_FLAGS)
set(ANDROID_LINKER_FLAGS_EXE)
+set(ANDROID_LINKER_FLAGS_RELEASE)
+set(ANDROID_LINKER_FLAGS_RELWITHDEBINFO)
+set(ANDROID_LINKER_FLAGS_MINSIZEREL)
# STL.
set(ANDROID_CXX_STANDARD_LIBRARIES)
@@ -347,7 +368,7 @@
list(APPEND ANDROID_COMPILER_FLAGS_CXX "-nostdinc++")
list(APPEND ANDROID_LINKER_FLAGS "-nostdlib++")
else()
- message(FATAL_ERROR "Invalid Android STL: ${ANDROID_STL}.")
+ message(FATAL_ERROR "Invalid STL: ${ANDROID_STL}.")
endif()
if(CMAKE_HOST_SYSTEM_NAME STREQUAL Linux)
@@ -365,8 +386,6 @@
# Toolchain.
set(ANDROID_TOOLCHAIN_ROOT
"${ANDROID_NDK}/toolchains/llvm/prebuilt/${ANDROID_HOST_TAG}")
-set(ANDROID_TOOLCHAIN_PREFIX
- "${ANDROID_TOOLCHAIN_ROOT}/bin/${ANDROID_TOOLCHAIN_NAME}-")
list(APPEND CMAKE_PREFIX_PATH "${ANDROID_TOOLCHAIN_ROOT}")
@@ -444,6 +463,20 @@
-Werror=unguarded-availability)
endif()
+if("hwaddress" IN_LIST ANDROID_SANITIZE)
+ list(APPEND ANDROID_COMPILER_FLAGS -fsanitize=hwaddress -fno-omit-frame-pointer)
+ list(APPEND ANDROID_LINKER_FLAGS -fsanitize=hwaddress)
+endif()
+
+if("memtag" IN_LIST ANDROID_SANITIZE)
+ list(APPEND ANDROID_COMPILER_FLAGS -fsanitize=memtag-stack -fno-omit-frame-pointer)
+ list(APPEND ANDROID_LINKER_FLAGS -fsanitize=memtag-stack,memtag-heap -fsanitize-memtag-mode=sync)
+ if(ANDROID_ABI STREQUAL arm64-v8a)
+ list(APPEND ANDROID_COMPILER_FLAGS -march=armv8-a+memtag)
+ list(APPEND ANDROID_LINKER_FLAGS -march=armv8-a+memtag)
+ endif()
+endif()
+
# https://github.com/android/ndk/issues/885
# If we're using LLD we need to use a slower build-id algorithm to work around
# the old version of LLDB in Android Studio, which doesn't understand LLD's
@@ -460,8 +493,12 @@
endif()
list(APPEND ANDROID_LINKER_FLAGS -Wl,--fatal-warnings)
-list(APPEND ANDROID_LINKER_FLAGS -Wl,--gc-sections)
-list(APPEND ANDROID_LINKER_FLAGS_EXE -Wl,--gc-sections)
+
+# --gc-sections should not be present for debug builds because that can strip
+# functions that the user may want to evaluate while debugging.
+list(APPEND ANDROID_LINKER_FLAGS_RELEASE -Wl,--gc-sections)
+list(APPEND ANDROID_LINKER_FLAGS_RELWITHDEBINFO -Wl,--gc-sections)
+list(APPEND ANDROID_LINKER_FLAGS_MINSIZEREL -Wl,--gc-sections)
# Debug and release flags.
list(APPEND ANDROID_COMPILER_FLAGS_RELEASE -O3)
@@ -545,6 +582,9 @@
string(REPLACE ";" " " ANDROID_COMPILER_FLAGS_RELEASE "${ANDROID_COMPILER_FLAGS_RELEASE}")
string(REPLACE ";" " " ANDROID_LINKER_FLAGS "${ANDROID_LINKER_FLAGS}")
string(REPLACE ";" " " ANDROID_LINKER_FLAGS_EXE "${ANDROID_LINKER_FLAGS_EXE}")
+string(REPLACE ";" " " ANDROID_LINKER_FLAGS_RELEASE "${ANDROID_LINKER_FLAGS_RELEASE}")
+string(REPLACE ";" " " ANDROID_LINKER_FLAGS_RELWITHDEBINFO "${ANDROID_LINKER_FLAGS_RELWITHDEBINFO}")
+string(REPLACE ";" " " ANDROID_LINKER_FLAGS_MINSIZEREL "${ANDROID_LINKER_FLAGS_MINSIZEREL}")
if(ANDROID_CCACHE)
set(CMAKE_C_COMPILER_LAUNCHER "${ANDROID_CCACHE}")
@@ -555,7 +595,6 @@
set(CMAKE_AR "${ANDROID_AR}" CACHE FILEPATH "Archiver")
set(CMAKE_RANLIB "${ANDROID_RANLIB}" CACHE FILEPATH "Ranlib")
set(CMAKE_STRIP "${ANDROID_STRIP}" CACHE FILEPATH "Strip")
-set(_CMAKE_TOOLCHAIN_PREFIX "${ANDROID_TOOLCHAIN_PREFIX}")
if(ANDROID_ABI STREQUAL "x86" OR ANDROID_ABI STREQUAL "x86_64")
set(CMAKE_ASM_NASM_COMPILER
@@ -604,6 +643,15 @@
set(CMAKE_SHARED_LINKER_FLAGS "${ANDROID_LINKER_FLAGS} ${CMAKE_SHARED_LINKER_FLAGS}")
set(CMAKE_MODULE_LINKER_FLAGS "${ANDROID_LINKER_FLAGS} ${CMAKE_MODULE_LINKER_FLAGS}")
set(CMAKE_EXE_LINKER_FLAGS "${ANDROID_LINKER_FLAGS} ${ANDROID_LINKER_FLAGS_EXE} ${CMAKE_EXE_LINKER_FLAGS}")
+set(CMAKE_SHARED_LINKER_FLAGS_RELEASE "${ANDROID_LINKER_FLAGS_RELEASE} ${CMAKE_SHARED_LINKER_FLAGS_RELEASE}")
+set(CMAKE_MODULE_LINKER_FLAGS_RELEASE "${ANDROID_LINKER_FLAGS_RELEASE} ${CMAKE_MODULE_LINKER_FLAGS_RELEASE}")
+set(CMAKE_EXE_LINKER_FLAGS_RELEASE "${ANDROID_LINKER_FLAGS_RELEASE} ${CMAKE_EXE_LINKER_FLAGS_RELEASE}")
+set(CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO "${ANDROID_LINKER_FLAGS_RELWITHDEBINFO} ${CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO}")
+set(CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO "${ANDROID_LINKER_FLAGS_RELWITHDEBINFO} ${CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO}")
+set(CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${ANDROID_LINKER_FLAGS_RELWITHDEBINFO} ${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO}")
+set(CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL "${ANDROID_LINKER_FLAGS_MINSIZEREL} ${CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL}")
+set(CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL "${ANDROID_LINKER_FLAGS_MINSIZEREL} ${CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL}")
+set(CMAKE_EXE_LINKER_FLAGS_MINSIZEREL "${ANDROID_LINKER_FLAGS_MINSIZEREL} ${CMAKE_EXE_LINKER_FLAGS_MINSIZEREL}")
# Compatibility for read-only variables.
# Read-only variables for compatibility with the other toolchain file.
@@ -649,6 +697,8 @@
set(X86 TRUE)
elseif(ANDROID_ABI STREQUAL x86_64)
set(X86_64 TRUE)
+elseif(ANDROID_ABI STREQUAL riscv64)
+ set(RISCV64 TRUE)
endif()
set(ANDROID_NDK_HOST_SYSTEM_NAME ${ANDROID_HOST_TAG})
set(ANDROID_NDK_ABI_NAME ${ANDROID_ABI})
@@ -680,6 +730,8 @@
set(CMAKE_ANDROID_ARCH x86)
elseif(ANDROID_ABI STREQUAL x86_64)
set(CMAKE_ANDROID_ARCH x86_64)
+ elseif(ANDROID_ABI STREQUAL riscv64)
+ set(CMAKE_ANDROID_ARCH riscv64)
endif()
# https://github.com/android/ndk/issues/1012
@@ -687,10 +739,6 @@
set(CMAKE_C_ANDROID_TOOLCHAIN_MACHINE "${ANDROID_TOOLCHAIN_NAME}")
set(CMAKE_CXX_ANDROID_TOOLCHAIN_MACHINE "${ANDROID_TOOLCHAIN_NAME}")
- set(CMAKE_ASM_ANDROID_TOOLCHAIN_PREFIX "${ANDROID_TOOLCHAIN_PREFIX}")
- set(CMAKE_C_ANDROID_TOOLCHAIN_PREFIX "${ANDROID_TOOLCHAIN_PREFIX}")
- set(CMAKE_CXX_ANDROID_TOOLCHAIN_PREFIX "${ANDROID_TOOLCHAIN_PREFIX}")
-
set(CMAKE_ASM_ANDROID_TOOLCHAIN_SUFFIX "${ANDROID_TOOLCHAIN_SUFFIX}")
set(CMAKE_C_ANDROID_TOOLCHAIN_SUFFIX "${ANDROID_TOOLCHAIN_SUFFIX}")
set(CMAKE_CXX_ANDROID_TOOLCHAIN_SUFFIX "${ANDROID_TOOLCHAIN_SUFFIX}")
diff --git a/build/cmake/android.toolchain.cmake b/build/cmake/android.toolchain.cmake
index 898873c..48cee3f 100644
--- a/build/cmake/android.toolchain.cmake
+++ b/build/cmake/android.toolchain.cmake
@@ -30,6 +30,7 @@
# ANDROID_ARM_MODE
# ANDROID_DISABLE_FORMAT_STRING_CHECKS
# ANDROID_CCACHE
+# ANDROID_SANITIZE
cmake_minimum_required(VERSION 3.6.0)
@@ -118,6 +119,8 @@
set(CMAKE_ANDROID_ARCH_ABI x86)
elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^x86_64-")
set(CMAKE_ANDROID_ARCH_ABI x86_64)
+ elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^riscv64-")
+ set(CMAKE_ANDROID_ARCH_ABI riscv64)
else()
set(CMAKE_ANDROID_ARCH_ABI armeabi-v7a)
endif()
@@ -180,6 +183,11 @@
set(CMAKE_ANDROID_STL_TYPE ${ANDROID_STL})
endif()
+if("hwaddress" IN_LIST ANDROID_SANITIZE AND "${CMAKE_ANDROID_STL_TYPE}" STREQUAL "c++_static")
+ message(FATAL_ERROR "\
+ hwaddress does not support c++_static. Use system or c++_shared.")
+endif()
+
if("${CMAKE_ANDROID_STL_TYPE}" STREQUAL "gnustl_shared" OR
"${CMAKE_ANDROID_STL_TYPE}" STREQUAL "gnustl_static" OR
"${CMAKE_ANDROID_STL_TYPE}" STREQUAL "stlport_shared" OR
@@ -247,6 +255,7 @@
ANDROID_STL
ANDROID_TOOLCHAIN
ANDROID_USE_LEGACY_TOOLCHAIN_FILE
+ ANDROID_SANITIZE
)
if(DEFINED ANDROID_NO_UNDEFINED AND NOT DEFINED ANDROID_ALLOW_UNDEFINED_SYMBOLS)
diff --git a/build/cmake/exports.cmake b/build/cmake/exports.cmake
index af74fb5..e78a2bd 100644
--- a/build/cmake/exports.cmake
+++ b/build/cmake/exports.cmake
@@ -72,6 +72,8 @@
set(X86 TRUE)
elseif(CMAKE_ANDROID_ARCH_ABI STREQUAL "x86_64")
set(X86_64 TRUE)
+elseif(CMAKE_ANDROID_ARCH_ABI STREQUAL "riscv64")
+ set(RISCV64 TRUE)
endif()
set(ANDROID_NDK_HOST_SYSTEM_NAME "${ANDROID_HOST_TAG}")
set(ANDROID_NDK_ABI_NAME "${CMAKE_ANDROID_ARCH_ABI}")
diff --git a/build/cmake/flags.cmake b/build/cmake/flags.cmake
index d2cbb91..b231f3c 100644
--- a/build/cmake/flags.cmake
+++ b/build/cmake/flags.cmake
@@ -36,6 +36,20 @@
" -Werror=unguarded-availability")
endif()
+if("hwaddress" IN_LIST ANDROID_SANITIZE)
+ string(APPEND _ANDROID_NDK_INIT_CFLAGS " -fsanitize=hwaddress -fno-omit-frame-pointer")
+ string(APPEND _ANDROID_NDK_INIT_LDFLAGS " -fsanitize=hwaddress")
+endif()
+
+if("memtag" IN_LIST ANDROID_SANITIZE)
+ string(APPEND _ANDROID_NDK_INIT_CFLAGS " -fsanitize=memtag-stack -fno-omit-frame-pointer")
+ string(APPEND _ANDROID_NDK_INIT_LDFLAGS " -fsanitize=memtag-stack,memtag-heap -fsanitize-memtag-mode=sync")
+ if(CMAKE_ANDROID_ARCH_ABI STREQUAL "arm64-v8a")
+ string(APPEND _ANDROID_NDK_INIT_CFLAGS " -march=armv8-a+memtag")
+ string(APPEND _ANDROID_NDK_INIT_LDFLAGS " -march=armv8-a+memtag")
+ endif()
+endif()
+
string(APPEND _ANDROID_NDK_INIT_CFLAGS_DEBUG " -fno-limit-debug-info")
# If we're using LLD we need to use a slower build-id algorithm to work around
@@ -56,6 +70,9 @@
endif()
string(APPEND _ANDROID_NDK_INIT_LDFLAGS " -Wl,--fatal-warnings")
+# This should only be set for release modes, but CMake doesn't provide a way for
+# us to be that specific in the new toolchain file.
+# https://github.com/android/ndk/issues/1813
string(APPEND _ANDROID_NDK_INIT_LDFLAGS " -Wl,--gc-sections")
string(APPEND _ANDROID_NDK_INIT_LDFLAGS_EXE " -Wl,--gc-sections")
diff --git a/build/cmake/hooks/pre/Determine-Compiler.cmake b/build/cmake/hooks/pre/Determine-Compiler.cmake
index ef0228f..79cc6c0 100644
--- a/build/cmake/hooks/pre/Determine-Compiler.cmake
+++ b/build/cmake/hooks/pre/Determine-Compiler.cmake
@@ -20,21 +20,23 @@
return()
endif()
-# If we don't explicitly set the target CMake will ID the compiler using the
-# default target, causing MINGW to be defined when a Windows host is used.
-# https://github.com/android/ndk/issues/1581
-# https://gitlab.kitware.com/cmake/cmake/-/issues/22647
-if(CMAKE_ANDROID_ARCH_ABI STREQUAL armeabi-v7a)
- set(ANDROID_LLVM_TRIPLE armv7-none-linux-androideabi)
-elseif(CMAKE_ANDROID_ARCH_ABI STREQUAL arm64-v8a)
- set(ANDROID_LLVM_TRIPLE aarch64-none-linux-android)
-elseif(CMAKE_ANDROID_ARCH_ABI STREQUAL x86)
- set(ANDROID_LLVM_TRIPLE i686-none-linux-android)
-elseif(CMAKE_ANDROID_ARCH_ABI STREQUAL x86_64)
- set(ANDROID_LLVM_TRIPLE x86_64-none-linux-android)
-else()
- message(FATAL_ERROR "Invalid Android ABI: ${ANDROID_ABI}.")
+if(${CMAKE_VERSION} VERSION_LESS "3.22.0")
+ # If we don't explicitly set the target CMake will ID the compiler using the
+ # default target, causing MINGW to be defined when a Windows host is used.
+ # https://github.com/android/ndk/issues/1581
+ # https://gitlab.kitware.com/cmake/cmake/-/issues/22647
+ if(CMAKE_ANDROID_ARCH_ABI STREQUAL armeabi-v7a)
+ set(ANDROID_LLVM_TRIPLE armv7-none-linux-androideabi)
+ elseif(CMAKE_ANDROID_ARCH_ABI STREQUAL arm64-v8a)
+ set(ANDROID_LLVM_TRIPLE aarch64-none-linux-android)
+ elseif(CMAKE_ANDROID_ARCH_ABI STREQUAL x86)
+ set(ANDROID_LLVM_TRIPLE i686-none-linux-android)
+ elseif(CMAKE_ANDROID_ARCH_ABI STREQUAL x86_64)
+ set(ANDROID_LLVM_TRIPLE x86_64-none-linux-android)
+ else()
+ message(FATAL_ERROR "Invalid Android ABI: ${ANDROID_ABI}.")
+ endif()
+ set(CMAKE_ASM_COMPILER_TARGET "${ANDROID_LLVM_TRIPLE}${CMAKE_SYSTEM_VERSION}")
+ set(CMAKE_C_COMPILER_TARGET "${ANDROID_LLVM_TRIPLE}${CMAKE_SYSTEM_VERSION}")
+ set(CMAKE_CXX_COMPILER_TARGET "${ANDROID_LLVM_TRIPLE}${CMAKE_SYSTEM_VERSION}")
endif()
-set(CMAKE_ASM_COMPILER_TARGET "${ANDROID_LLVM_TRIPLE}${CMAKE_SYSTEM_VERSION}")
-set(CMAKE_C_COMPILER_TARGET "${ANDROID_LLVM_TRIPLE}${CMAKE_SYSTEM_VERSION}")
-set(CMAKE_CXX_COMPILER_TARGET "${ANDROID_LLVM_TRIPLE}${CMAKE_SYSTEM_VERSION}")
diff --git a/build/core/build-binary.mk b/build/core/build-binary.mk
index bdb2411..4a15ed3 100644
--- a/build/core/build-binary.mk
+++ b/build/core/build-binary.mk
@@ -155,20 +155,7 @@
LOCAL_CPP_EXTENSION := $(default-c++-extensions)
endif
-ifneq ($(NDK_APP_STL),system)
- LOCAL_CFLAGS += -nostdinc++
- LOCAL_LDFLAGS += -nostdlib++
-else
- # TODO: Remove when https://reviews.llvm.org/D55856 is merged.
- #
- # The system STL Android.mk will export -lstdc++, but the Clang driver will
- # helpfully rewrite -lstdc++ to whatever the default C++ stdlib linker
- # arguments are, except in the presence of -nostdlib and -nodefaultlibs.
- # That part of the driver does not account for -nostdlib++. We can fix the
- # behavior by using -stdlib=libstdc++ so it rewrites -lstdc++ to -lstdc++
- # instead of -lc++.
- LOCAL_LDFLAGS += -stdlib=libstdc++
-endif
+include $(BUILD_SYSTEM)/stl.mk
#
# If LOCAL_ALLOW_UNDEFINED_SYMBOLS is not true, the linker will allow the generation
@@ -493,6 +480,12 @@
$(LOCAL_BUILT_MODULE): PRIVATE_CC := $(TARGET_CC)
$(LOCAL_BUILT_MODULE): PRIVATE_SYSROOT_API_LIB_DIR := $(SYSROOT_API_LIB_DIR)
+ifeq (,$(call module_needs_clangxx,$(LOCAL_MODULE)))
+$(LOCAL_BUILT_MODULE): PRIVATE_LD_DRIVER := $(TARGET_CC)
+else
+$(LOCAL_BUILT_MODULE): PRIVATE_LD_DRIVER := $(TARGET_CXX)
+endif
+
ifeq ($(call module-get-class,$(LOCAL_MODULE)),STATIC_LIBRARY)
#
diff --git a/build/core/default-build-commands.mk b/build/core/default-build-commands.mk
index 80dd875..59c0592 100644
--- a/build/core/default-build-commands.mk
+++ b/build/core/default-build-commands.mk
@@ -42,8 +42,7 @@
TARGET_DISABLE_FORMAT_STRING_CFLAGS := -Wno-error=format-security
define cmd-build-shared-library
-$(PRIVATE_CXX) \
- -Wl,--gc-sections \
+$(PRIVATE_LD_DRIVER) \
-Wl,-soname,$(notdir $(LOCAL_BUILT_MODULE)) \
-shared \
$(PRIVATE_LINKER_OBJECTS_AND_LIBRARIES) \
@@ -58,8 +57,7 @@
# directly needed. ld.gold (default for all other architectures) doesn't emulate
# this buggy behavior.
define cmd-build-executable
-$(PRIVATE_CXX) \
- -Wl,--gc-sections \
+$(PRIVATE_LD_DRIVER) \
-Wl,-rpath-link=$(call host-path,$(PRIVATE_SYSROOT_API_LIB_DIR)) \
-Wl,-rpath-link=$(call host-path,$(TARGET_OUT)) \
$(PRIVATE_LINKER_OBJECTS_AND_LIBRARIES) \
@@ -131,6 +129,10 @@
-target $(LLVM_TRIPLE)$(TARGET_PLATFORM_LEVEL) \
-no-canonical-prefixes \
+ifeq ($(APP_OPTIM),release)
+ GLOBAL_LDFLAGS += -Wl,--gc-sections
+endif
+
GLOBAL_CXXFLAGS = $(GLOBAL_CFLAGS) -fno-exceptions -fno-rtti
TARGET_CFLAGS =
diff --git a/build/core/definitions.mk b/build/core/definitions.mk
index 9312fb7..437ac6b 100644
--- a/build/core/definitions.mk
+++ b/build/core/definitions.mk
@@ -690,31 +690,10 @@
$(eval __extensions := $(call module-get-c++-extensions,$1))\
$(filter $(foreach __extension,$(__extensions),%$(__extension)),$(__files))
-# Returns true if a module has C++ sources
-#
+# Returns a non-empty string if a module has C++ sources
module-has-c++-sources = $(strip $(call module-get-c++-sources,$1) \
$(filter true,$(__ndk_modules.$1.HAS_CPP)))
-
-# Add C++ dependencies to any module that has C++ sources.
-# $1: list of C++ runtime static libraries (if any)
-# $2: list of C++ runtime shared libraries (if any)
-# $3: list of C++ runtime ldlibs (if any)
-modules-add-c++-dependencies = \
- $(foreach __module,$(__ndk_modules),\
- $(if $(call module-has-c++-sources,$(__module)),\
- $(call ndk_log,Module '$(__module)' has C++ sources)\
- $(call module-add-c++-deps,$(__module),$1,$2,$3),\
- )\
- $(if $(call module-has-c++-features,$(__module),rtti exceptions),\
- $(if $(filter system,$(NDK_APP_STL)),\
- $(call ndk_log,Module '$(__module)' uses C++ features and the system STL)\
- $(call import-module,cxx-stl/llvm-libc++)\
- $(call import-module,cxx-stl/llvm-libc++abi)\
- $(call module-add-c++-deps,$(__module),c++abi)))\
- )
-
-
# Return the compiler flags used to compile a C++ module
# Order matters and should match the one used by the build command
module-get-c++-flags = $(strip \
@@ -786,21 +765,20 @@
$(if $(filter $2,$(__cxxflags)),true,)\
)
-# Add standard C++ dependencies to a given module
+# Returns a non-empty string if the module should be linked with clang++ rather
+# than clang.
#
-# $1: module name
-# $2: list of C++ runtime static libraries (if any)
-# $3: list of C++ runtime shared libraries (if any)
-# $4: list of C++ runtime ldlibs (if any)
-#
-module-add-c++-deps = \
- $(if $(call strip,$2),$(call ndk_log,Add dependency '$(call strip,$2)' to module '$1'))\
- $(eval __ndk_modules.$1.STATIC_LIBRARIES += $(2))\
- $(if $(call strip,$3),$(call ndk_log,Add dependency '$(call strip,$3)' to module '$1'))\
- $(eval __ndk_modules.$1.SHARED_LIBRARIES += $(3))\
- $(if $(call strip,$4),$(call ndk_log,Add dependency '$(call strip,$4)' to module '$1'))\
- $(eval __ndk_modules.$1.LDLIBS += $(4))
-
+# A module should use clang++ iff it has C++ sources itself or if it depends on
+# a static library with C++ sources. We do not need to use clang++ for shared
+# library dependencies.
+module_needs_clangxx = $(strip \
+ $(call module-has-c++-sources,$1)\
+ $(foreach __dep,$(call module-get-all-dependencies,$1),\
+ $(if $(call module-is-static-library,$(__dep)),\
+ $(call module-has-c++-sources,$(__dep))\
+ )\
+ )\
+)
# =============================================================================
#
@@ -1198,6 +1176,7 @@
NDK_APP_VARS_OPTIONAL += \
APP_WRAP_SH_armeabi-v7a \
APP_WRAP_SH_arm64-v8a \
+ APP_WRAP_SH_riscv64 \
APP_WRAP_SH_x86 \
APP_WRAP_SH_x86_64 \
@@ -1886,21 +1865,8 @@
#
# The list of registered STL implementations we support
-NDK_STL_LIST :=
+NDK_STL_LIST := c++_shared c++_static system none
-# Used internally to register a given STL implementation, see below.
-#
-# $1: STL name as it appears in APP_STL (e.g. system)
-# $2: STL module path (e.g. cxx-stl/system)
-# $3: list of static libraries all modules will depend on
-# $4: list of shared libraries all modules will depend on
-#
-ndk-stl-register = \
- $(eval __ndk_stl := $(strip $1)) \
- $(eval NDK_STL_LIST += $(__ndk_stl)) \
- $(eval NDK_STL.$(__ndk_stl).IMPORT_MODULE := $(strip $2)) \
- $(eval NDK_STL.$(__ndk_stl).STATIC_LIBS := $(strip $(call strip-lib-prefix,$3))) \
- $(eval NDK_STL.$(__ndk_stl).SHARED_LIBS := $(strip $(call strip-lib-prefix,$4))) \
# Called to check that the value of APP_STL is a valid one.
# $1: STL name as it apperas in APP_STL (e.g. 'system')
@@ -1911,42 +1877,6 @@
$(call __ndk_info,Please use one of the following instead: $(NDK_STL_LIST))\
$(call __ndk_error,Aborting))
-# Called before the top-level Android.mk is parsed to
-# select the STL implementation.
-# $1: STL name as it appears in APP_STL (e.g. system)
-#
-ndk-stl-select = \
- $(if $(filter none,$1),,\
- $(if $(NDK_STL.$1.IMPORT_MODULE),\
- $(call import-module,$(NDK_STL.$1.IMPORT_MODULE)) \
- )\
- )
-
-# Called after all Android.mk files are parsed to add
-# proper STL dependencies to every C++ module.
-# $1: STL name as it appears in APP_STL (e.g. system)
-#
-ndk-stl-add-dependencies = \
- $(call modules-add-c++-dependencies,\
- $(NDK_STL.$1.STATIC_LIBS),\
- $(NDK_STL.$1.SHARED_LIBS),\
- $(NDK_STL.$1.LDLIBS))
-
-$(call ndk-stl-register,none)
-$(call ndk-stl-register,system)
-
-$(call ndk-stl-register,\
- c++_static,\
- cxx-stl/llvm-libc++,\
- c++_static\
- )
-
-$(call ndk-stl-register,\
- c++_shared,\
- cxx-stl/llvm-libc++,\
- ,\
- c++_shared\
- )
ifneq (,$(NDK_UNIT_TESTS))
$(call ndk-run-all-tests)
diff --git a/build/core/install_stl.mk b/build/core/install_stl.mk
new file mode 100644
index 0000000..dcea75d
--- /dev/null
+++ b/build/core/install_stl.mk
@@ -0,0 +1,22 @@
+# Not bothering to check if there's actually any C++ code in the app. c++_shared
+# is not the default, so if someone has set it explicitly we might as well do
+# what they say.
+ifeq ($(APP_STL),c++_shared)
+
+NDK_LIBCXX_TARGET := $(NDK_APP_DST_DIR)/libc++_shared.so
+NDK_LIBCXX_LIB_PATH := $(SYSROOT_LIB_DIR)/libc++_shared.so
+
+installed_modules: $(NDK_LIBCXX_TARGET)
+
+$(NDK_LIBCXX_TARGET): PRIVATE_ABI := $(TARGET_ARCH_ABI)
+$(NDK_LIBCXX_TARGET): PRIVATE_NAME := Install
+$(NDK_LIBCXX_TARGET): PRIVATE_SRC := $(NDK_LIBCXX_LIB_PATH)
+$(NDK_LIBCXX_TARGET): PRIVATE_DST := $(NDK_LIBCXX_TARGET)
+
+$(call generate-file-dir,$(NDK_LIBCXX_TARGET))
+
+$(NDK_LIBCXX_TARGET): clean-installed-binaries
+ $(call host-echo-build-step,$(PRIVATE_ABI),$(PRIVATE_NAME) "$(call pretty-dir,$(PRIVATE_DST))")
+ $(hide) $(call host-install,$(PRIVATE_SRC),$(PRIVATE_DST))
+
+endif
diff --git a/build/core/sanitizers.mk b/build/core/sanitizers.mk
index 9e03492..0967ee6 100644
--- a/build/core/sanitizers.mk
+++ b/build/core/sanitizers.mk
@@ -47,3 +47,11 @@
NDK_APP_WRAP_SH_$(TARGET_ARCH_ABI) := $(NDK_ROOT)/wrap.sh/asan.sh
endif
endif
+
+# If the user has not specified their own wrap.sh and is using HWASAN, install a
+# default HWASAN wrap.sh for them.
+ifneq (,$(filter hwaddress,$(NDK_SANITIZERS)))
+ ifeq ($(NDK_NO_USER_WRAP_SH),true)
+ NDK_APP_WRAP_SH_$(TARGET_ARCH_ABI) := $(NDK_ROOT)/wrap.sh/hwasan.sh
+ endif
+endif
diff --git a/build/core/setup-abi.mk b/build/core/setup-abi.mk
index c406c55..b6325ee 100644
--- a/build/core/setup-abi.mk
+++ b/build/core/setup-abi.mk
@@ -28,12 +28,12 @@
TARGET_PLATFORM_LEVEL := $(APP_PLATFORM_LEVEL)
-# 64-bit ABIs were first supported in API 21. Pull up these ABIs if the app has
-# a lower minSdkVersion.
-ifneq ($(filter $(NDK_KNOWN_DEVICE_ABI64S),$(TARGET_ARCH_ABI)),)
- ifneq ($(call lt,$(TARGET_PLATFORM_LEVEL),21),)
- TARGET_PLATFORM_LEVEL := 21
- endif
+# Pull up the minSdkVersion for this ABI if it is higher than the user's
+# APP_PLATFORM. A warning will be separately emitted in setup-app-platform.mk if
+# the user's APP_PLATFORM is too low for the NDK overall.
+MIN_OS_FOR_TARGET := $(NDK_ABI_${TARGET_ARCH_ABI}_MIN_OS_VERSION)
+ifneq ($(call lt,$(TARGET_PLATFORM_LEVEL),$(MIN_OS_FOR_TARGET)),)
+ TARGET_PLATFORM_LEVEL := $(MIN_OS_FOR_TARGET)
endif
# Not used by ndk-build, but are documented for use by Android.mk files.
diff --git a/build/core/setup-toolchain.mk b/build/core/setup-toolchain.mk
index 185bf53..d1d36fb 100644
--- a/build/core/setup-toolchain.mk
+++ b/build/core/setup-toolchain.mk
@@ -111,8 +111,6 @@
# free the dictionary of LOCAL_MODULE definitions
$(call modules-clear)
-$(call ndk-stl-select,$(NDK_APP_STL))
-
# now parse the Android.mk for the application, this records all
# module declarations, but does not populate the dependency graph yet.
include $(NDK_APP_BUILD_SCRIPT)
@@ -126,14 +124,13 @@
# has -fsanitize in its ldflags.
include $(BUILD_SYSTEM)/sanitizers.mk
include $(BUILD_SYSTEM)/openmp.mk
+ include $(BUILD_SYSTEM)/install_stl.mk
ifneq ($(NDK_APP_WRAP_SH_$(TARGET_ARCH_ABI)),)
include $(BUILD_SYSTEM)/install_wrap_sh.mk
endif
endif
-$(call ndk-stl-add-dependencies,$(NDK_APP_STL))
-
# recompute all dependencies between modules
$(call modules-compute-dependencies)
diff --git a/build/core/stl.mk b/build/core/stl.mk
new file mode 100644
index 0000000..ef1c645
--- /dev/null
+++ b/build/core/stl.mk
@@ -0,0 +1,63 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Interprets APP_STL to configure default cflags/ldflags and install rules for
+# libc++ runtime libraries as necessary.
+
+ifneq (,$(call module_needs_clangxx,$(LOCAL_MODULE)))
+
+ifeq ($(APP_STL),none)
+LOCAL_CPPFLAGS += -nostdinc++
+LOCAL_LDFLAGS += -nostdlib++
+else ifeq ($(APP_STL),system)
+# TODO: Actually use the system STL headers for that mode or remove.
+#
+# I'm not sure how long this has been broken, but released NDKs do not use the
+# bionic C++ headers when APP_STL=system, they use the libc++ headers. That's
+# almost certainly unintentional.
+#
+# There may not actually be any compatibility issues with this because bionic's
+# libstdc++ only provides new and delete anyway (the rest of the behavior is
+# just headers that re-expose C APIs in the std namespace). We could choose to
+# delete bionic's headers and keep this "bug" instead.
+
+# LOCAL_CPPFLAGS += -stdlib=libstdc++
+# LOCAL_LDFLAGS += -stdlib=libstdc++
+
+# TODO: Remove when https://reviews.llvm.org/D55856 is merged.
+#
+# The system STL Android.mk will export -lstdc++, but the Clang driver will
+# helpfully rewrite -lstdc++ to whatever the default C++ stdlib linker
+# arguments are, except in the presence of -nostdlib and -nodefaultlibs.
+# That part of the driver does not account for -nostdlib++. We can fix the
+# behavior by using -stdlib=libstdc++ so it rewrites -lstdc++ to -lstdc++
+# instead of -lc++.
+LOCAL_LDFLAGS += -stdlib=libstdc++
+
+ifneq (,$(call module-has-c++-features,$(LOCAL_MODULE),rtti exceptions))
+ LOCAL_LDLIBS += -lc++abi
+endif
+
+else ifeq ($(APP_STL),c++_static)
+LOCAL_LDFLAGS += -static-libstdc++
+endif
+
+# Else c++_shared, and no flags are needed. Shared libc++ is the default
+# behavior for Android targets in Clang.
+#
+# Invalid values will be checked by ndk-stl-check.
+
+endif
diff --git a/build/core/toolchains/riscv64-linux-android-clang/config.mk b/build/core/toolchains/riscv64-linux-android-clang/config.mk
new file mode 100644
index 0000000..cc1b4ce
--- /dev/null
+++ b/build/core/toolchains/riscv64-linux-android-clang/config.mk
@@ -0,0 +1,20 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Config file for the riscv64 Clang toolchain for the Android NDK. The real
+# meat is in the setup.mk file adjacent to this one.
+#
+TOOLCHAIN_ARCH := riscv64
+TOOLCHAIN_ABIS := riscv64
diff --git a/build/core/toolchains/riscv64-linux-android-clang/setup.mk b/build/core/toolchains/riscv64-linux-android-clang/setup.mk
new file mode 100644
index 0000000..f367e16
--- /dev/null
+++ b/build/core/toolchains/riscv64-linux-android-clang/setup.mk
@@ -0,0 +1,47 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+TOOLCHAIN_NAME := riscv64-linux-android
+LLVM_TRIPLE := riscv64-none-linux-android
+
+TARGET_TOOLCHAIN_ARCH_LIB_DIR := riscv64
+TARGET_ASAN_BASENAME := libclang_rt.asan-riscv64-android.so
+# TODO(https://github.com/android/ndk/issues/1041): Add TSAN when it builds for RISCV64.
+# TARGET_TSAN_BASENAME := libclang_rt.tsan-riscv64-android.so
+TARGET_UBSAN_BASENAME := libclang_rt.ubsan_standalone-riscv64-android.so
+
+TARGET_CFLAGS := -fPIC
+
+TARGET_riscv64_release_CFLAGS := \
+ -O2 \
+ -DNDEBUG \
+
+TARGET_riscv64_debug_CFLAGS := \
+ -O0 \
+ -UNDEBUG \
+ -fno-limit-debug-info \
+
+# This function will be called to determine the target CFLAGS used to build
+# a C or Assembler source file, based on its tags.
+#
+TARGET-process-src-files-tags = \
+$(eval __debug_sources := $(call get-src-files-with-tag,debug)) \
+$(eval __release_sources := $(call get-src-files-without-tag,debug)) \
+$(call set-src-files-target-cflags, $(__debug_sources), $(TARGET_riscv64_debug_CFLAGS)) \
+$(call set-src-files-target-cflags, $(__release_sources),$(TARGET_riscv64_release_CFLAGS)) \
+
+# The ABI-specific sub-directory that the SDK tools recognize for
+# this toolchain's generated binaries.
+TARGET_ABI_SUBDIR := riscv64
diff --git a/build/dump_compile_commands.py b/build/dump_compile_commands.py
index 39ab68a..38ee90e 100644
--- a/build/dump_compile_commands.py
+++ b/build/dump_compile_commands.py
@@ -23,39 +23,45 @@
import argparse
import json
import os
+from pathlib import Path
from shlex import join
+
def get_argument_parser() -> argparse.ArgumentParser:
"""Parses and returns command line arguments."""
parser = argparse.ArgumentParser()
parser.add_argument(
- '-o',
- '--output',
+ "-o",
+ "--output",
type=os.path.realpath, # type: ignore
required=True,
- help='Path to output file')
+ help="Path to output file",
+ )
parser.add_argument(
- '-d',
- '--directory',
+ "-d",
+ "--directory",
type=os.path.realpath, # type: ignore
- help='Working directory for the compile command.')
+ help="Working directory for the compile command.",
+ )
- parser.add_argument('-f', '--file', help='Source file.')
- parser.add_argument('--object-file', help='Object file.')
+ parser.add_argument("-f", "--file", help="Source file.")
+ parser.add_argument("--object-file", help="Object file.")
parser.add_argument(
- '--command-file',
+ "--command-file",
type=os.path.realpath, # type: ignore
- help='Compilation command list file.')
+ help="Compilation command list file.",
+ )
parser.add_argument(
- 'compile_command',
- metavar='COMPILE_COMMAND',
+ "compile_command",
+ metavar="COMPILE_COMMAND",
nargs=argparse.REMAINDER,
- help='Compilation command.')
+ help="Compilation command.",
+ )
return parser
@@ -66,25 +72,26 @@
args = parser.parse_args()
if args.command_file and args.compile_command:
- parser.error(
- '--command-file and COMPILE_COMMAND are mutually exclusive')
+ parser.error("--command-file and COMPILE_COMMAND are mutually exclusive")
if not args.command_file and not args.compile_command:
- parser.error('Either --command-file or COMPILE_COMMAND is required.')
+ parser.error("Either --command-file or COMPILE_COMMAND is required.")
command = join(args.compile_command)
if args.command_file:
- with open(args.command_file) as command_file:
- command = command_file.read().strip()
+ command = Path(args.command_file).read_text(encoding="utf-8").strip()
- with open(args.output, 'w') as out_file:
- json.dump({
- 'directory': args.directory,
- 'file': args.file,
- 'output': args.object_file,
- 'command': command,
- }, out_file)
+ with open(args.output, "w", encoding="utf-8") as out_file:
+ json.dump(
+ {
+ "directory": args.directory,
+ "file": args.file,
+ "output": args.object_file,
+ "command": command,
+ },
+ out_file,
+ )
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/build/extract_manifest.py b/build/extract_manifest.py
index ef84fc7..1898562 100644
--- a/build/extract_manifest.py
+++ b/build/extract_manifest.py
@@ -26,19 +26,28 @@
parser = argparse.ArgumentParser()
parser.add_argument(
- 'property', metavar='PROPERTY',
- choices=('minSdkVersion', 'debuggable'),
- help='Property to extract from the manifest file.')
+ "property",
+ metavar="PROPERTY",
+ choices=("minSdkVersion", "debuggable"),
+ help="Property to extract from the manifest file.",
+ )
parser.add_argument(
- 'manifest_file', metavar='MANIFEST_FILE', type=os.path.abspath, # type: ignore
- help='Path to the AndroidManifest.xml file.')
+ "manifest_file",
+ metavar="MANIFEST_FILE",
+ type=os.path.abspath, # type: ignore
+ help="Path to the AndroidManifest.xml file.",
+ )
return parser.parse_args()
-def get_rpath_attribute(root: xml.etree.ElementTree.Element, element_path: str,
- attribute: str, default: str = '') -> str:
+def get_rpath_attribute(
+ root: xml.etree.ElementTree.Element,
+ element_path: str,
+ attribute: str,
+ default: str = "",
+) -> str:
"""Returns the value of an attribute at an rpath.
If more than one element exists with the same name, only the first is
@@ -53,17 +62,17 @@
The attribute's value as a string if found, else the value of
`default`.
"""
- ns_url = 'http://schemas.android.com/apk/res/android'
+ ns_url = "http://schemas.android.com/apk/res/android"
ns = {
- 'android': ns_url,
+ "android": ns_url,
}
elem = root.find(element_path, ns)
if elem is None:
- return ''
+ return ""
# ElementTree elements don't have the same helpful namespace parameter that
# the find family does :(
- attrib_name = attribute.replace('android:', '{' + ns_url + '}')
+ attrib_name = attribute.replace("android:", "{" + ns_url + "}")
return str(elem.get(attrib_name, default))
@@ -73,7 +82,7 @@
Returns:
String form of android:minSdkVersion if found, else the empty string.
"""
- return get_rpath_attribute(root, './uses-sdk', 'android:minSdkVersion', '')
+ return get_rpath_attribute(root, "./uses-sdk", "android:minSdkVersion", "")
def get_debuggable(root: xml.etree.ElementTree.Element) -> str:
@@ -82,13 +91,12 @@
Returns:
String form of android:debuggable if found, else the empty string.
"""
- debuggable = get_rpath_attribute(
- root, './application', 'android:debuggable', '')
+ debuggable = get_rpath_attribute(root, "./application", "android:debuggable", "")
# Though any such manifest would be invalid, the awk script rewrote bogus
# values to false. Missing attributes should also be false.
- if debuggable != 'true':
- debuggable = 'false'
+ if debuggable != "true":
+ debuggable = "false"
return debuggable
@@ -97,13 +105,13 @@
args = parse_args()
tree = xml.etree.ElementTree.parse(args.manifest_file)
- if args.property == 'minSdkVersion':
+ if args.property == "minSdkVersion":
print(get_minsdkversion(tree.getroot()))
- elif args.property == 'debuggable':
+ elif args.property == "debuggable":
print(get_debuggable(tree.getroot()))
else:
raise ValueError
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/build/extract_platform.py b/build/extract_platform.py
index 2d50efb..3b59176 100644
--- a/build/extract_platform.py
+++ b/build/extract_platform.py
@@ -27,8 +27,11 @@
parser = argparse.ArgumentParser()
parser.add_argument(
- 'properties_file', metavar='PROPERTIES_FILE', type=os.path.abspath, # type: ignore
- help='Path to the project.properties file.')
+ "properties_file",
+ metavar="PROPERTIES_FILE",
+ type=os.path.abspath, # type: ignore
+ help="Path to the project.properties file.",
+ )
return parser.parse_args()
@@ -39,16 +42,16 @@
Returns:
String form of the platform version if found, else "unknown".
"""
- android_regex = re.compile(r'(android-\w+)')
- vendor_regex = re.compile(r':(\d+)\s*$')
+ android_regex = re.compile(r"(android-\w+)")
+ vendor_regex = re.compile(r":(\d+)\s*$")
for line in properties_file:
match = android_regex.search(line)
if match is not None:
return match.group(1)
match = vendor_regex.search(line)
if match is not None:
- return 'android-{}'.format(match.group(1))
- return 'unknown'
+ return "android-{}".format(match.group(1))
+ return "unknown"
def main() -> None:
@@ -66,9 +69,9 @@
# android- may be followed by either the numeric API level or the named
# platform. Note that while we can parse any name, ndk-build only support a
# small handful.
- with open(args.properties_file) as properties_file:
+ with open(args.properties_file, encoding="utf-8") as properties_file:
print(get_platform(properties_file))
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/build/gen_compile_db.py b/build/gen_compile_db.py
index 3aafe3f..5cf7a90 100644
--- a/build/gen_compile_db.py
+++ b/build/gen_compile_db.py
@@ -23,6 +23,7 @@
import argparse
import json
import os
+from pathlib import Path
def parse_args() -> argparse.Namespace:
@@ -30,21 +31,25 @@
parser = argparse.ArgumentParser()
parser.add_argument(
- '-o', '--output', type=os.path.realpath, help='Path to output file') # type: ignore
+ "-o", "--output", type=os.path.realpath, help="Path to output file"
+ ) # type: ignore
def maybe_list_file(arg: str) -> str:
- if arg.startswith('@'):
- return '@' + os.path.realpath(arg[1:])
+ if arg.startswith("@"):
+ return "@" + os.path.realpath(arg[1:])
return os.path.realpath(arg)
parser.add_argument(
- 'command_files',
- metavar='FILE',
+ "command_files",
+ metavar="FILE",
type=maybe_list_file,
- nargs='+',
- help=('Path to the compilation database for a single object. If the '
- 'argument begins with @ it will be treated as a list file '
- 'containing paths to the one or more JSON files.'))
+ nargs="+",
+ help=(
+ "Path to the compilation database for a single object. If the "
+ "argument begins with @ it will be treated as a list file "
+ "containing paths to the one or more JSON files."
+ ),
+ )
return parser.parse_args()
@@ -56,24 +61,21 @@
all_commands = []
command_files = []
for command_file in args.command_files:
- if command_file.startswith('@'):
- with open(command_file[1:]) as list_file:
- command_files.extend(list_file.read().split())
+ if command_file.startswith("@"):
+ list_file = Path(command_file[1:])
+ command_files.extend(list_file.read_text(encoding="utf-8").split())
else:
command_files.append(command_file)
for command_file_path in command_files:
- with open(command_file_path) as command_file:
+ with open(command_file_path, encoding="utf-8") as command_file:
all_commands.append(json.load(command_file))
- with open(args.output, 'w') as out_file:
+ with open(args.output, "w", encoding="utf-8") as out_file:
json.dump(
- all_commands,
- out_file,
- sort_keys=True,
- indent=4,
- separators=(',', ': '))
+ all_commands, out_file, sort_keys=True, indent=4, separators=(",", ": ")
+ )
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/build/gen_cygpath.py b/build/gen_cygpath.py
index 4174ac4..0100e80 100644
--- a/build/gen_cygpath.py
+++ b/build/gen_cygpath.py
@@ -35,7 +35,7 @@
Returns:
A list of tuples mapping cygwin paths to Windows paths.
"""
- mount_regex = re.compile(r'^(\S+) on (\S+) .*$')
+ mount_regex = re.compile(r"^(\S+) on (\S+) .*$")
# We use a list of tuples rather than a dict because we want to recurse on
# the list later anyway.
@@ -47,16 +47,16 @@
if match is not None:
win_path = match.group(1)
cyg_path = match.group(2)
- if cyg_path == '/':
+ if cyg_path == "/":
# Since we're going to be using patsubst on these, we need to
# make sure that the rule for / is applied last, otherwise
# we'll replace all other cygwin paths with that one.
mounts.insert(0, (cyg_path, win_path))
- elif cyg_path.startswith('/cygdrive/'):
+ elif cyg_path.startswith("/cygdrive/"):
# We need both /cygdrive/c and /cygdrive/C to point to C:.
letter = posixpath.basename(cyg_path)
- lower_path = posixpath.join('/cygdrive', letter.lower())
- upper_path = posixpath.join('/cygdrive', letter.upper())
+ lower_path = posixpath.join("/cygdrive", letter.lower())
+ upper_path = posixpath.join("/cygdrive", letter.upper())
mounts.append((lower_path, win_path))
mounts.append((upper_path, win_path))
else:
@@ -77,17 +77,18 @@
# We're building a bunch of nested patsubst calls. Once we've written each
# of the calls, we pass the function input to the inner most call.
if not mounts:
- return '$1'
+ return "$1"
cyg_path, win_path = mounts[0]
- if not cyg_path.endswith('/'):
- cyg_path += '/'
- if not win_path.endswith('/'):
- win_path += '/'
+ if not cyg_path.endswith("/"):
+ cyg_path += "/"
+ if not win_path.endswith("/"):
+ win_path += "/"
other_mounts = mounts[1:]
- return '$(patsubst {}%,{}%,\n{})'.format(
- cyg_path, win_path, make_cygpath_function(other_mounts))
+ return "$(patsubst {}%,{}%,\n{})".format(
+ cyg_path, win_path, make_cygpath_function(other_mounts)
+ )
def main() -> None:
@@ -98,5 +99,5 @@
print(make_cygpath_function(mounts))
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/build/ldflags_to_sanitizers.py b/build/ldflags_to_sanitizers.py
index dc1533d..e2af032 100644
--- a/build/ldflags_to_sanitizers.py
+++ b/build/ldflags_to_sanitizers.py
@@ -25,12 +25,12 @@
"""Returns the sanitizers enabled by a given set of ldflags."""
sanitizers = set()
for arg in args:
- if arg.startswith('-fsanitize='):
- sanitizer_list = arg.partition('=')[2]
- sanitizers |= set(sanitizer_list.split(','))
- elif arg.startswith('-fno-sanitize='):
- sanitizer_list = arg.partition('=')[2]
- sanitizers -= set(sanitizer_list.split(','))
+ if arg.startswith("-fsanitize="):
+ sanitizer_list = arg.partition("=")[2]
+ sanitizers |= set(sanitizer_list.split(","))
+ elif arg.startswith("-fno-sanitize="):
+ sanitizer_list = arg.partition("=")[2]
+ sanitizers -= set(sanitizer_list.split(","))
return sorted(list(sanitizers))
@@ -46,7 +46,7 @@
"""
modules: list[list[str]] = [[]]
for arg in args:
- if arg == '--module':
+ if arg == "--module":
modules.append([])
else:
modules[-1].append(arg)
@@ -60,15 +60,16 @@
# MODULE_FLAGS sections.
if len(argv) < 2:
sys.exit(
- 'usage: ldflags_to_sanitizers.py [GLOBAL_FLAGS] '
- '--module [MODULE_FLAGS] [--module [MODULE_FLAGS]...]')
+ "usage: ldflags_to_sanitizers.py [GLOBAL_FLAGS] "
+ "--module [MODULE_FLAGS] [--module [MODULE_FLAGS]...]"
+ )
global_flags, modules_flags = argv_to_module_arg_lists(argv[1:])
all_sanitizers = list(sanitizers_from_args(global_flags))
for module_flags in modules_flags:
all_sanitizers.extend(sanitizers_from_args(module_flags))
- print(' '.join(sorted(set(all_sanitizers))), file=stream)
+ print(" ".join(sorted(set(all_sanitizers))), file=stream)
-if __name__ == '__main__':
+if __name__ == "__main__":
main(sys.argv)
diff --git a/build/ndk-build.cmd b/build/ndk-build.cmd
index 3006992..d45c5f9 100755
--- a/build/ndk-build.cmd
+++ b/build/ndk-build.cmd
@@ -1,5 +1,16 @@
@echo off
setlocal
+
+rem This is checked in build-local.mk... but make on windows doesn't handle
+rem LAST_MAKEFILE correctly when the makefile is in a directory with spaces
+rem anyway, so that defense doesn't work either.
+rem https://github.com/android/ndk/issues/1400
+rem https://stackoverflow.com/a/29057742/632035
+for /f "tokens=2" %%a in ("%~dp0") do (
+ echo ERROR: NDK path cannot contain spaces
+ exit /b 1
+)
+
rem Unset PYTHONPATH and PYTHONHOME to prevent the user's environment from
rem affecting the Python that we invoke.
rem See https://github.com/googlesamples/vulkan-basic-samples/issues/25
diff --git a/build/test_extract_manifest.py b/build/test_extract_manifest.py
index 9259b7a..70652c1 100644
--- a/build/test_extract_manifest.py
+++ b/build/test_extract_manifest.py
@@ -24,7 +24,8 @@
class ExtractMinSdkVersionTest(unittest.TestCase):
def testMinSdkVersion(self) -> None:
- xml_str = textwrap.dedent("""\
+ xml_str = textwrap.dedent(
+ """\
<?xml version="1.0" encoding="utf-8"?>
<manifest
xmlns:android="http://schemas.android.com/apk/res/android"
@@ -41,14 +42,15 @@
</application>
<uses-sdk android:minSdkVersion="9"/>
</manifest>
- """)
+ """
+ )
root = xml.etree.ElementTree.fromstring(xml_str)
- self.assertEqual(
- '9', build.extract_manifest.get_minsdkversion(root))
+ self.assertEqual("9", build.extract_manifest.get_minsdkversion(root))
def testUsesSdkMissingMinSdkVersion(self) -> None:
- xml_str = textwrap.dedent("""\
+ xml_str = textwrap.dedent(
+ """\
<?xml version="1.0" encoding="utf-8"?>
<manifest
xmlns:android="http://schemas.android.com/apk/res/android"
@@ -65,14 +67,15 @@
</application>
<uses-sdk android:maxSdkVersion="21"/>
</manifest>
- """)
+ """
+ )
root = xml.etree.ElementTree.fromstring(xml_str)
- self.assertEqual(
- '', build.extract_manifest.get_minsdkversion(root))
+ self.assertEqual("", build.extract_manifest.get_minsdkversion(root))
def testNoUsesSdk(self) -> None:
- xml_str = textwrap.dedent("""\
+ xml_str = textwrap.dedent(
+ """\
<?xml version="1.0" encoding="utf-8"?>
<manifest
xmlns:android="http://schemas.android.com/apk/res/android"
@@ -88,16 +91,17 @@
</activity>
</application>
</manifest>
- """)
+ """
+ )
root = xml.etree.ElementTree.fromstring(xml_str)
- self.assertEqual(
- '', build.extract_manifest.get_minsdkversion(root))
+ self.assertEqual("", build.extract_manifest.get_minsdkversion(root))
class ExtractDebuggableTest(unittest.TestCase):
def testIsDebuggable(self) -> None:
- xml_str = textwrap.dedent("""\
+ xml_str = textwrap.dedent(
+ """\
<?xml version="1.0" encoding="utf-8"?>
<manifest
xmlns:android="http://schemas.android.com/apk/res/android"
@@ -109,14 +113,15 @@
android:debuggable="true">
</application>
</manifest>
- """)
+ """
+ )
root = xml.etree.ElementTree.fromstring(xml_str)
- self.assertEqual(
- 'true', build.extract_manifest.get_debuggable(root))
+ self.assertEqual("true", build.extract_manifest.get_debuggable(root))
def testIsNotDebuggable(self) -> None:
- xml_str = textwrap.dedent("""\
+ xml_str = textwrap.dedent(
+ """\
<?xml version="1.0" encoding="utf-8"?>
<manifest
xmlns:android="http://schemas.android.com/apk/res/android"
@@ -128,14 +133,15 @@
android:debuggable="false">
</application>
</manifest>
- """)
+ """
+ )
root = xml.etree.ElementTree.fromstring(xml_str)
- self.assertEqual(
- 'false', build.extract_manifest.get_debuggable(root))
+ self.assertEqual("false", build.extract_manifest.get_debuggable(root))
def testBogusValue(self) -> None:
- xml_str = textwrap.dedent("""\
+ xml_str = textwrap.dedent(
+ """\
<?xml version="1.0" encoding="utf-8"?>
<manifest
xmlns:android="http://schemas.android.com/apk/res/android"
@@ -147,14 +153,15 @@
android:debuggable="bogus">
</application>
</manifest>
- """)
+ """
+ )
root = xml.etree.ElementTree.fromstring(xml_str)
- self.assertEqual(
- 'false', build.extract_manifest.get_debuggable(root))
+ self.assertEqual("false", build.extract_manifest.get_debuggable(root))
def testNotSet(self) -> None:
- xml_str = textwrap.dedent("""\
+ xml_str = textwrap.dedent(
+ """\
<?xml version="1.0" encoding="utf-8"?>
<manifest
xmlns:android="http://schemas.android.com/apk/res/android"
@@ -169,8 +176,8 @@
</application>
<uses-sdk android:maxSdkVersion="21"/>
</manifest>
- """)
+ """
+ )
root = xml.etree.ElementTree.fromstring(xml_str)
- self.assertEqual(
- 'false', build.extract_manifest.get_debuggable(root))
+ self.assertEqual("false", build.extract_manifest.get_debuggable(root))
diff --git a/build/test_extract_platform.py b/build/test_extract_platform.py
index 267f459..08b0c2e 100644
--- a/build/test_extract_platform.py
+++ b/build/test_extract_platform.py
@@ -24,40 +24,51 @@
class ExtractPlatformTest(unittest.TestCase):
def testNumericVersion(self) -> None:
- props_file = StringIO(textwrap.dedent("""\
+ props_file = StringIO(
+ textwrap.dedent(
+ """\
some
# other
junk
target=android-9
foo
- """))
+ """
+ )
+ )
- self.assertEqual(
- 'android-9', build.extract_platform.get_platform(props_file))
+ self.assertEqual("android-9", build.extract_platform.get_platform(props_file))
def testNamedVersion(self) -> None:
- props_file = StringIO(textwrap.dedent("""\
+ props_file = StringIO(
+ textwrap.dedent(
+ """\
some
# other
junk
target=android-nougat
foo
- """))
+ """
+ )
+ )
self.assertEqual(
- 'android-nougat', build.extract_platform.get_platform(props_file))
+ "android-nougat", build.extract_platform.get_platform(props_file)
+ )
def testVendorVersion(self) -> None:
- props_file = StringIO(textwrap.dedent("""\
+ props_file = StringIO(
+ textwrap.dedent(
+ """\
some
# other
junk
target=vendor:something:21
foo
- """))
+ """
+ )
+ )
- self.assertEqual(
- 'android-21', build.extract_platform.get_platform(props_file))
+ self.assertEqual("android-21", build.extract_platform.get_platform(props_file))
def testNoVersion(self) -> None:
- self.assertEqual('unknown', build.extract_platform.get_platform(StringIO('')))
+ self.assertEqual("unknown", build.extract_platform.get_platform(StringIO("")))
diff --git a/build/test_gen_cygpath.py b/build/test_gen_cygpath.py
index 7ded6c0..367c351 100644
--- a/build/test_gen_cygpath.py
+++ b/build/test_gen_cygpath.py
@@ -23,63 +23,66 @@
class GetMountsTest(unittest.TestCase):
def testSingleMount(self) -> None:
- mount_output = 'C:/cygwin on / type ntfs (binary,auto)'
+ mount_output = "C:/cygwin on / type ntfs (binary,auto)"
self.assertEqual(
- [('/', 'C:/cygwin')], build.gen_cygpath.get_mounts(mount_output))
+ [("/", "C:/cygwin")], build.gen_cygpath.get_mounts(mount_output)
+ )
def testCaseInsensitiveMount(self) -> None:
- mount_output = 'C: on /cygdrive/c type ntfs'
+ mount_output = "C: on /cygdrive/c type ntfs"
expected_output = [
- ('/cygdrive/c', 'C:'),
- ('/cygdrive/C', 'C:'),
+ ("/cygdrive/c", "C:"),
+ ("/cygdrive/C", "C:"),
]
- self.assertEqual(
- expected_output, build.gen_cygpath.get_mounts(mount_output))
+ self.assertEqual(expected_output, build.gen_cygpath.get_mounts(mount_output))
def testManyMounts(self) -> None:
- mount_output = textwrap.dedent("""\
+ mount_output = textwrap.dedent(
+ """\
C:/cygwin/bin on /usr/bin type ntfs (binary,auto)
C:/cygwin/lib on /usr/lib type ntfs (binary,auto)
C:/cygwin on / type ntfs (binary,auto)
C: on /cygdrive/c type ntfs (binary,posix=0,user,noumount,auto)
D: on /cygdrive/d type udf (binary,posix=0,user,noumount,auto)
- """)
+ """
+ )
expected_output = [
- ('/', 'C:/cygwin'),
- ('/usr/bin', 'C:/cygwin/bin'),
- ('/usr/lib', 'C:/cygwin/lib'),
- ('/cygdrive/c', 'C:'),
- ('/cygdrive/C', 'C:'),
- ('/cygdrive/d', 'D:'),
- ('/cygdrive/D', 'D:'),
+ ("/", "C:/cygwin"),
+ ("/usr/bin", "C:/cygwin/bin"),
+ ("/usr/lib", "C:/cygwin/lib"),
+ ("/cygdrive/c", "C:"),
+ ("/cygdrive/C", "C:"),
+ ("/cygdrive/d", "D:"),
+ ("/cygdrive/D", "D:"),
]
- self.assertEqual(
- expected_output, build.gen_cygpath.get_mounts(mount_output))
+ self.assertEqual(expected_output, build.gen_cygpath.get_mounts(mount_output))
class MakeCygpathFunctionTest(unittest.TestCase):
def testSingleMount(self) -> None:
- mounts = [('/', 'C:/cygwin')]
- expected_output = '$(patsubst /%,C:/cygwin/%,\n$1)'
+ mounts = [("/", "C:/cygwin")]
+ expected_output = "$(patsubst /%,C:/cygwin/%,\n$1)"
self.assertEqual(
- expected_output, build.gen_cygpath.make_cygpath_function(mounts))
+ expected_output, build.gen_cygpath.make_cygpath_function(mounts)
+ )
def testManyMounts(self) -> None:
mounts = [
- ('/', 'C:/cygwin'),
- ('/usr/bin', 'C:/cygwin/bin'),
- ('/usr/lib', 'C:/cygwin/lib'),
- ('/cygdrive/c', 'C:'),
- ('/cygdrive/C', 'C:'),
- ('/cygdrive/d', 'D:'),
- ('/cygdrive/D', 'D:'),
+ ("/", "C:/cygwin"),
+ ("/usr/bin", "C:/cygwin/bin"),
+ ("/usr/lib", "C:/cygwin/lib"),
+ ("/cygdrive/c", "C:"),
+ ("/cygdrive/C", "C:"),
+ ("/cygdrive/d", "D:"),
+ ("/cygdrive/D", "D:"),
]
- expected_output = textwrap.dedent("""\
+ expected_output = textwrap.dedent(
+ """\
$(patsubst /%,C:/cygwin/%,
$(patsubst /usr/bin/%,C:/cygwin/bin/%,
$(patsubst /usr/lib/%,C:/cygwin/lib/%,
@@ -87,7 +90,9 @@
$(patsubst /cygdrive/C/%,C:/%,
$(patsubst /cygdrive/d/%,D:/%,
$(patsubst /cygdrive/D/%,D:/%,
- $1)))))))""")
+ $1)))))))"""
+ )
self.assertEqual(
- expected_output, build.gen_cygpath.make_cygpath_function(mounts))
+ expected_output, build.gen_cygpath.make_cygpath_function(mounts)
+ )
diff --git a/build/test_ldflags_to_sanitizers.py b/build/test_ldflags_to_sanitizers.py
index eaddcae..1e0d91a 100644
--- a/build/test_ldflags_to_sanitizers.py
+++ b/build/test_ldflags_to_sanitizers.py
@@ -32,73 +32,105 @@
def test_sanitizers_from_args_no_sanitize_args(self) -> None:
"""Tests that we don't identify sanitizers when there are none."""
self.assertListEqual([], sanitizers_from_args([]))
- self.assertListEqual([], sanitizers_from_args(['foo', 'bar']))
+ self.assertListEqual([], sanitizers_from_args(["foo", "bar"]))
def test_sanitizers_from_args_enabled_sanitizers(self) -> None:
"""Tests that we find enabled sanitizers."""
+ self.assertListEqual(["address"], sanitizers_from_args(["-fsanitize=address"]))
self.assertListEqual(
- ['address'], sanitizers_from_args(['-fsanitize=address']))
+ ["address"], sanitizers_from_args(["-fsanitize=address", "foo"])
+ )
self.assertListEqual(
- ['address'], sanitizers_from_args(['-fsanitize=address', 'foo']))
+ ["address", "undefined"],
+ sanitizers_from_args(["-fsanitize=address", "-fsanitize=undefined"]),
+ )
self.assertListEqual(
- ['address', 'undefined'],
- sanitizers_from_args(
- ['-fsanitize=address', '-fsanitize=undefined']))
+ ["address", "undefined"],
+ sanitizers_from_args(["-fsanitize=address,undefined"]),
+ )
self.assertListEqual(
- ['address', 'undefined'],
- sanitizers_from_args(['-fsanitize=address,undefined']))
- self.assertListEqual(
- ['address', 'undefined'],
- sanitizers_from_args(['-fsanitize=address,undefined', 'foo']))
+ ["address", "undefined"],
+ sanitizers_from_args(["-fsanitize=address,undefined", "foo"]),
+ )
def test_sanitizers_from_args_disabled_sanitizers(self) -> None:
"""Tests that we don't find disabled sanitizers."""
- self.assertListEqual([], sanitizers_from_args(
- ['-fno-sanitize=address']))
- self.assertListEqual([], sanitizers_from_args(
- ['-fno-sanitize=address', 'foo']))
- self.assertListEqual([], sanitizers_from_args(
- ['-fno-sanitize=address', '-fno-sanitize=undefined']))
- self.assertListEqual([], sanitizers_from_args(
- ['-fno-sanitize=address,undefined']))
- self.assertListEqual([], sanitizers_from_args(
- ['-fno-sanitize=address,undefined', 'foo']))
+ self.assertListEqual([], sanitizers_from_args(["-fno-sanitize=address"]))
+ self.assertListEqual([], sanitizers_from_args(["-fno-sanitize=address", "foo"]))
+ self.assertListEqual(
+ [],
+ sanitizers_from_args(["-fno-sanitize=address", "-fno-sanitize=undefined"]),
+ )
+ self.assertListEqual(
+ [], sanitizers_from_args(["-fno-sanitize=address,undefined"])
+ )
+ self.assertListEqual(
+ [], sanitizers_from_args(["-fno-sanitize=address,undefined", "foo"])
+ )
def test_sanitizers_from_args_enabled_disabled_sanitizers(self) -> None:
"""Tests that we correctly identify only enabled sanitizers."""
- self.assertListEqual([], sanitizers_from_args(
- ['-fsanitize=address', '-fno-sanitize=address']))
- self.assertListEqual(['address'], sanitizers_from_args(
- ['-fsanitize=address', '-fno-sanitize=address',
- '-fsanitize=address']))
- self.assertListEqual([], sanitizers_from_args(
- ['-fsanitize=address', '-fno-sanitize=address',
- '-fsanitize=address', '-fno-sanitize=address']))
- self.assertListEqual(['undefined'], sanitizers_from_args(
- ['-fsanitize=address,undefined', '-fno-sanitize=address']))
- self.assertListEqual(['undefined'], sanitizers_from_args(
- ['-fsanitize=address', '-fsanitize=undefined',
- '-fno-sanitize=address']))
+ self.assertListEqual(
+ [], sanitizers_from_args(["-fsanitize=address", "-fno-sanitize=address"])
+ )
+ self.assertListEqual(
+ ["address"],
+ sanitizers_from_args(
+ ["-fsanitize=address", "-fno-sanitize=address", "-fsanitize=address"]
+ ),
+ )
+ self.assertListEqual(
+ [],
+ sanitizers_from_args(
+ [
+ "-fsanitize=address",
+ "-fno-sanitize=address",
+ "-fsanitize=address",
+ "-fno-sanitize=address",
+ ]
+ ),
+ )
+ self.assertListEqual(
+ ["undefined"],
+ sanitizers_from_args(
+ ["-fsanitize=address,undefined", "-fno-sanitize=address"]
+ ),
+ )
+ self.assertListEqual(
+ ["undefined"],
+ sanitizers_from_args(
+ ["-fsanitize=address", "-fsanitize=undefined", "-fno-sanitize=address"]
+ ),
+ )
def test_argv_to_module_arg_lists(self) -> None:
"""Tests that modules' arguments are properly identified."""
self.assertTupleEqual(([], []), argv_to_module_arg_lists([]))
- self.assertTupleEqual((['foo'], []), argv_to_module_arg_lists(['foo']))
+ self.assertTupleEqual((["foo"], []), argv_to_module_arg_lists(["foo"]))
self.assertTupleEqual(
- ([], [['foo', 'bar'], ['baz']]),
- argv_to_module_arg_lists(
- ['--module', 'foo', 'bar', '--module', 'baz']))
+ ([], [["foo", "bar"], ["baz"]]),
+ argv_to_module_arg_lists(["--module", "foo", "bar", "--module", "baz"]),
+ )
self.assertTupleEqual(
- (['foo', 'bar'], [['baz']]),
- argv_to_module_arg_lists(['foo', 'bar', '--module', 'baz']))
+ (["foo", "bar"], [["baz"]]),
+ argv_to_module_arg_lists(["foo", "bar", "--module", "baz"]),
+ )
def test_main(self) -> None:
"""Test that the program itself works."""
sio = StringIO()
ldflags_main(
- ['ldflags_to_sanitizers.py', '-fsanitize=undefined', '--module',
- '-fsanitize=address,thread', '-fno-sanitize=thread',
- '--module', '-fsanitize=undefined'], sio)
- self.assertEqual('address undefined', sio.getvalue().strip())
+ [
+ "ldflags_to_sanitizers.py",
+ "-fsanitize=undefined",
+ "--module",
+ "-fsanitize=address,thread",
+ "-fno-sanitize=thread",
+ "--module",
+ "-fsanitize=undefined",
+ ],
+ sio,
+ )
+ self.assertEqual("address undefined", sio.getvalue().strip())
diff --git a/build/tools/make_standalone_toolchain.py b/build/tools/make_standalone_toolchain.py
index d92d9e7..22951ae 100755
--- a/build/tools/make_standalone_toolchain.py
+++ b/build/tools/make_standalone_toolchain.py
@@ -16,8 +16,10 @@
#
"""Creates a toolchain installation for a given Android target.
-The output of this tool is a more typical cross-compiling toolchain. It is
-indended to be used with existing build systems such as autotools.
+THIS TOOL IS OBSOLETE. It is no longer necessary to create a separate toolchain for use
+with build systems that lack explicit NDK support. The compiler installed to
+<NDK>/toolchains/llvm/prebuilt/<host>/bin can be used directly. See
+https://developer.android.com/ndk/guides/other_build_systems for more details.
"""
import argparse
import atexit
@@ -30,10 +32,10 @@
import sys
import tempfile
import textwrap
-
+from pathlib import Path
THIS_DIR = os.path.realpath(os.path.dirname(__file__))
-NDK_DIR = os.path.realpath(os.path.join(THIS_DIR, '../..'))
+NDK_DIR = os.path.realpath(os.path.join(THIS_DIR, "../.."))
def logger():
@@ -44,54 +46,65 @@
def check_ndk_or_die():
"""Verify that our NDK installation is somewhat present or die."""
checks = [
- 'build/core',
- 'prebuilt',
- 'toolchains',
+ "build/core",
+ "prebuilt",
+ "toolchains",
]
for check in checks:
check_path = os.path.join(NDK_DIR, check)
if not os.path.exists(check_path):
- sys.exit('Missing {}'.format(check_path))
+ sys.exit("Missing {}".format(check_path))
def get_triple(arch):
"""Return the triple for the given architecture."""
return {
- 'arm': 'arm-linux-androideabi',
- 'arm64': 'aarch64-linux-android',
- 'x86': 'i686-linux-android',
- 'x86_64': 'x86_64-linux-android',
+ "arm": "arm-linux-androideabi",
+ "arm64": "aarch64-linux-android",
+ "riscv64": "riscv64-linux-android",
+ "x86": "i686-linux-android",
+ "x86_64": "x86_64-linux-android",
+ }[arch]
+
+
+def arch_to_abi(arch: str) -> str:
+ """Return the ABI name for the given architecture."""
+ return {
+ "arm": "armeabi-v7a",
+ "arm64": "arm64-v8a",
+ "riscv64": "riscv64",
+ "x86": "x86",
+ "x86_64": "x86_64",
}[arch]
def get_host_tag_or_die():
"""Return the host tag for this platform. Die if not supported."""
- if sys.platform.startswith('linux'):
- return 'linux-x86_64'
- elif sys.platform == 'darwin':
- return 'darwin-x86_64'
- elif sys.platform == 'win32' or sys.platform == 'cygwin':
- return 'windows-x86_64'
- sys.exit('Unsupported platform: ' + sys.platform)
+ if sys.platform.startswith("linux"):
+ return "linux-x86_64"
+ elif sys.platform == "darwin":
+ return "darwin-x86_64"
+ elif sys.platform == "win32" or sys.platform == "cygwin":
+ return "windows-x86_64"
+ sys.exit("Unsupported platform: " + sys.platform)
def get_toolchain_path_or_die(host_tag):
"""Return the toolchain path or die."""
- toolchain_path = os.path.join(NDK_DIR, 'toolchains/llvm/prebuilt',
- host_tag)
+ toolchain_path = os.path.join(NDK_DIR, "toolchains/llvm/prebuilt", host_tag)
if not os.path.exists(toolchain_path):
- sys.exit('Could not find toolchain: {}'.format(toolchain_path))
+ sys.exit("Could not find toolchain: {}".format(toolchain_path))
return toolchain_path
def make_clang_target(triple, api):
"""Returns the Clang target for the given GNU triple and API combo."""
- arch, os_name, env = triple.split('-')
- if arch == 'arm':
- arch = 'armv7a' # Target armv7, not armv5.
+ arch, os_name, env = triple.split("-")
+ if arch == "arm":
+ arch = "armv7a" # Target armv7, not armv5.
- return '{}-{}-{}{}'.format(arch, os_name, env, api)
+ return "{}-{}-{}{}".format(arch, os_name, env, api)
def make_clang_scripts(install_dir, arch, api, windows):
@@ -106,39 +119,44 @@
Create wrapper scripts that invoke Clang with `-target` and `--sysroot`
preset.
"""
- with open(os.path.join(install_dir, 'AndroidVersion.txt')) as version_file:
+ with open(os.path.join(install_dir, "AndroidVersion.txt")) as version_file:
first_line = version_file.read().strip().splitlines()[0]
- major, minor, _build = first_line.split('.')
+ major, minor, _build = first_line.split(".")
version_number = major + minor
- exe = ''
+ exe = ""
if windows:
- exe = '.exe'
+ exe = ".exe"
- bin_dir = os.path.join(install_dir, 'bin')
- shutil.move(os.path.join(bin_dir, 'clang' + exe),
- os.path.join(bin_dir, 'clang{}'.format(version_number) + exe))
- shutil.move(os.path.join(bin_dir, 'clang++' + exe),
- os.path.join(bin_dir, 'clang{}++'.format(
- version_number) + exe))
+ bin_dir = os.path.join(install_dir, "bin")
+ shutil.move(
+ os.path.join(bin_dir, "clang" + exe),
+ os.path.join(bin_dir, "clang{}".format(version_number) + exe),
+ )
+ shutil.move(
+ os.path.join(bin_dir, "clang++" + exe),
+ os.path.join(bin_dir, "clang{}++".format(version_number) + exe),
+ )
triple = get_triple(arch)
target = make_clang_target(triple, api)
- flags = '-target {}'.format(target)
+ flags = "-target {}".format(target)
# We only need mstackrealign to fix issues on 32-bit x86 pre-24. After 24,
# this consumes an extra register unnecessarily, which can cause issues for
# inline asm.
# https://github.com/android-ndk/ndk/issues/693
- if arch == 'i686' and api < 24:
- flags += ' -mstackrealign'
+ if arch == "i686" and api < 24:
+ flags += " -mstackrealign"
cxx_flags = str(flags)
- clang_path = os.path.join(install_dir, 'bin/clang')
- with open(clang_path, 'w') as clang:
- clang.write(textwrap.dedent("""\
+ clang_path = os.path.join(install_dir, "bin/clang")
+ with open(clang_path, "w") as clang:
+ clang.write(
+ textwrap.dedent(
+ """\
#!/usr/bin/env bash
bin_dir=`dirname "$0"`
if [ "$1" != "-cc1" ]; then
@@ -147,14 +165,20 @@
# target/triple already spelled out.
"$bin_dir/clang{version}" "$@"
fi
- """.format(version=version_number, flags=flags)))
+ """.format(
+ version=version_number, flags=flags
+ )
+ )
+ )
mode = os.stat(clang_path).st_mode
os.chmod(clang_path, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
- clangpp_path = os.path.join(install_dir, 'bin/clang++')
- with open(clangpp_path, 'w') as clangpp:
- clangpp.write(textwrap.dedent("""\
+ clangpp_path = os.path.join(install_dir, "bin/clang++")
+ with open(clangpp_path, "w") as clangpp:
+ clangpp.write(
+ textwrap.dedent(
+ """\
#!/usr/bin/env bash
bin_dir=`dirname "$0"`
if [ "$1" != "-cc1" ]; then
@@ -163,21 +187,30 @@
# target/triple already spelled out.
"$bin_dir/clang{version}++" "$@"
fi
- """.format(version=version_number, flags=cxx_flags)))
+ """.format(
+ version=version_number, flags=cxx_flags
+ )
+ )
+ )
mode = os.stat(clangpp_path).st_mode
os.chmod(clangpp_path, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
- shutil.copy2(os.path.join(install_dir, 'bin/clang'),
- os.path.join(install_dir, 'bin', triple + '-clang'))
- shutil.copy2(os.path.join(install_dir, 'bin/clang++'),
- os.path.join(install_dir, 'bin', triple + '-clang++'))
+ shutil.copy2(
+ os.path.join(install_dir, "bin/clang"),
+ os.path.join(install_dir, "bin", triple + "-clang"),
+ )
+ shutil.copy2(
+ os.path.join(install_dir, "bin/clang++"),
+ os.path.join(install_dir, "bin", triple + "-clang++"),
+ )
if windows:
- for pp_suffix in ('', '++'):
- is_cpp = pp_suffix == '++'
- exe_name = 'clang{}{}.exe'.format(version_number, pp_suffix)
- clangbat_text = textwrap.dedent("""\
+ for pp_suffix in ("", "++"):
+ is_cpp = pp_suffix == "++"
+ exe_name = "clang{}{}.exe".format(version_number, pp_suffix)
+ clangbat_text = textwrap.dedent(
+ """\
@echo off
setlocal
call :find_bin
@@ -200,25 +233,28 @@
exit /b
:done
- """.format(exe=exe_name, flags=cxx_flags if is_cpp else flags))
+ """.format(
+ exe=exe_name, flags=cxx_flags if is_cpp else flags
+ )
+ )
- for triple_prefix in ('', triple + '-'):
+ for triple_prefix in ("", triple + "-"):
clangbat_path = os.path.join(
- install_dir, 'bin',
- '{}clang{}.cmd'.format(triple_prefix, pp_suffix))
- with open(clangbat_path, 'w') as clangbat:
+ install_dir, "bin", "{}clang{}.cmd".format(triple_prefix, pp_suffix)
+ )
+ with open(clangbat_path, "w") as clangbat:
clangbat.write(clangbat_text)
def replace_gcc_wrappers(install_path, triple, is_windows):
- cmd = '.cmd' if is_windows else ''
+ cmd = ".cmd" if is_windows else ""
- gcc = os.path.join(install_path, 'bin', triple + '-gcc' + cmd)
- clang = os.path.join(install_path, 'bin', 'clang' + cmd)
+ gcc = os.path.join(install_path, "bin", triple + "-gcc" + cmd)
+ clang = os.path.join(install_path, "bin", "clang" + cmd)
shutil.copy2(clang, gcc)
- gpp = os.path.join(install_path, 'bin', triple + '-g++' + cmd)
- clangpp = os.path.join(install_path, 'bin', 'clang++' + cmd)
+ gpp = os.path.join(install_path, "bin", triple + "-g++" + cmd)
+ clangpp = os.path.join(install_path, "bin", "clang++" + cmd)
shutil.copy2(clangpp, gpp)
@@ -226,107 +262,140 @@
# A Python invocation running concurrently with make_standalone_toolchain.py
# can create a __pycache__ directory inside the src dir. Avoid copying it,
# because it can be in an inconsistent state.
- shutil.copytree(src, dst, ignore=shutil.ignore_patterns("__pycache__"),
- dirs_exist_ok=True)
+ shutil.copytree(
+ src, dst, ignore=shutil.ignore_patterns("__pycache__"), dirs_exist_ok=True
+ )
def create_toolchain(install_path, arch, api, toolchain_path, host_tag):
"""Create a standalone toolchain."""
copytree(toolchain_path, install_path)
triple = get_triple(arch)
- make_clang_scripts(install_path, arch, api, host_tag == 'windows-x86_64')
- replace_gcc_wrappers(install_path, triple, host_tag == 'windows-x86_64')
+ make_clang_scripts(install_path, arch, api, host_tag == "windows-x86_64")
+ replace_gcc_wrappers(install_path, triple, host_tag == "windows-x86_64")
- prebuilt_path = os.path.join(NDK_DIR, 'prebuilt', host_tag)
+ prebuilt_path = os.path.join(NDK_DIR, "prebuilt", host_tag)
copytree(prebuilt_path, install_path)
def warn_unnecessary(arch, api, host_tag):
"""Emits a warning that this script is no longer needed."""
- if host_tag == 'windows-x86_64':
- ndk_var = '%NDK%'
- prompt = 'C:\\>'
+ if host_tag == "windows-x86_64":
+ ndk_var = "%NDK%"
+ prompt = "C:\\>"
else:
- ndk_var = '$NDK'
- prompt = '$ '
+ ndk_var = "$NDK"
+ prompt = "$ "
target = make_clang_target(get_triple(arch), api)
- standalone_toolchain = os.path.join(ndk_var, 'build', 'tools',
- 'make_standalone_toolchain.py')
- toolchain_dir = os.path.join(ndk_var, 'toolchains', 'llvm', 'prebuilt',
- host_tag, 'bin')
- old_clang = os.path.join('toolchain', 'bin', 'clang++')
- new_clang = os.path.join(toolchain_dir, target + '-clang++')
+ standalone_toolchain = os.path.join(
+ ndk_var, "build", "tools", "make_standalone_toolchain.py"
+ )
+ toolchain_dir = os.path.join(
+ ndk_var, "toolchains", "llvm", "prebuilt", host_tag, "bin"
+ )
+ old_clang = os.path.join("toolchain", "bin", "clang++")
+ new_clang = os.path.join(toolchain_dir, target + "-clang++")
logger().warning(
- textwrap.dedent("""\
- make_standalone_toolchain.py is no longer necessary. The
- {toolchain_dir} directory contains target-specific scripts that perform
- the same task. For example, instead of:
+ textwrap.dedent(
+ """\
+ THIS TOOL IS OBSOLETE. The {toolchain_dir} directory contains
+ target-specific scripts that perform the same task. For example,
+ instead of:
- {prompt}python {standalone_toolchain} \\
- --arch {arch} --api {api} --install-dir toolchain
- {prompt}{old_clang} src.cpp
+ {prompt}python {standalone_toolchain} \\
+ --arch {arch} --api {api} --install-dir toolchain
+ {prompt}{old_clang} src.cpp
- Instead use:
+ Instead use:
- {prompt}{new_clang} src.cpp
- """.format(
- toolchain_dir=toolchain_dir,
- prompt=prompt,
- standalone_toolchain=standalone_toolchain,
- arch=arch,
- api=api,
- old_clang=old_clang,
- new_clang=new_clang)))
+ {prompt}{new_clang} src.cpp
+
+ See https://developer.android.com/ndk/guides/other_build_systems for more
+ details.
+ """.format(
+ toolchain_dir=toolchain_dir,
+ prompt=prompt,
+ standalone_toolchain=standalone_toolchain,
+ arch=arch,
+ api=api,
+ old_clang=old_clang,
+ new_clang=new_clang,
+ )
+ )
+ )
-def get_min_supported_api_level():
- platforms_json = os.path.join(NDK_DIR, "meta/platforms.json")
- with open(platforms_json) as platforms:
- return json.load(platforms)["min"]
+def get_min_supported_api_level(arch: str) -> int:
+ abis_json = Path(NDK_DIR) / "meta/abis.json"
+ with abis_json.open(encoding="utf-8") as abis_file:
+ data = json.load(abis_file)
+ return int(data[arch_to_abi(arch)]["min_os_version"])
def parse_args():
"""Parse command line arguments from sys.argv."""
parser = argparse.ArgumentParser(
- description=inspect.getdoc(sys.modules[__name__]))
+ description=inspect.getdoc(sys.modules[__name__]),
+ # Even when there are invalid arguments, we want to emit the deprecation
+ # warning. We usually wait until after argument parsing to emit that warning so
+ # that we can use the --abi and --api inputs to provide a more complete
+ # replacement example, so to do that in the case of an argument error we need to
+ # catch the error rather than allow it to exit immediately.
+ exit_on_error=False,
+ )
parser.add_argument(
- '--arch', required=True,
- choices=('arm', 'arm64', 'x86', 'x86_64'))
+ "--arch", required=True, choices=("arm", "arm64", "riscv64", "x86", "x86_64")
+ )
parser.add_argument(
- '--api', type=int,
- help='Target the given API version (example: "--api 24").')
+ "--api", type=int, help='Target the given API version (example: "--api 24").'
+ )
parser.add_argument(
- '--stl', help='Ignored. Retained for compatibility until NDK r19.')
+ "--stl", help="Ignored. Retained for compatibility until NDK r19."
+ )
parser.add_argument(
- '--force', action='store_true',
- help='Remove existing installation directory if it exists.')
+ "--force",
+ action="store_true",
+ help="Remove existing installation directory if it exists.",
+ )
parser.add_argument(
- '-v', '--verbose', action='count', help='Increase output verbosity.')
+ "-v", "--verbose", action="count", help="Increase output verbosity."
+ )
def path_arg(arg):
return os.path.realpath(os.path.expanduser(arg))
output_group = parser.add_mutually_exclusive_group()
output_group.add_argument(
- '--package-dir', type=path_arg, default=os.getcwd(),
- help=('Build a tarball and install it to the given directory. If '
- 'neither --package-dir nor --install-dir is specified, a '
- 'tarball will be created and installed to the current '
- 'directory.'))
+ "--package-dir",
+ type=path_arg,
+ default=os.getcwd(),
+ help=(
+ "Build a tarball and install it to the given directory. If "
+ "neither --package-dir nor --install-dir is specified, a "
+ "tarball will be created and installed to the current "
+ "directory."
+ ),
+ )
output_group.add_argument(
- '--install-dir', type=path_arg,
- help='Install toolchain to the given directory instead of packaging.')
+ "--install-dir",
+ type=path_arg,
+ help="Install toolchain to the given directory instead of packaging.",
+ )
return parser.parse_args()
def main():
"""Program entry point."""
- args = parse_args()
+ try:
+ args = parse_args()
+ except argparse.ArgumentError as ex:
+ warn_unnecessary("arm64", "21", get_host_tag_or_die())
+ sys.exit(ex)
if args.verbose is None:
logging.basicConfig(level=logging.WARNING)
@@ -341,17 +410,21 @@
check_ndk_or_die()
- lp32 = args.arch in ('arm', 'x86')
- min_api = get_min_supported_api_level() if lp32 else 21
+ min_api = get_min_supported_api_level(args.arch)
api = args.api
if api is None:
logger().warning(
- 'Defaulting to target API %d (minimum supported target for %s)',
- min_api, args.arch)
+ "Defaulting to target API %d (minimum supported target for %s)",
+ min_api,
+ args.arch,
+ )
api = min_api
elif api < min_api:
- sys.exit('{} is less than minimum platform for {} ({})'.format(
- api, args.arch, min_api))
+ sys.exit(
+ "{} is less than minimum platform for {} ({})".format(
+ api, args.arch, min_api
+ )
+ )
triple = get_triple(args.arch)
toolchain_path = get_toolchain_path_or_die(host_tag)
@@ -360,11 +433,10 @@
install_path = args.install_dir
if os.path.exists(install_path):
if args.force:
- logger().info('Cleaning installation directory %s',
- install_path)
+ logger().info("Cleaning installation directory %s", install_path)
shutil.rmtree(install_path)
else:
- sys.exit('Installation directory already exists. Use --force.')
+ sys.exit("Installation directory already exists. Use --force.")
else:
tempdir = tempfile.mkdtemp()
atexit.register(shutil.rmtree, tempdir)
@@ -373,17 +445,19 @@
create_toolchain(install_path, args.arch, api, toolchain_path, host_tag)
if args.install_dir is None:
- if host_tag == 'windows-x86_64':
- package_format = 'zip'
+ if host_tag == "windows-x86_64":
+ package_format = "zip"
else:
- package_format = 'bztar'
+ package_format = "bztar"
package_basename = os.path.join(args.package_dir, triple)
shutil.make_archive(
- package_basename, package_format,
+ package_basename,
+ package_format,
root_dir=os.path.dirname(install_path),
- base_dir=os.path.basename(install_path))
+ base_dir=os.path.basename(install_path),
+ )
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/docs/BuildSystemMaintainers.md b/docs/BuildSystemMaintainers.md
index 1e659b7..e6cb040 100644
--- a/docs/BuildSystemMaintainers.md
+++ b/docs/BuildSystemMaintainers.md
@@ -23,10 +23,8 @@
The NDK uses the [LLVM] family of tools for building C/C++ code. These include
[Clang] for compilation, [LLD] for linking, and other [LLVM tools] for other
-tasks. Historically [Binutils] was used and remains available during the
-transition but is deprecated and will soon be removed from the NDK.
+tasks.
-[Binutils]: https://www.gnu.org/software/binutils
[Clang]: https://clang.llvm.org/
[LLD]: https://lld.llvm.org/
[LLVM tools]: https://llvm.org/docs/CommandGuide/
@@ -135,7 +133,8 @@
20 should use API 19 for their NDK target.
To programatically determine the list of supported API levels as well as aliases
-that are accepted by ndk-build and CMake, see `<NDK>/meta/platforms.json`.
+that are accepted by ndk-build and CMake, see `<NDK>/meta/platforms.json`. For
+ABI-specific minimum supported API levels, see `<NDK>/meta/abis.json`.
Note: In some contexts the API level may be referred to as a platform. In this
document an API level is always an integer, and a platform takes the form of
@@ -168,6 +167,11 @@
being linked were generated from C++ files) and `clang++` should be used
otherwise. Using `clang++` ensures that the C++ standard library is linked.
+When linking a shared library, the `-Wl,-soname,$NAME_OF_LIBRARY` argument is
+required. This is necessary to avoid the problems described in [this stack
+overflow post](https://stackoverflow.com/a/48291044/632035). For example, when
+building `libapp.so`, `-Wl,-soname,libapp.so` must be used.
+
### Target Selection
[Cross-compilation] targets can be selected in one of two ways: by using
@@ -203,35 +207,15 @@
## Linkers
-LLD is the default linker.
-
-Gold is the fallback linker for most architectures, but BFD is used for AArch64
-as Gold previously emitted broken debug information for that architecture (see
-[Issue 70838247] for more details).
-
-The linker used by Clang can be selected with the `-fuse-ld=<linker>` argument,
-passed during linking. For example, to use gold instead of LLD, pass
-`-fuse-ld=gold` when linking. No argument is required to use LLD.
-
-The default linkers are installed to
-`<NDK>/toolchains/llvm/prebuilt/<host-tag>/bin/<triple>-ld` and
-`<NDK>/toolchains/llvm/prebuilt/<host-tag>/<triple>/bin/ld`. BFD and gold are
-installed as `ld.bfd` or `ld.gold` in the same locations. The triple-prefixed
-executables in the common bin directory should be preferred to the
-triple-specific bin directory because the triple-specific directory will be
-removed when binutils is removed from the NDK.
+The NDK uses LLD for linking. The linker is installed to
+`<NDK>/toolchains/llvm/prebuilt/<host-tag>/bin/<triple>-ld`.
Note: It is usually not necessary to invoke the linkers directly since Clang
will do so automatically. Clang will also automatically link CRT objects and
default libraries and set up other target-specific options, so it is generally
better to use Clang for linking.
-Warning: Using LLD with GNU `strip` or `objcopy` breaks RelRO. LLVM `strip` and
-`objcopy` must be used with LLD. See [Issue 843] and the [Binutils] section of
-this document for more information.
-
[Issue 70838247]: https://issuetracker.google.com/70838247
-[Issue 843]: https://github.com/android-ndk/ndk/issues/843
## Binutils
@@ -245,19 +229,17 @@
* llvm-readelf
* llvm-strip
+All LLVM tools are capable of handling every target architecture. Unlike Clang,
+no `-target` argument is required for these tools, so they should behave
+correctly when used as drop-in replacements for their GNU equivalents. Some
+tools may optionally accept a `-target` argument, but if omitted they will
+select the correct target based on the input files.
+
Note that `llvm-as` is **not** an equivalent of GNU `as`, but rather a tool for
assembling LLVM IR. If you are currently using `as` directly, you will need to
migrate to using `clang` as a driver for building assembly. See [Clang
Migration Notes] for advice on fixing assembly to be LLVM compatible.
-GNU Binutils remains available up to and including r22. All binutils tools with
-the exception of the assembler (GAS) were removed in r23. GAS was removed in
-r24.
-
-In r22 or earlier, GNU binutils tools are installed to
-`<NDK>/toolchains/llvm/prebuilt/<host-tag>/bin/<triple>-<tool>` and
-`<NDK>/toolchains/llvm/prebuilt/<host-tag>/<triple>/bin/<tool>`.
-
Note that by default `/usr/bin/as` is used by Clang if the
`-fno-integrated-as` argument is used, which is almost certainly not
what you want!
@@ -427,9 +409,9 @@
than Valgrind (roughly 50% performance compared to an unsanitized application).
To use ASan, pass `-fsanitize=address` when both compiling and linking. The
-sanitizer runtime libraries are installed to
-`<NDK>/toolchains/llvm/prebuilt/<host-tag>/lib64/clang/<clang-version>/lib/linux`.
-The library is named `libclang_rt.asan-<arch>-android.so`. This library must be
+sanitizer runtime libraries are installed to `<clang resource dir>/lib/linux`.
+The Clang resource directory is given by `clang -print-resource-dir`. The
+library is named `libclang_rt.asan-<arch>-android.so`. This library must be
included in the APK. A [wrap.sh] file must also be included in the APK. A
premade wrap.sh file for ASan is installed to `<NDK>/wrap.sh`.
@@ -609,15 +591,19 @@
build, resulting in crashes or incorrect behavior at runtime.
The best way to avoid this problem is to ensure all libraries in the application
-were built with NDK r23 or newer.
+were built with NDK r23 or newer, but even libraries built by older NDKs are
+unlikely to have this problem.
-For cases where that is not an option, build systems can ensure that shared
-libraries are always linked **after** static libraries, and explicitly link the
-unwinder between each group. The linker will prefer definitions that appear
-sooner in the link order, so libunwind appearing **before** the shared libraries
-will prevent the linker from considering the incompatible unwinder provided by
-the broken library. libunwind must be linked after other static libraries to
-provide the unwind interface to those static libraries.
+For build systems that want to protect their users against improperly built
+libraries, read on. **Neither ndk-build nor CMake make this effort.**
+
+To protect against improperly built libraries, build systems can ensure that
+shared libraries are always linked **after** static libraries, and explicitly
+link the unwinder between each group. The linker will prefer definitions that
+appear sooner in the link order, so libunwind appearing **before** the shared
+libraries will prevent the linker from considering the incompatible unwinder
+provided by the broken library. libunwind must be linked after other static
+libraries to provide the unwind interface to those static libraries.
The following link order will protect against incorrectly built dependencies:
diff --git a/docs/Building.md b/docs/Building.md
index d6549c3..d26edfc 100644
--- a/docs/Building.md
+++ b/docs/Building.md
@@ -18,13 +18,13 @@
```bash
# For non-Googlers:
-repo init -u https://android.googlesource.com/platform/manifest -b master-ndk
+repo init -u https://android.googlesource.com/platform/manifest -b master-ndk --partial-clone
# Googlers, follow http://go/repo-init/master-ndk (select AOSP in the Host menu,
# and uncheck the box for the git superproject). At time of writing, the correct
# invocation is:
repo init -u \
- sso://android.git.corp.google.com/platform/manifest -b master-ndk
+ sso://android.git.corp.google.com/platform/manifest -b master-ndk --partial-clone
```
If you wish to rebuild a given release of the NDK, the release branches can also
@@ -69,6 +69,9 @@
poetry install
```
+Note: If `poetry install` hangs on Linux, try
+`PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring poetry install`.
+
Spawn a new shell using the virtualenv that Poetry created. You could instead
run NDK commands with the `poetry run` prefix (e.g. `poetry run
./checkbuild.py`), but it's simpler to just spawn a new shell. Plus, if it's in
@@ -78,6 +81,46 @@
poetry shell
```
+### macOS workarounds
+
+On macOS you may not be able to use the Python that is in prebuilts because it
+does not support the ssl module (which poetry itself needs). Until the Python
+prebuilt includes that module, do the following to use a different Python:
+
+First time setup: ensure that you have pyenv installed. You may need to install
+homebrew (http://go/homebrew for Googlers, else https://brew.sh/).
+
+```
+$ brew update && brew upgrade pyenv
+```
+
+Then set up your tree to use the correct version of Python. This setting will
+apply to the directory it is run in, so you will need to do it per NDK tree.
+
+```
+# From the //ndk directory of your NDK tree:
+$ ../prebuilts/python/darwin-x86/bin/python3 --version
+Python 3.10.3
+# We don't need to match the version exactly, just the major/minor version.
+$ pyenv install 3.10:latest
+$ pyenv versions
+# pyenv will list the available Python versions. Use the latest version that
+# matches the prebuilt. In my case that's 3.10.6.
+$ pyenv local 3.10.6
+$ python --version
+Python 3.10.6
+$ poetry env use 3.10
+$ poetry install
+```
+
+Each time the NDK updates to a new version of Python, you'll need to repeat
+those steps. You may also need to remove the old poetry environment
+(`poetry env list` to get the name, `poetry env remove` to remove it).
+
+`checkbuild.py` and `run_tests.py` will complain when you try to use a Python
+that doesn't come from prebuilts by default. To suppress that, pass
+`--permissive-python-environment` when using those tools in this environment.
+
## Build
### For Linux or Darwin
diff --git a/docs/Testing.md b/docs/Testing.md
index 766ab58..22a9ffa 100644
--- a/docs/Testing.md
+++ b/docs/Testing.md
@@ -297,9 +297,15 @@
tests somewhere, and then run:
```bash
-$ ./run_tests.py path/to/extracted/tests
+$ ./run_tests.py --clean-device path/to/extracted/tests
```
+`--clean-device` is necessary to ensure that the new tests do get pushed to the
+device even if the timestamps on the tests are older than what's currently
+there. If you need to re-run those tests (say, to debug a failing test), you
+will want to omit `--clean-device` for each subsequent run of the same test
+package or each test run will take a very long time.
+
The ndk-tests.tar.bz2 artifact will exist for each of the "linux", "darwin_mac",
and "win64_tests" targets. All of them must be downloaded and run. Running only
the tests from the linux build will not verify that the windows or darwin NDKs
diff --git a/docs/Toolchains.md b/docs/Toolchains.md
index 158d272..0bae4f6 100644
--- a/docs/Toolchains.md
+++ b/docs/Toolchains.md
@@ -44,7 +44,7 @@
```bash
# Edit ndk/toolchains.py and update `CLANG_VERSION`.
-$ ./checkbuid.py
+$ ./checkbuild.py
# ./run_tests.py
```
diff --git a/docs/changelogs/Changelog-r25.md b/docs/changelogs/Changelog-r25.md
index ef0c538..474a331 100644
--- a/docs/changelogs/Changelog-r25.md
+++ b/docs/changelogs/Changelog-r25.md
@@ -20,6 +20,24 @@
[Issue 1751]: https://github.com/android/ndk/issues/1751
+## r25c
+
+* Updated LLVM to clang-r450784d1, based on LLVM 14 development.
+ * [Issue 1797]: Fixed LLDB handling of forward slashes in absolute paths on
+ Windows.
+ * [Issue 1832]: Improvements to aarch64 vector code generation.
+* [Issue 1813]: `-Wl,--gc-sections` is no longer set by default for debug
+ builds. This behavior was removed because it could cause the linker to remove
+ functions that may be useful to evaluate during debugging. The new CMake
+ toolchain file (`-DANDROID_USE_LEGACY_TOOLCHAIN_FILE=OFF`, not the default
+ behavior) does not include this fix because it requires a CMake fix first.
+* [Issue 1757]: Updated simpleperf. Includes fix for signing denial when run on
+ macOS.
+
+[Issue 1797]: https://github.com/android/ndk/issues/1797
+[Issue 1813]: https://github.com/android/ndk/issues/1813
+[Issue 1832]: https://github.com/android/ndk/issues/1832
+
## r25b
* [Issue 1739]: Fixed C compatibility issue in `amidi/AMidi.h`.
diff --git a/docs/changelogs/Changelog-r26.md b/docs/changelogs/Changelog-r26.md
index 6ac8c16..d9c194a 100644
--- a/docs/changelogs/Changelog-r26.md
+++ b/docs/changelogs/Changelog-r26.md
@@ -15,11 +15,59 @@
* KitKat (APIs 19 and 20) is no longer supported. The minimum OS supported by
the NDK is Lollipop (API level 21). See [Issue 1751] for details.
+* libc++ has been updated. The NDK's libc++ now comes directly from our LLVM
+ toolchain, so every future LLVM update is also a libc++ update. Future
+ changelogs will not explicitly mention libc++ updates.
-[Issue 1751]: https://github.com/android/nßdk/issues/1751
+[Issue 1751]: https://github.com/android/ndk/issues/1751
+
+## r26b
+
+* Updated LLVM to clang-r487747d. See `AndroidVersion.txt` and
+ `clang_source_info.md` in the toolchain directory for version information.
+ * This update was intended to be included in r26 RC 1. The original release
+ noted these fixes in the changelog, but the new toolchain had not actually
+ been included.
+ * [Issue 1907]: HWASan linker will be used automatically for
+ `minSdkVersion 34` or higher.
+ * [Issue 1909]: Fixed ABI mismatch between function-multi-versioning and ifunc
+ resolvers.
+* [Issue 1938]: Fixed ndk-stack to use the correct path for llvm-symbolizer and
+ other tools.
+
+[Issue 1907]: https://github.com/android/ndk/issues/1907
+[Issue 1909]: https://github.com/android/ndk/issues/1909
+[Issue 1938]: https://github.com/android/ndk/issues/1938
## Changes
+* Updated LLVM to clang-r487747c. See `AndroidVersion.txt` and
+ `clang_source_info.md` in the toolchain directory for version information.
+ * Clang now treats `-Wimplicit-function-declaration` as an error rather than a
+ warning in C11 and newer. Clang's default C standard is 17, so this is a
+ change in default behavior compared to older versions of Clang, but is the
+ behavior defined by C99.
+
+ If you encounter these errors when upgrading, you most likely forgot an
+ `#include`. If you cannot (or do not want to) fix those issues, you can
+ revert to the prior behavior with
+ `-Wno-error=implicit-function-declaration`.
+
+ C++ users are unaffected. This has never been allowed in C++.
+
+ See https://reviews.llvm.org/D122983 for more details.
+ * [Issue 1298]: Fixed seccomp error with ASan on x86_64 devices.
+ * [Issue 1530]: Updated libc++ to match LLVM version.
+ * [Issue 1565]: Fixed lldb ncurses issue with terminal database on Darwin.
+ * [Issue 1677]: Fixed Clang crash in optimizer.
+ * [Issue 1679]: Clang will now automatically enable ELF TLS for
+ `minSdkVersion 29` or higher.
+ * [Issue 1834]: Fixed Clang crash during SVE conversions.
+ * [Issue 1860]: Fixed miscompilation affecting armv7.
+ * [Issue 1861]: Fixed front end crash in Clang.
+ * [Issue 1862]: Fixed Clang crash for aarch64 with `-Os`.
+ * [Issue 1880]: Fixed crash in clang-format.
+ * [Issue 1883]: Fixed crash when incorrectly using neon intrinsics.
* Version scripts that name public symbols that are not present in the library
will now emit an error by default for ndk-build and the CMake toolchain file.
Build failures caused by this error are likely a bug in your library or a
@@ -28,6 +76,41 @@
`LOCAL_ALLOW_UNDEFINED_VERSION_SCRIPT_SYMBOLS := true` in your `Android.mk`
file. For other build systems, see the secion titled "Version script
validation" in the [build system maintainers guide].
+* [Issue 837]: Weak symbols for API additions is supported. Provide
+ `__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__` as an option.
+* [Issue 1400]: NDK paths with spaces will now be diagnosed by ndk-build on
+ Windows. This has never been supported for any OS, but the error message
+ wasn't previously working on Windows either.
+* [Issue 1764]: Fixed Python 3 incompatibility when using `ndk-gdb` with `-f`.
+* [Issue 1803]: Removed useless `strtoq` and `strtouq` from the libc stub
+ libraries. These were never exposed in the header files, but could confuse
+ some autoconf like systems.
+* [Issue 1852]: Fixed ODR issue in linux/time.h.
+* [Issue 1878]: Fixed incorrect definition of `WIFSTOPPED`.
+* ndk-build now uses clang rather than clang++ when linking modules that do not
+ have C++ sources. There should not be any observable behavior differences
+ because ndk-build previously handled the C/C++ linking differences itself.
+* ndk-build now delegates C++ stdlib linking to the Clang driver. It is unlikely
+ that this will cause any observable behavior change, but any new behavior will
+ more closely match CMake and other build systems.
+
+[Issue 837]: https://github.com/android/ndk/issues/837
+[Issue 1298]: https://github.com/android/ndk/issues/1298
+[Issue 1400]: https://github.com/android/ndk/issues/1400
+[Issue 1530]: https://github.com/android/ndk/issues/1530
+[Issue 1565]: https://github.com/android/ndk/issues/1565
+[Issue 1677]: https://github.com/android/ndk/issues/1677
+[Issue 1679]: https://github.com/android/ndk/issues/1679
+[Issue 1764]: https://github.com/android/ndk/issues/1764
+[Issue 1803]: https://github.com/android/ndk/issues/1803
+[Issue 1834]: https://github.com/android/ndk/issues/1834
+[Issue 1852]: https://github.com/android/ndk/issues/1852
+[Issue 1860]: https://github.com/android/ndk/issues/1860
+[Issue 1861]: https://github.com/android/ndk/issues/1861
+[Issue 1862]: https://github.com/android/ndk/issues/1862
+[Issue 1878]: https://github.com/android/ndk/issues/1878
+[Issue 1880]: https://github.com/android/ndk/issues/1880
+[Issue 1883]: https://github.com/android/ndk/issues/1883
## Known Issues
diff --git a/docs/changelogs/Changelog-r27.md b/docs/changelogs/Changelog-r27.md
new file mode 100644
index 0000000..2b6e23c
--- /dev/null
+++ b/docs/changelogs/Changelog-r27.md
@@ -0,0 +1,70 @@
+# Changelog
+
+Report issues to [GitHub].
+
+For Android Studio issues, follow the docs on the [Android Studio site].
+
+If you're a build system maintainer that needs to use the tools in the NDK
+directly, see the [build system maintainers guide].
+
+[GitHub]: https://github.com/android/ndk/issues
+[Android Studio site]: http://tools.android.com/filing-bugs
+[build system maintainers guide]: https://android.googlesource.com/platform/ndk/+/master/docs/BuildSystemMaintainers.md
+
+## Announcements
+
+## Changes
+
+* Updated LLVM to clang-r498229b. See `AndroidVersion.txt` and
+ `clang_source_info.md` in the toolchain directory for version information.
+* A RISC-V sysroot (AKA riscv64, or rv64) has been added. It is **not**
+ supported. It is present to aid bringup for OS vendors, but it's not yet a
+ supported Android ABI. It will not be built by default.
+* [Issue 1856]: Target-prefixed cmd wrappers for clang should now behave
+ appropriately when the first argument includes quotes. **You probably do not
+ need to use those wrappers.** In most cases where you would use
+ `aarch64-linux-android21-clang`, you can instead use `clang -target
+ aarch64-linux-android21`, e.g. `CC="clang -target aarch64-linux-android21"
+ ./configure`. The wrappers are only needed when working with systems that do
+ not properly handle a `CC` that includes arguments.
+* [Issue 1898]: ndk-stack now tolerates 0x prefixed addresses.
+* [Issue 1921]: `ANDROID_USE_LEGACY_TOOLCHAIN_FILE` value is now preserved
+ during try-compile steps when `ON`.
+* [Issue 1974]: Unintentionally shipped Vulkan headers have been removed from
+ `sources/third_party/vulkan`. The standard Vulkan headers are included in the
+ Android sysroot, which Clang will find automatically.
+
+[Issue 1856]: https://github.com/android/ndk/issues/1856
+[Issue 1898]: https://github.com/android/ndk/issues/1898
+[Issue 1921]: https://github.com/android/ndk/issues/1921
+[Issue 1974]: https://github.com/android/ndk/issues/1974
+
+
+## Known Issues
+
+This is not intended to be a comprehensive list of all outstanding bugs.
+
+* [Issue 360]: `thread_local` variables with non-trivial destructors will cause
+ segfaults if the containing library is `dlclose`ed. This was fixed in API 28,
+ but code running on devices older than API 28 will need a workaround. The
+ simplest fix is to **stop calling `dlclose`**. If you absolutely must continue
+ calling `dlclose`, see the following table:
+
+ | | Pre-API 23 | APIs 23-27 | API 28+ |
+ | ----------------- | -------------------- | ------------- | ------- |
+ | No workarounds | Works for static STL | Broken | Works |
+ | `-Wl,-z,nodelete` | Works for static STL | Works | Works |
+ | No `dlclose` | Works | Works | Works |
+
+ If your code must run on devices older than M (API 23) and you cannot use the
+ static STL (common), **the only fix is to not call `dlclose`**, or to stop
+ using `thread_local` variables with non-trivial destructors.
+
+ If your code does not need to run on devices older than API 23 you can link
+ with `-Wl,-z,nodelete`, which instructs the linker to ignore `dlclose` for
+ that library. You can backport this behavior by not calling `dlclose`.
+
+ The fix in API 28 is the standardized inhibition of `dlclose`, so you can
+ backport the fix to older versions by not calling `dlclose`.
+
+[Issue 360]: https://github.com/android/ndk/issues/360
diff --git a/meta/abis.json b/meta/abis.json
index 48462dc..665a768 100644
--- a/meta/abis.json
+++ b/meta/abis.json
@@ -17,6 +17,15 @@
"triple": "aarch64-linux-android",
"llvm_triple": "aarch64-none-linux-android"
},
+ "riscv64": {
+ "bitness": 64,
+ "default": false,
+ "deprecated": false,
+ "proc": "riscv64",
+ "arch": "riscv64",
+ "triple": "riscv64-linux-android",
+ "llvm_triple": "riscv64-none-linux-android"
+ },
"x86": {
"bitness": 32,
"default": true,
diff --git a/meta/platforms.json b/meta/platforms.json
index 824bb80..498afa0 100644
--- a/meta/platforms.json
+++ b/meta/platforms.json
@@ -1,6 +1,6 @@
{
"min": 21,
- "max": 33,
+ "max": 35,
"aliases": {
"20": 19,
"25": 24,
@@ -20,6 +20,8 @@
"R": 30,
"S": 31,
"Sv2": 32,
- "Tiramisu": 33
+ "Tiramisu": 33,
+ "UpsideDownCake": 34,
+ "VanillaIceCream": 35
}
}
diff --git a/mypy.ini b/mypy.ini
deleted file mode 100644
index 50e21d5..0000000
--- a/mypy.ini
+++ /dev/null
@@ -1,33 +0,0 @@
-[mypy]
-check_untyped_defs = True
-# TODO: Investigate fixing type signatures for mocks.
-# disallow_any_decorated = True
-# This flags a *lot* of things since things like json.load return Any.
-# disallow_any_expr = True
-disallow_any_generics = True
-disallow_untyped_decorators = True
-disallow_untyped_defs = True
-follow_imports = silent
-implicit_reexport = False
-namespace_packages = True
-no_implicit_optional = True
-show_error_codes = True
-strict_equality = True
-warn_redundant_casts = True
-warn_return_any = True
-warn_unreachable = True
-warn_unused_configs = True
-warn_unused_ignores = True
-# TODO: Add type information to the adb module so we can enable these.
-# disallow_any_unimported = True
-# disallow_subclassing_any = True
-exclude=(?x)(
- ^tests/|
- ^build/tools/make_standalone_toolchain.py$|
- ^parse_elfnote.py$|
- ^scripts/update_dac.py$|
- ^scripts/gen_release_table.py$|
- ^scripts/create_windows_instance.py$)
-
-[mypy-adb]
-ignore_missing_imports = True
diff --git a/ndk/abis.py b/ndk/abis.py
index a710d9e..18dfad7 100644
--- a/ndk/abis.py
+++ b/ndk/abis.py
@@ -14,10 +14,10 @@
# limitations under the License.
#
"""Constants and helper functions for NDK ABIs."""
-from typing import List, NewType, Optional
+from collections.abc import Iterator
+from typing import NewType, Optional
-from .platforms import FIRST_LP64_API_LEVEL, MIN_API_LEVEL
-
+from .platforms import FIRST_LP64_API_LEVEL, FIRST_RISCV64_API_LEVEL, MIN_API_LEVEL
Arch = NewType("Arch", str)
Abi = NewType("Abi", str)
@@ -32,6 +32,7 @@
LP64_ABIS = (
Abi("arm64-v8a"),
+ Abi("riscv64"),
Abi("x86_64"),
)
@@ -42,78 +43,52 @@
ALL_ARCHITECTURES = (
Arch("arm"),
Arch("arm64"),
+ Arch("riscv64"),
Arch("x86"),
Arch("x86_64"),
)
-ALL_TOOLCHAINS = (
- Toolchain("arm-linux-androideabi"),
- Toolchain("aarch64-linux-android"),
- Toolchain("x86"),
- Toolchain("x86_64"),
-)
-
-
ALL_TRIPLES = (
"arm-linux-androideabi",
"aarch64-linux-android",
+ "riscv64-linux-android",
"i686-linux-android",
"x86_64-linux-android",
)
-def arch_to_toolchain(arch: Arch) -> Toolchain:
- """Returns the NDK toolchain name for the given architecture."""
- return dict(zip(ALL_ARCHITECTURES, ALL_TOOLCHAINS))[arch]
-
-
def arch_to_triple(arch: Arch) -> str:
"""Returns the triple for the given architecture."""
return dict(zip(ALL_ARCHITECTURES, ALL_TRIPLES))[arch]
-def toolchain_to_arch(toolchain: Toolchain) -> Arch:
- """Returns the architecture for the given toolchain."""
- return dict(zip(ALL_TOOLCHAINS, ALL_ARCHITECTURES))[toolchain]
-
-
-def arch_to_abis(arch: Arch) -> List[Abi]:
- """Returns the ABIs for the given architecture."""
- return {
- Arch("arm"): [Abi("armeabi-v7a")],
- Arch("arm64"): [Abi("arm64-v8a")],
- Arch("x86"): [Abi("x86")],
- Arch("x86_64"): [Abi("x86_64")],
- }[arch]
-
-
def abi_to_arch(abi: Abi) -> Arch:
"""Returns the architecture for the given ABI."""
return {
Abi("armeabi-v7a"): Arch("arm"),
Abi("arm64-v8a"): Arch("arm64"),
+ Abi("riscv64"): Arch("riscv64"),
Abi("x86"): Arch("x86"),
Abi("x86_64"): Arch("x86_64"),
}[abi]
-def clang_target(arch: Arch, api: Optional[int] = None) -> str:
- """Returns the Clang target to be used for the given arch/API combo.
+def abi_to_triple(abi: Abi) -> str:
+ """Returns the triple for the given ABI."""
+ return arch_to_triple(abi_to_arch(abi))
- Args:
- arch: Architecture to compile for. 'arm' will target ARMv7.
- api: API level to compile for. Defaults to the lowest supported API
- level for the architecture if None.
+
+def clang_target(abi: Abi, api: Optional[int] = None) -> str:
+ """Returns the Clang target to be used for the given ABI/API combo.
+
+ api: API level to compile for. Defaults to the lowest supported API
+ level for the architecture if None.
"""
if api is None:
- # Currently there is only one ABI per arch.
- abis = arch_to_abis(arch)
- assert len(abis) == 1
- abi = abis[0]
api = min_api_for_abi(abi)
- triple = arch_to_triple(arch)
- if arch == "arm":
+ triple = abi_to_triple(abi)
+ if abi == Abi("armeabi-v7a"):
triple = "armv7a-linux-androideabi"
return f"{triple}{api}"
@@ -132,8 +107,17 @@
...
ValueError: Invalid ABI: foobar
"""
+ if abi == Abi("riscv64"):
+ return FIRST_RISCV64_API_LEVEL
if abi in LP64_ABIS:
return FIRST_LP64_API_LEVEL
if abi in LP32_ABIS:
return MIN_API_LEVEL
raise ValueError("Invalid ABI: {}".format(abi))
+
+
+def iter_abis_for_api(api: int) -> Iterator[Abi]:
+ """Returns an Iterator over ABIs available at the given API level."""
+ for abi in ALL_ABIS:
+ if min_api_for_abi(abi) <= api:
+ yield abi
diff --git a/ndk/ansi.py b/ndk/ansi.py
index 5ef4990..b32d1e6 100644
--- a/ndk/ansi.py
+++ b/ndk/ansi.py
@@ -14,8 +14,7 @@
# limitations under the License.
#
"""ANSI terminal control."""
-from __future__ import absolute_import
-from __future__ import print_function
+from __future__ import absolute_import, print_function
import contextlib
import os
diff --git a/ndk/archive.py b/ndk/archive.py
index d0e3774..091b011 100644
--- a/ndk/archive.py
+++ b/ndk/archive.py
@@ -15,13 +15,13 @@
#
"""Helper functions for reading and writing .zip and .tar.bz2 archives."""
import os
-from pathlib import Path
import shutil
import subprocess
+from pathlib import Path
from typing import List
-from ndk.hosts import Host
import ndk.paths
+from ndk.hosts import Host
def make_bztar(base_name: Path, root_dir: Path, base_dir: Path) -> None:
diff --git a/ndk/autoconf.py b/ndk/autoconf.py
index e198891..8f0ea9b 100644
--- a/ndk/autoconf.py
+++ b/ndk/autoconf.py
@@ -16,18 +16,17 @@
"""APIs for dealing with autoconf scripts."""
import multiprocessing
import os
-from pathlib import Path
import pipes
import pprint
import shutil
import subprocess
+from pathlib import Path
from typing import ContextManager, Dict, List, Optional
import ndk.ext.os
-from ndk.hosts import Host, get_default_host
import ndk.paths
import ndk.toolchains
-
+from ndk.hosts import Host, get_default_host
HOST_TRIPLE_MAP = {
Host.Darwin: "x86_64-apple-darwin",
diff --git a/ndk/builds.py b/ndk/builds.py
index deb412a..c085faa 100644
--- a/ndk/builds.py
+++ b/ndk/builds.py
@@ -19,18 +19,20 @@
"""
from __future__ import annotations
-from enum import auto, Enum, unique
-from pathlib import Path, PureWindowsPath
import shutil
import stat
import subprocess
+import sys
import textwrap
+import zipapp
+from enum import Enum, auto, unique
+from pathlib import Path, PureWindowsPath
from typing import Any, Dict, Iterator, List, Optional, Set
+import ndk.paths
from ndk.autoconf import AutoconfBuilder
from ndk.cmake import CMakeBuilder
from ndk.hosts import Host
-import ndk.paths
class ModuleValidateError(RuntimeError):
@@ -58,7 +60,7 @@
dist_dir: Path,
modules: List[Module],
host: Host,
- build_number: str,
+ build_number: int,
) -> None:
self.out_dir = out_dir
self.dist_dir = dist_dir
@@ -403,28 +405,6 @@
shutil.copy2(self.src, install_path)
-class MultiFileModule(Module):
- """A module that installs multiple files to the NDK.
-
- This is similar to FileModule, but allows multiple files to be installed
- with a single module.
- """
-
- @property
- def files(self) -> Iterator[Path]:
- """List of absolute paths to files to be installed."""
- yield from []
-
- def build(self) -> None:
- pass
-
- def install(self) -> None:
- install_dir = self.get_install_path()
- install_dir.mkdir(parents=True, exist_ok=True)
- for file_path in self.files:
- shutil.copy2(file_path, install_dir)
-
-
class ScriptShortcutModule(Module):
"""A module that installs a shortcut to another script in the NDK.
@@ -443,6 +423,8 @@
# These are all trivial shell scripts that we generated. No notice needed.
no_notice = True
+ disallow_windows_install_path_with_spaces: bool = False
+
def validate(self) -> None:
super().validate()
@@ -463,14 +445,19 @@
script = self.get_script_path().with_suffix(self.windows_ext)
install_path = self.get_install_path().with_suffix(".cmd")
- install_path.write_text(
- textwrap.dedent(
- f"""\
- @echo off
- %~dp0{PureWindowsPath(script)} %*
+ text = "@echo off\n"
+ if self.disallow_windows_install_path_with_spaces:
+ text += textwrap.dedent(
+ """\
+ rem https://stackoverflow.com/a/29057742/632035
+ for /f "tokens=2" %%a in ("%~dp0") do (
+ echo ERROR: NDK path cannot contain spaces
+ exit /b 1
+ )
"""
)
- )
+ text += f"%~dp0{PureWindowsPath(script)} %*"
+ install_path.write_text(text)
def make_sh_helper(self) -> None:
"""Makes a bash helper script for POSIX systems."""
@@ -519,6 +506,126 @@
pass
+class PythonApplication(Module):
+ """A PEP 441 Python Zip Application.
+
+ https://peps.python.org/pep-0441/
+
+ A Python Zip Application is a zipfile of a Python package with an entry point that
+    is runnable by the Python interpreter. PythonApplication will create the pyz
+ application with its bundled dependencies and a launcher script that will invoke it
+ using the NDK's bundled Python interpreter.
+ """
+
+ package: Path
+ pip_dependencies: list[Path] = []
+ copy_to_python_path: list[Path] = []
+ main: str
+
+ def build(self) -> None:
+ if self._staging.exists():
+ shutil.rmtree(self._staging)
+ self._staging.mkdir(parents=True)
+
+ if self.package.is_file():
+ shutil.copy(self.package, self._staging / self.package.name)
+ (self._staging / "__init__.py").touch()
+ else:
+ shutil.copytree(self.package, self._staging / self.package.name)
+
+ for path in self.copy_to_python_path:
+ if path.is_file():
+ shutil.copy(path, self._staging / path.name)
+ else:
+ shutil.copytree(path, self._staging / path.name)
+
+ if self.pip_dependencies:
+ # Apparently pip doesn't want us to use it as a library.
+ # https://pip.pypa.io/en/latest/user_guide/#using-pip-from-your-program
+ subprocess.run(
+ [
+ sys.executable,
+ "-m",
+ "pip",
+ "install",
+ "--target",
+ self._staging,
+ *self.pip_dependencies,
+ ],
+ check=True,
+ )
+
+ zipapp.create_archive(
+ source=self._staging,
+ target=self._pyz_build_location,
+ main=self.main,
+ filter=self.zipapp_file_filter,
+ )
+
+ @staticmethod
+ def zipapp_file_filter(path: Path) -> bool:
+ if ".git" in path.parts:
+ return False
+ if "__pycache__" in path.parts:
+ return False
+ if ".mypy_cache" in path.parts:
+ return False
+ if ".pytest_cache" in path.parts:
+ return False
+ if path.suffix in {".pyc", ".pyo"}:
+ return False
+ return True
+
+ def install(self) -> None:
+ install_path = self.get_install_path()
+ install_path.parent.mkdir(parents=True, exist_ok=True)
+ shutil.copy(self._pyz_build_location, install_path)
+ self.create_launcher()
+
+ def create_launcher(self) -> None:
+ if self.host is Host.Windows64:
+ self.create_cmd_launcher()
+ else:
+ self.create_bash_launcher()
+
+ def create_cmd_launcher(self) -> None:
+ self.get_install_path().with_name(f"{self.name}.cmd").write_text(
+ textwrap.dedent(
+ f"""\
+ @echo off
+ setlocal
+ set ANDROID_NDK_PYTHON=%~dp0..\\..\\..\\toolchains\\llvm\\prebuilt\\windows-x86_64\\python3\\python.exe
+ set SHELL=cmd
+ "%ANDROID_NDK_PYTHON%" -u "%~dp0{self.get_install_path().name}" %*
+ """
+ )
+ )
+
+ def create_bash_launcher(self) -> None:
+ launcher = self.get_install_path().with_name(self.name)
+ launcher.write_text(
+ textwrap.dedent(
+ f"""\
+ #!/usr/bin/env bash
+ THIS_DIR=$(cd "$(dirname "$0")" && pwd)
+ ANDROID_NDK_ROOT=$(cd "$THIS_DIR/../../.." && pwd)
+ . "$ANDROID_NDK_ROOT/build/tools/ndk_bin_common.sh"
+ "$ANDROID_NDK_PYTHON" "$THIS_DIR/{self.get_install_path().name}" "$@"
+ """
+ )
+ )
+ mode = launcher.stat().st_mode
+ launcher.chmod(mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
+
+ @property
+ def _staging(self) -> Path:
+ return self.intermediate_out_dir / self.name
+
+ @property
+ def _pyz_build_location(self) -> Path:
+ return self.intermediate_out_dir / self.get_install_path().name
+
+
class LintModule(Module):
def build(self) -> None:
self.run()
diff --git a/ndk/checkbuild.py b/ndk/checkbuild.py
index fb7af36..3af79b9 100755
--- a/ndk/checkbuild.py
+++ b/ndk/checkbuild.py
@@ -23,14 +23,11 @@
import collections
import contextlib
import copy
-
import inspect
import json
import logging
import multiprocessing
import os
-from pathlib import Path
-import pipes
import re
import shutil
import site
@@ -39,6 +36,8 @@
import sys
import textwrap
import traceback
+from collections.abc import Sequence
+from pathlib import Path
from typing import (
Any,
Callable,
@@ -50,7 +49,6 @@
Set,
TextIO,
Tuple,
- Union,
)
import ndk.abis
@@ -61,23 +59,26 @@
import ndk.cmake
import ndk.config
import ndk.deps
-import ndk.file
-from ndk.hosts import Host
import ndk.notify
import ndk.paths
-from ndk.paths import ANDROID_DIR, NDK_DIR
-from ndk.platforms import MIN_API_LEVEL
import ndk.test.builder
import ndk.test.printers
import ndk.test.spec
import ndk.timer
-from ndk.toolchains import ClangToolchain, CLANG_VERSION
import ndk.ui
import ndk.workqueue
+from ndk.abis import ALL_ABIS, Abi
+from ndk.crtobjectbuilder import CrtObjectBuilder
+from ndk.hosts import Host
+from ndk.paths import ANDROID_DIR, NDK_DIR, PREBUILT_SYSROOT
+from ndk.platforms import ALL_API_LEVELS, API_LEVEL_ALIASES, MAX_API_LEVEL
+from ndk.toolchains import CLANG_VERSION, ClangToolchain
+
+from .ndkversionheadergenerator import NdkVersionHeaderGenerator
from .pythonenv import ensure_python_environment
-def get_version_string(build_number: str) -> str:
+def get_version_string(build_number: int) -> str:
"""Returns the version string for the current build."""
return f"{ndk.config.major}.{ndk.config.hotfix}.{build_number}"
@@ -174,7 +175,11 @@
def make_app_bundle(
- zip_path: Path, ndk_dir: Path, build_number: str, build_dir: Path
+ worker: ndk.workqueue.Worker,
+ zip_path: Path,
+ ndk_dir: Path,
+ build_number: int,
+ build_dir: Path,
) -> None:
"""Builds a macOS App Bundle of the NDK.
@@ -195,6 +200,7 @@
ndk_dir: The path to the NDK being bundled.
build_dir: The path to the top level build directory.
"""
+ worker.status = "Packaging MacOS App Bundle"
package_dir = build_dir / "bundle"
app_directory_name = f"AndroidNDK{build_number}.app"
bundle_dir = package_dir / app_directory_name
@@ -221,8 +227,34 @@
)
+def make_brtar(
+ worker: ndk.workqueue.Worker,
+ base_name: Path,
+ root_dir: Path,
+ base_dir: Path,
+ preserve_symlinks: bool,
+) -> None:
+ worker.status = "Packaging .tar.br"
+ ndk.archive.make_brtar(
+ base_name, root_dir, base_dir, preserve_symlinks=preserve_symlinks
+ )
+
+
+def make_zip(
+ worker: ndk.workqueue.Worker,
+ base_name: Path,
+ root_dir: Path,
+ paths: List[str],
+ preserve_symlinks: bool,
+) -> None:
+ worker.status = "Packaging .zip"
+ ndk.archive.make_zip(
+ base_name, root_dir, paths, preserve_symlinks=preserve_symlinks
+ )
+
+
def package_ndk(
- ndk_dir: Path, out_dir: Path, dist_dir: Path, host: Host, build_number: str
+ ndk_dir: Path, out_dir: Path, dist_dir: Path, host: Host, build_number: int
) -> Path:
"""Packages the built NDK for distribution.
@@ -238,24 +270,36 @@
purge_unwanted_files(ndk_dir)
- if host == Host.Darwin:
- bundle_name = f"android-ndk-{build_number}-app-bundle"
- bundle_path = dist_dir / bundle_name
- make_app_bundle(bundle_path, ndk_dir, build_number, out_dir)
+ workqueue: ndk.workqueue.WorkQueue = ndk.workqueue.WorkQueue()
+ try:
+ if host == Host.Darwin:
+ workqueue.add_task(
+ make_app_bundle,
+ dist_dir / f"android-ndk-{build_number}-app-bundle",
+ ndk_dir,
+ build_number,
+ out_dir,
+ )
+ workqueue.add_task(
+ make_brtar,
+ package_path,
+ ndk_dir.parent,
+ Path(ndk_dir.name),
+ preserve_symlinks=(host != Host.Windows64),
+ )
+ workqueue.add_task(
+ make_zip,
+ package_path,
+ ndk_dir.parent,
+ [ndk_dir.name],
+ preserve_symlinks=(host != Host.Windows64),
+ )
+ ndk.ui.finish_workqueue_with_ui(workqueue, ndk.ui.get_build_progress_ui)
+ finally:
+ workqueue.terminate()
+ workqueue.join()
# TODO: Treat the .tar.br archive as authoritative and return its path.
- # TODO: Create archives in parallel.
- ndk.archive.make_brtar(
- package_path,
- ndk_dir.parent,
- Path(ndk_dir.name),
- preserve_symlinks=(host != Host.Windows64),
- )
- return ndk.archive.make_zip(
- package_path,
- ndk_dir.parent,
- [ndk_dir.name],
- preserve_symlinks=(host != Host.Windows64),
- )
+ return package_path.with_suffix(".zip")
def build_ndk_tests(out_dir: Path, dist_dir: Path, args: argparse.Namespace) -> bool:
@@ -300,7 +344,7 @@
# Write out the result to logs/build_error.log so we can find the
# failure easily on the build server.
log_path = dist_dir / "logs" / "build_error.log"
- with open(log_path, "a") as error_log:
+ with log_path.open("a", encoding="utf-8") as error_log:
error_log_printer = ndk.test.printers.FilePrinter(error_log)
error_log_printer.print_summary(report)
@@ -426,7 +470,7 @@
(bin_dir / f"lld{bin_ext}").unlink()
(bin_dir / f"lld-link{bin_ext}").unlink()
- install_clanglib = install_path / "lib64/clang"
+ install_clanglib = install_path / "lib/clang"
linux_prebuilt_path = ClangToolchain.path_for_host(Host.Linux)
# Remove unused python scripts. They are not installed for Windows.
@@ -444,25 +488,28 @@
for pyfile in python_bin_dir.glob(file_pattern):
pyfile.unlink()
- # Remove lldb-argdumper in site-packages. libc++ is not available there.
- # People should use bin/lldb-argdumper instead.
- for pylib in (install_path / "lib").glob("python*"):
- (pylib / f"site-packages/lldb/lldb-argdumper{bin_ext}").unlink()
-
if self.host != Host.Linux:
- # We don't build target binaries as part of the Darwin or Windows
- # build. These toolchains need to get these from the Linux
- # prebuilts.
+ # We don't build target binaries as part of the Darwin or Windows build.
+ # These toolchains need to get these from the Linux prebuilts.
#
- # The headers and libraries we care about are all in lib64/clang
- # for both toolchains, and those two are intended to be identical
- # between each host, so we can just replace them with the one from
- # the Linux toolchain.
+ # The headers and libraries we care about are all in lib/clang for both
+ # toolchains, and those two are intended to be identical between each host,
+ # so we can just replace them with the one from the Linux toolchain.
shutil.rmtree(install_clanglib)
- shutil.copytree(linux_prebuilt_path / "lib64/clang", install_clanglib)
+ shutil.copytree(
+ linux_prebuilt_path / "lib/clang",
+ install_clanglib,
+ symlinks=self.host is not Host.Windows64,
+ )
- # The Clang prebuilts have the platform toolchain libraries in
- # lib64/clang. The libraries we want are in runtimes_ndk_cxx.
+        # The toolchain build creates a symlink to ease migration across versions in the
+ # platform build. It's just confusing and wasted space in the NDK. Purge it.
+ for path in install_clanglib.iterdir():
+ if path.is_symlink():
+ path.unlink()
+
+ # The Clang prebuilts have the platform toolchain libraries in lib/clang. The
+ # libraries we want are in runtimes_ndk_cxx.
ndk_runtimes = linux_prebuilt_path / "runtimes_ndk_cxx"
for version_dir in install_clanglib.iterdir():
dst_lib_dir = version_dir / "lib/linux"
@@ -477,6 +524,7 @@
subdir = {
ndk.abis.Arch("arm"): "arm",
ndk.abis.Arch("arm64"): "aarch64",
+ ndk.abis.Arch("riscv64"): "riscv64",
ndk.abis.Arch("x86"): "i386",
ndk.abis.Arch("x86_64"): "x86_64",
}[arch]
@@ -489,24 +537,27 @@
)
)
- # Also remove the other libraries that we installed, but they were only
- # installed on Linux.
+ # Remove duplicate install locations of some runtime libraries. The toolchain
+ # artifacts install these to a location the driver doesn't search. We relocate
+ # these as necessary (either in this class or in Toolchain), so clean up the
+ # excess. The Android runtimes are only packaged in the Linux toolchain.
if self.host == Host.Linux:
shutil.rmtree(install_path / "runtimes_ndk_cxx")
+ shutil.rmtree(install_path / "android_libc++")
# Remove CMake package files that should not be exposed.
# For some reason the LLVM install includes CMake modules that expose
# its internal APIs. We want to purge these so apps don't accidentally
# depend on them. See http://b/142327416 for more info.
- shutil.rmtree(install_path / "lib64/cmake")
+ shutil.rmtree(install_path / "lib/cmake")
# Remove libc++.a and libc++abi.a on Darwin. Now that these files are
# universal binaries, they break notarization. Maybe it is possible to
# fix notarization by using ditto to preserve APFS extended attributes.
# See https://developer.apple.com/forums/thread/126038.
if self.host == Host.Darwin:
- (install_path / "lib64/libc++.a").unlink()
- (install_path / "lib64/libc++abi.a").unlink()
+ (install_path / "lib/libc++.a").unlink()
+ (install_path / "lib/libc++abi.a").unlink()
# Strip some large binaries and libraries. This is awkward, hand-crafted
# logic to select most of the biggest offenders, but could be
@@ -535,6 +586,36 @@
):
subprocess.check_call([str(strip_cmd), "--strip-unneeded", str(file)])
+ for lib in (install_path / "lib").iterdir():
+ broken_symlinks = {
+ "libc++abi.so.1.0",
+ "libc++abi.so",
+ "libc++.so.1.0",
+ }
+
+ if lib.name in broken_symlinks:
+ self._check_and_remove_dangling_symlink(lib)
+
+ def _check_and_remove_dangling_symlink(self, path: Path) -> None:
+ """Removes an expected dangling symlink, or raises an error.
+
+ The latest LLVM prebuilts have some dangling symlinks. It's a bug on the LLVM
+ build side, but rather than wait for a respin we just clean up the problems
+ here. This will raise an error whenever we upgrade to a new toolchain that
+ doesn't have these problems, so we'll know when to remove the workaround.
+ """
+ if not path.is_symlink():
+ raise RuntimeError(
+ f"Expected {path} to be a symlink. Update or remove this workaround."
+ )
+ if (dest := path.readlink()).exists():
+ raise RuntimeError(
+ f"Expected {path} to be a dangling symlink, but {dest} exists. Update "
+ "or remove this workaround."
+ )
+
+ path.unlink()
+
def versioned_so(host: Host, lib: str, version: str) -> str:
"""Returns the formatted versioned library for the given host.
@@ -556,7 +637,6 @@
name = "shader-tools"
src = ANDROID_DIR / "external" / "shaderc" / "shaderc"
install_path = Path("shader-tools/{host}")
- run_ctest = True
notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
deps = {"clang"}
@@ -622,15 +702,26 @@
@property
def _libcxx_dir(self) -> Path:
- return self.get_dep("clang").get_build_host_install() / "lib64"
+ return self.get_dep("clang").get_build_host_install() / "lib"
@property
def _libcxx(self) -> List[Path]:
path = self._libcxx_dir
if self.host == Host.Linux:
- return [path / "libc++.so.1"]
+ return [path / "libc++.so"]
return []
+ def build(self) -> None:
+ # These have never behaved properly on Darwin. Local builds haven't worked in
+ # years (presumably an XCode difference), and now CI is failing because of the
+ # same libc++ mismatch as in
+ # https://android-review.googlesource.com/c/platform/ndk/+/2657073. The local
+ # build fails before the failure that happens in CI, so I can't test a fix for
+ # the CI issue. Just disable this until someone that's familiar with the tests
+ # has the time to fix them.
+ self.run_ctest = self.host is not Host.Darwin
+ super().build()
+
def install(self) -> None:
self.get_install_path().mkdir(parents=True, exist_ok=True)
ext = ".exe" if self.host.is_windows else ""
@@ -698,6 +789,14 @@
src = NDK_DIR / "ndk-which"
+def iter_python_lint_paths() -> Iterator[Path]:
+ ndk_package_path = Path("ndk")
+ yield ndk_package_path
+ for app in iter_python_app_modules():
+ if ndk_package_path not in app.package.parents:
+ yield app.package
+
+
@register
class Black(ndk.builds.LintModule):
name = "black"
@@ -712,6 +811,17 @@
@register
+class Isort(ndk.builds.LintModule):
+ name = "isort"
+
+ def run(self) -> None:
+ if not shutil.which("isort"):
+ logging.warning("Skipping isort. isort was not found on your path.")
+ return
+ subprocess.check_call(["isort", "--check", "."])
+
+
+@register
class Pylint(ndk.builds.LintModule):
name = "pylint"
@@ -719,13 +829,13 @@
if not shutil.which("pylint"):
logging.warning("Skipping linting. pylint was not found on your path.")
return
- pylint = [
+ pylint: Sequence[str | Path] = [
"pylint",
- "--rcfile=" + str(ANDROID_DIR / "ndk/pylintrc"),
+ "--rcfile=" + str(ANDROID_DIR / "ndk/pyproject.toml"),
"--score=n",
"build",
- "ndk",
"tests",
+ *iter_python_lint_paths(),
]
subprocess.check_call(pylint)
@@ -739,14 +849,31 @@
logging.warning("Skipping type-checking. mypy was not found on your path.")
return
subprocess.check_call(
- ["mypy", "--config-file", str(ANDROID_DIR / "ndk/mypy.ini"), "ndk"]
+ [
+ "mypy",
+ "--config-file",
+ str(ANDROID_DIR / "ndk/pyproject.toml"),
+ *iter_python_lint_paths(),
+ ]
)
@register
+class Pytest(ndk.builds.LintModule):
+ name = "pytest"
+ deps = {"ndk-stack", "ndk-stack-shortcut"}
+
+ def run(self) -> None:
+ if not shutil.which("pytest"):
+ logging.warning("Skipping pytest. pytest was not found on your path.")
+ return
+ subprocess.check_call(["pytest", "ndk", "tests/pytest"])
+
+
+@register
class PythonLint(ndk.builds.MetaModule):
name = "pythonlint"
- deps = {"black", "mypy", "pylint"}
+ deps = {"black", "isort", "mypy", "pylint", "pytest"}
@register
@@ -801,337 +928,7 @@
def make_linker_script(path: Path, libs: List[str]) -> None:
- ndk.file.write_file(path, "INPUT({})\n".format(" ".join(libs)))
-
-
-@register
-class Libcxx(ndk.builds.Module):
- name = "libc++"
- src = ANDROID_DIR / "toolchain/llvm-project/libcxx"
- install_path = Path("sources/cxx-stl/llvm-libc++")
- notice = src / "LICENSE.TXT"
- notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
- deps = {
- "base-toolchain",
- "ndk-build",
- "ndk-build-shortcut",
- }
-
- @property
- def obj_out(self) -> Path:
- return self.out_dir / "libcxx" / "obj"
-
- @property
- def lib_out(self) -> Path:
- return self.out_dir / "libcxx" / "libs"
-
- def build(self) -> None:
- ndk_build = self.get_dep("ndk-build").get_build_host_install() / "ndk-build"
-
- android_mk = self.src / "Android.mk"
- application_mk = self.src / "Application.mk"
-
- build_cmd = [
- "bash",
- str(ndk_build),
- f"-j{multiprocessing.cpu_count()}",
- "V=1",
- # Since nothing in this build depends on libc++_static, we need to
- # name it to force it to build.
- "APP_MODULES=c++_shared c++_static",
- # Tell ndk-build where all of our makefiles are and where outputs
- # should go. The defaults in ndk-build are only valid if we have a
- # typical ndk-build layout with a jni/{Android,Application}.mk.
- "NDK_PROJECT_PATH=null",
- f"APP_BUILD_SCRIPT={android_mk}",
- f"NDK_APPLICATION_MK={application_mk}",
- f"NDK_OUT={self.obj_out}",
- f"NDK_LIBS_OUT={self.lib_out}",
- # Make sure we don't pick up a cached copy.
- "LIBCXX_FORCE_REBUILD=true",
- ]
-
- print("Running: " + " ".join([pipes.quote(arg) for arg in build_cmd]))
- subprocess.check_call(build_cmd)
-
- def install(self) -> None:
- """Installs headers and makefiles.
-
- The libraries are installed separately, by the Toolchain module."""
- install_root = self.get_install_path()
-
- if install_root.exists():
- shutil.rmtree(install_root)
- install_root.mkdir(parents=True)
-
- shutil.copy2(self.src / "Android.mk", install_root)
- # TODO: Use the includes from sysroot.
- shutil.copytree(self.src / "include", install_root / "include")
-
-
-@register
-class Platforms(ndk.builds.Module):
- name = "platforms"
- install_path = Path("platforms")
-
- deps = {
- "clang",
- }
-
- min_supported_api = MIN_API_LEVEL
-
- # These API levels had no new native APIs. The contents of these platforms
- # directories would be identical to the previous extant API level, so they
- # are not included in the NDK to save space.
- skip_apis = (20, 25)
-
- # Shared with the sysroot, though the sysroot NOTICE actually includes a
- # lot more licenses. Platforms and Sysroot are essentially a single
- # component that is split into two directories only temporarily, so this
- # will be the end state when we merge the two anyway.
- notice = ANDROID_DIR / "prebuilts/ndk/platform/sysroot/NOTICE"
-
- intermediate_module = True
-
- prebuilts_path = ANDROID_DIR / "prebuilts/ndk/platform"
-
- @staticmethod
- def src_path(*args: str) -> Path:
- return ndk.paths.android_path("development/ndk/platforms", *args)
-
- def llvm_tool(self, tool: str) -> Path:
- path = Path(self.get_dep("clang").get_build_host_install())
- return path / f"bin/{tool}"
-
- @staticmethod
- def libdir_name(arch: ndk.abis.Arch) -> str:
- if arch == "x86_64":
- return "lib64"
- return "lib"
-
- def get_apis(self) -> List[int]:
- apis: List[int] = []
- for path in (self.prebuilts_path / "platforms").iterdir():
- name = path.name
- if not name.startswith("android-"):
- continue
-
- _, api_str = name.split("-")
- try:
- api = int(api_str)
- if api >= self.min_supported_api:
- apis.append(api)
- except ValueError as ex:
- # Codenamed release like android-O, android-O-MR1, etc.
- # Codenamed APIs are not supported, since having
- # non-integer API directories breaks all kinds of tools, we
- # rename them when we check them in.
- raise ValueError(
- f"No codenamed API is allowed: {api_str}\n"
- "Use the update_platform.py tool from the "
- "platform/prebuilts/ndk dev branch to remove or rename it."
- ) from ex
-
- return sorted(apis)
-
- @staticmethod
- def get_arches(api: Union[int, str]) -> list[ndk.abis.Arch]:
- arches = [ndk.abis.Arch("arm"), ndk.abis.Arch("x86")]
- # All codenamed APIs are at 64-bit capable.
- if isinstance(api, str) or api >= 21:
- arches.extend([ndk.abis.Arch("arm64"), ndk.abis.Arch("x86_64")])
- return arches
-
- def get_build_cmd(
- self,
- dst: Path,
- srcs: List[Path],
- api: int,
- arch: ndk.abis.Arch,
- build_number: Union[int, str],
- ) -> List[str]:
- libc_includes = ndk.paths.ANDROID_DIR / "bionic/libc"
- arch_common_includes = libc_includes / "arch-common/bionic"
-
- cc = self.llvm_tool("clang")
-
- args = [
- str(cc),
- "-target",
- ndk.abis.clang_target(arch, api),
- "--sysroot",
- str(self.prebuilts_path / "sysroot"),
- "-fuse-ld=lld",
- f"-I{libc_includes}",
- f"-I{arch_common_includes}",
- f"-DPLATFORM_SDK_VERSION={api}",
- f'-DABI_NDK_VERSION="{ndk.config.release}"',
- f'-DABI_NDK_BUILD_NUMBER="{build_number}"',
- "-O2",
- "-fpic",
- "-Wl,-r",
- "-no-pie",
- "-nostdlib",
- "-Wa,--noexecstack",
- "-Wl,-z,noexecstack",
- "-o",
- str(dst),
- ] + [str(src) for src in srcs]
-
- if arch == ndk.abis.Arch("arm64"):
- args.append("-mbranch-protection=standard")
-
- return args
-
- def check_elf_note(self, obj_file: Path) -> None:
- # readelf is a cross platform tool, so arch doesn't matter.
- readelf = self.llvm_tool("llvm-readelf")
- out = subprocess.check_output([str(readelf), "--notes", str(obj_file)])
- if "Android" not in out.decode("utf-8"):
- raise RuntimeError("{} does not contain NDK ELF note".format(obj_file))
-
- def build_crt_object(
- self,
- dst: Path,
- srcs: List[Path],
- api: int,
- arch: ndk.abis.Arch,
- build_number: Union[int, str],
- defines: List[str],
- ) -> None:
- cc_args = self.get_build_cmd(dst, srcs, api, arch, build_number)
- cc_args.extend(defines)
-
- print("Running: " + " ".join([pipes.quote(arg) for arg in cc_args]))
- subprocess.check_call(cc_args)
-
- def build_crt_objects(
- self,
- dst_dir: Path,
- api: int,
- arch: ndk.abis.Arch,
- build_number: Union[int, str],
- ) -> None:
- src_dir = ndk.paths.android_path("bionic/libc/arch-common/bionic")
- crt_brand = ndk.paths.ndk_path("sources/crt/crtbrand.S")
-
- objects = {
- "crtbegin_dynamic.o": [
- src_dir / "crtbegin.c",
- crt_brand,
- ],
- "crtbegin_so.o": [
- src_dir / "crtbegin_so.c",
- crt_brand,
- ],
- "crtbegin_static.o": [
- src_dir / "crtbegin.c",
- crt_brand,
- ],
- "crtend_android.o": [
- src_dir / "crtend.S",
- ],
- "crtend_so.o": [
- src_dir / "crtend_so.S",
- ],
- }
-
- for name, srcs in objects.items():
- dst_path = dst_dir / name
- defs = []
- if name == "crtbegin_static.o":
- # libc.a is always the latest version, so ignore the API level
- # setting for crtbegin_static.
- defs.append("-D_FORCE_CRT_ATFORK")
- self.build_crt_object(dst_path, srcs, api, arch, build_number, defs)
- if name.startswith("crtbegin"):
- self.check_elf_note(dst_path)
-
- def build(self) -> None:
- build_dir = self.out_dir / self.install_path
- if build_dir.exists():
- shutil.rmtree(build_dir)
-
- apis = self.get_apis()
- platforms_meta = json.loads(
- ndk.file.read_file(ndk.paths.ndk_path("meta/platforms.json"))
- )
- max_sysroot_api = apis[-1]
- max_meta_api = platforms_meta["max"]
- if max_sysroot_api != max_meta_api:
- raise RuntimeError(
- f"API {max_sysroot_api} is the newest API level in the "
- "sysroot but does not match meta/platforms.json max of "
- f"{max_meta_api}"
- )
- if max_sysroot_api not in platforms_meta["aliases"].values():
- raise RuntimeError(
- f"API {max_sysroot_api} is the newest API level in the "
- "sysroot but has no alias in meta/platforms.json."
- )
- for api in apis:
- if api in self.skip_apis:
- continue
-
- platform = "android-{}".format(api)
- for arch in self.get_arches(api):
- arch_name = "arch-{}".format(arch)
- dst_dir = build_dir / platform / arch_name
- dst_dir.mkdir(parents=True)
- assert self.context is not None
- self.build_crt_objects(dst_dir, api, arch, self.context.build_number)
-
- def install(self) -> None:
- build_dir = self.out_dir / self.install_path
- install_dir = self.get_install_path()
-
- if install_dir.exists():
- shutil.rmtree(install_dir)
- install_dir.mkdir(parents=True)
-
- for api in self.get_apis():
- if api in self.skip_apis:
- continue
-
- # Copy shared libraries from prebuilts/ndk/platform/platforms.
- platform = "android-{}".format(api)
- platform_src = self.prebuilts_path / "platforms" / platform
- platform_dst = install_dir / "android-{}".format(api)
- shutil.copytree(platform_src, platform_dst)
-
- for arch in self.get_arches(api):
- arch_name = "arch-{}".format(arch)
- triple = ndk.abis.arch_to_triple(arch)
-
- # Install static libraries from prebuilts/ndk/platform/sysroot.
- # TODO: Determine if we can change the build system to use the
- # libraries directly from the sysroot directory rather than
- # duplicating all the libraries in platforms.
- lib_dir = self.prebuilts_path / "sysroot/usr/lib" / triple
- libdir_name = self.libdir_name(arch)
- lib_dir_dst = install_dir / platform / arch_name / "usr" / libdir_name
- for name in os.listdir(lib_dir):
- lib_src = lib_dir / name
- lib_dst = lib_dir_dst / name
- shutil.copy2(lib_src, lib_dst)
-
- if libdir_name == "lib64":
- # The Clang driver won't accept a sysroot that contains
- # only a lib64. An empty lib dir is enough to convince it.
- (install_dir / platform / arch_name / "usr/lib").mkdir(parents=True)
-
- # Install the CRT objects that we just built.
- obj_dir = build_dir / platform / arch_name
- for name in os.listdir(obj_dir):
- obj_src = obj_dir / name
- obj_dst = lib_dir_dst / name
- shutil.copy2(obj_src, obj_dst)
-
- # https://github.com/android-ndk/ndk/issues/372
- for root, dirs, files in os.walk(install_dir):
- if not files and not dirs:
- with open(Path(root) / ".keep_dir", "w") as keep_file:
- keep_file.write("This file forces git to keep the directory.")
+ path.write_text(f"INPUT({' '.join(libs)})\n")
@register
@@ -1292,18 +1089,32 @@
class Sysroot(ndk.builds.Module):
name = "sysroot"
install_path = Path("sysroot")
- notice = ANDROID_DIR / "prebuilts/ndk/platform/sysroot/NOTICE"
+ notice = PREBUILT_SYSROOT / "NOTICE"
intermediate_module = True
+ deps = {"clang"}
+
+ def __init__(self) -> None:
+ super().__init__()
+ self.crt_builder: CrtObjectBuilder | None = None
def build(self) -> None:
- pass
+ build_dir = self.out_dir / self.install_path
+ if build_dir.exists():
+ shutil.rmtree(build_dir)
+
+ assert self.context is not None
+ self.crt_builder = CrtObjectBuilder(
+ self.get_dep("clang").get_build_host_install(),
+ build_dir,
+ self.context.build_number,
+ )
+ self.crt_builder.build()
def install(self) -> None:
install_path = self.get_install_path()
if install_path.exists():
shutil.rmtree(install_path)
- path = ndk.paths.android_path("prebuilts/ndk/platform/sysroot")
- shutil.copytree(path, install_path)
+ shutil.copytree(PREBUILT_SYSROOT, install_path)
if self.host is not Host.Linux:
# linux/netfilter has some headers with names that differ only
# by case, which can't be extracted to a case-insensitive
@@ -1327,81 +1138,42 @@
for remove_path in remove_paths:
os.remove(install_path / remove_path)
- ndk_version_h_path = install_path / "usr/include/android/ndk-version.h"
- with open(ndk_version_h_path, "w") as ndk_version_h:
- major = ndk.config.major
- minor = ndk.config.hotfix
- beta = ndk.config.beta
- canary = "1" if ndk.config.canary else "0"
- assert self.context is not None
+ assert self.context is not None
+ NdkVersionHeaderGenerator(
+ ndk.config.major,
+ ndk.config.hotfix,
+ ndk.config.beta,
+ self.context.build_number,
+ ndk.config.canary,
+ ).write(install_path / "usr/include/android/ndk-version.h")
- ndk_version_h.write(
- textwrap.dedent(
- f"""\
- #pragma once
-
- /**
- * Set to 1 if this is an NDK, unset otherwise. See
- * https://android.googlesource.com/platform/bionic/+/master/docs/defines.md.
- */
- #define __ANDROID_NDK__ 1
-
- /**
- * Major version of this NDK.
- *
- * For example: 16 for r16.
- */
- #define __NDK_MAJOR__ {major}
-
- /**
- * Minor version of this NDK.
- *
- * For example: 0 for r16 and 1 for r16b.
- */
- #define __NDK_MINOR__ {minor}
-
- /**
- * Set to 0 if this is a release build, or 1 for beta 1,
- * 2 for beta 2, and so on.
- */
- #define __NDK_BETA__ {beta}
-
- /**
- * Build number for this NDK.
- *
- * For a local development build of the NDK, this is -1.
- */
- #define __NDK_BUILD__ {self.context.build_number}
-
- /**
- * Set to 1 if this is a canary build, 0 if not.
- */
- #define __NDK_CANARY__ {canary}
- """
- )
+ # Install the CRT objects that we just built.
+ assert self.crt_builder is not None
+ for abi, api, path in self.crt_builder.artifacts:
+ lib_dir_dst = (
+ install_path / "usr/lib" / ndk.abis.abi_to_triple(abi) / str(api)
)
+ obj_dst = lib_dir_dst / path.name
+ shutil.copy2(path, obj_dst)
def write_clang_shell_script(
wrapper_path: Path, clang_name: str, flags: List[str]
) -> None:
- with open(wrapper_path, "w") as wrapper:
- wrapper.write(
- textwrap.dedent(
- """\
+ wrapper_path.write_text(
+ textwrap.dedent(
+ f"""\
#!/usr/bin/env bash
bin_dir=`dirname "$0"`
if [ "$1" != "-cc1" ]; then
- "$bin_dir/{clang}" {flags} "$@"
+ "$bin_dir/{clang_name}" {' '.join(flags)} "$@"
else
# Target is already an argument.
- "$bin_dir/{clang}" "$@"
+ "$bin_dir/{clang_name}" "$@"
fi
- """.format(
- clang=clang_name, flags=" ".join(flags)
- )
- )
+ """
)
+ )
mode = os.stat(wrapper_path).st_mode
os.chmod(wrapper_path, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
@@ -1410,22 +1182,21 @@
def write_clang_batch_script(
wrapper_path: Path, clang_name: str, flags: List[str]
) -> None:
- with open(wrapper_path, "w") as wrapper:
- wrapper.write(
- textwrap.dedent(
- """\
+ wrapper_path.write_text(
+ textwrap.dedent(
+ f"""\
@echo off
setlocal
call :find_bin
- if "%1" == "-cc1" goto :L
+ if "%~1" == "-cc1" goto :L
- set "_BIN_DIR=" && "%_BIN_DIR%{clang}" {flags} %*
+ set "_BIN_DIR=" && "%_BIN_DIR%{clang_name}" {' '.join(flags)} %*
if ERRORLEVEL 1 exit /b 1
goto :done
:L
rem Target is already an argument.
- set "_BIN_DIR=" && "%_BIN_DIR%{clang}" %*
+ set "_BIN_DIR=" && "%_BIN_DIR%{clang_name}" %*
if ERRORLEVEL 1 exit /b 1
goto :done
@@ -1436,11 +1207,9 @@
exit /b
:done
- """.format(
- clang=clang_name, flags=" ".join(flags)
- )
- )
+ """
)
+ )
def write_clang_wrapper(
@@ -1484,22 +1253,19 @@
@register
-class BaseToolchain(ndk.builds.Module):
- """The subset of the toolchain needed to build other toolchain components.
+class Toolchain(ndk.builds.Module):
+ """The LLVM toolchain.
- libc++ is built using this toolchain, and the full toolchain requires
- libc++. The toolchain is split into BaseToolchain and Toolchain to break
- the cyclic dependency.
+ The toolchain includes Clang, LLD, libc++, and LLVM's binutils.
"""
- name = "base-toolchain"
+ name = "toolchain"
# This is installed to the Clang location to avoid migration pain.
install_path = Path("toolchains/llvm/prebuilt/{host}")
notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
deps = {
"clang",
"make",
- "platforms",
"sysroot",
"system-stl",
"yasm",
@@ -1509,21 +1275,45 @@
def notices(self) -> Iterator[Path]:
yield from Clang().notices
yield from Yasm().notices
- yield from Platforms().notices
yield from Sysroot().notices
yield from SystemStl().notices
+ @property
+ def sysroot_install_path(self) -> Path:
+ return self.get_install_path() / "sysroot"
+
+ def toolchain_libcxx_path_for(self, abi: Abi) -> Path:
+ """Returns the path to the toolchain's NDK libc++ artifacts.
+
+ The toolchain artifacts install all the libc++ artifacts to the android_libc++
+ subdirectory rather than anywhere that the driver can find them (because that's
+ still WIP). These are only included in the Linux artifacts.
+ """
+ # The libc++ directories in the toolchain artifacts use yet another spelling of
+ # each ABI.
+ libcxx_arch_name = {
+ Abi("armeabi-v7a"): "arm",
+ Abi("arm64-v8a"): "aarch64",
+ Abi("riscv64"): "riscv64",
+ Abi("x86"): "i386",
+ Abi("x86_64"): "x86_64",
+ }[abi]
+ return (
+ ClangToolchain.path_for_host(Host.Linux)
+ / "android_libc++/ndk"
+ / libcxx_arch_name
+ )
+
def build(self) -> None:
pass
def install(self) -> None:
install_dir = self.get_install_path()
yasm_dir = self.get_dep("yasm").get_install_path()
- platforms_dir = self.get_dep("platforms").get_install_path()
sysroot_dir = self.get_dep("sysroot").get_install_path()
system_stl_dir = self.get_dep("system-stl").get_install_path()
- shutil.copytree(sysroot_dir, install_dir / "sysroot", dirs_exist_ok=True)
+ shutil.copytree(sysroot_dir, self.sysroot_install_path, dirs_exist_ok=True)
exe = ".exe" if self.host.is_windows else ""
shutil.copy2(
@@ -1542,21 +1332,9 @@
# This reduces the size of the NDK by 60M on non-Windows.
os.symlink(lld.name, new_bin_ld)
- platforms = self.get_dep("platforms")
- assert isinstance(platforms, Platforms)
- for api in platforms.get_apis():
- if api in Platforms.skip_apis:
- continue
-
- platform = "android-{}".format(api)
- for arch in platforms.get_arches(api):
- triple = ndk.abis.arch_to_triple(arch)
- arch_name = "arch-{}".format(arch)
- lib_dir = "lib64" if arch == "x86_64" else "lib"
- src_dir = platforms_dir / platform / arch_name / "usr" / lib_dir
- dst_dir = install_dir / "sysroot/usr/lib" / triple / str(api)
- shutil.copytree(src_dir, dst_dir, ignore=shutil.ignore_patterns("*.a"))
-
+ for api in ALL_API_LEVELS:
+ for abi in ndk.abis.iter_abis_for_api(api):
+ triple = ndk.abis.abi_to_triple(abi)
write_clang_wrapper(
install_dir / "bin", api, triple, self.host.is_windows
)
@@ -1570,123 +1348,97 @@
system_stl_inc_src = system_stl_dir / "include"
system_stl_inc_dst = system_stl_hdr_dir / "4.9.x"
shutil.copytree(system_stl_inc_src, system_stl_inc_dst)
+ self.relocate_libcxx()
+ self.create_libcxx_linker_scripts()
+ def relocate_libcxx(self) -> None:
+ """Relocate libc++ so its discoverable by the Clang driver.
-@register
-class Vulkan(ndk.builds.Module):
- name = "vulkan"
- install_path = Path("sources/third_party/vulkan")
- notice = ANDROID_DIR / "external/vulkan-headers/NOTICE"
+ The NDK libc++ in the toolchain prebuilts is not installed to a location that
+ the driver is able to find by default. Move it to a driver searched directory.
+ """
+ # The Clang driver automatically uses the following library search directories
+ # (relative to the LLVM install root, for an aarch64-linux-android21 target and
+ # LLVM 17):
+ #
+ # 1. lib/clang/17/lib/linux/aarch64
+ # 2. bin/../sysroot/usr/lib/aarch64-linux-android/21
+ # 3. bin/../sysroot/usr/lib/aarch64-linux-android
+ # 4. bin/../sysroot/usr/lib
+ #
+ # The sysroot directory comes from the platform's sysroot artifact, so it's best
+ # to avoid installing to that (if we install there, the platform's artifact
+ # can't be used directly; it needs to have NDK components installed to it).
+ #
+ # However, AGP (and probably other systems) expect to find libc++_shared.so in
+ # sysroot/usr/lib/$TRIPLE, so we should continue using that path for the time
+ # being. At some point we should move all the libc++ details into the
+ # toolchain's directories so it's easier to use an arbitrary sysroot (e.g. for
+ # previewing Android APIs without needing a whole new NDK), but we can't do that
+ # for the headers yet anyway (see below). Keep compatible for now.
+ usr_lib = self.sysroot_install_path / "usr/lib"
+ for abi in ALL_ABIS:
+ dest = usr_lib / ndk.abis.abi_to_triple(abi)
+ src = self.toolchain_libcxx_path_for(abi) / "lib"
+ for lib in src.iterdir():
+ shutil.copy2(lib, dest / lib.name)
- def build(self) -> None:
- pass
+ # libc++ headers for Android will currently only be found in the sysroot:
+ # https://github.com/llvm/llvm-project/blob/c64f10bfe20308ebc7d5d18912cd0ba82a44eaa1/clang/lib/Driver/ToolChains/Gnu.cpp#L3080-L3084
+ #
+ # We ought to revert that driver behavior (which shouldn't be contentious, since
+ # it's our patch in the first place), but for now we'll continue installing the
+ # libc++ headers to the sysroot.
+ src = ClangToolchain.path_for_host(Host.Linux) / "include/c++/v1"
+ dest = self.sysroot_install_path / "usr/include/c++/v1"
+ if dest.exists():
+ shutil.rmtree(dest)
+ dest.parent.mkdir(parents=True, exist_ok=True)
+ shutil.copytree(src, dest)
- def install(self) -> None:
- default_ignore_patterns = shutil.ignore_patterns(
- "*CMakeLists.txt", "*test.cc", "linux", "windows"
- )
+ # There's also an Android-specific __config_site header that we need to install.
+ shutil.copy2(self.find_libcxx_config_site(), dest / "__config_site")
- source_dir = ANDROID_DIR / "external/vulkan-headers"
- dest_dir = self.get_install_path() / "src"
- for d in ["include", "registry"]:
- src = source_dir / d
- dst = dest_dir / d
- shutil.rmtree(dst, ignore_errors=True)
- shutil.copytree(src, dst, ignore=default_ignore_patterns)
+ def find_libcxx_config_site(self) -> Path:
+ """Finds the __config_site file for the NDK libc++.
- android_mk = dest_dir / "build-android/jni/Android.mk"
- android_mk.parent.mkdir(parents=True, exist_ok=True)
- url = "https://github.com/KhronosGroup/Vulkan-ValidationLayers"
- android_mk.write_text(
- textwrap.dedent(
- f"""\
- $(warning The Vulkan Validation Layers are now distributed on \\
- GitHub. See {url} for more information.)
- """
- )
- )
-
-
-@register
-class Toolchain(ndk.builds.Module):
- """The complete toolchain.
-
- BaseToolchain installs the core of the toolchain. This module installs the
- STL to that toolchain.
- """
-
- name = "toolchain"
- # This is installed to the Clang location to avoid migration pain.
- install_path = Path("toolchains/llvm/prebuilt/{host}")
- notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
- deps = {
- "base-toolchain",
- "libc++",
- "libc++abi",
- "platforms",
- }
-
- @property
- def notices(self) -> Iterator[Path]:
- yield from Libcxx().notices
- yield from Libcxxabi().notices
-
- def build(self) -> None:
- pass
-
- def install(self) -> None:
- install_dir = self.get_install_path()
- libcxx_dir = self.get_dep("libc++").get_install_path()
- libcxxabi_dir = self.get_dep("libc++abi").get_install_path()
-
- libcxx_hdr_dir = install_dir / "sysroot/usr/include/c++"
- libcxx_hdr_dir.mkdir(parents=True)
- libcxx_inc_src = libcxx_dir / "include"
- libcxx_inc_dst = libcxx_hdr_dir / "v1"
- shutil.copytree(libcxx_inc_src, libcxx_inc_dst)
-
- libcxxabi_inc_src = libcxxabi_dir / "include"
- shutil.copytree(libcxxabi_inc_src, libcxx_inc_dst, dirs_exist_ok=True)
-
- for arch in ndk.abis.ALL_ARCHITECTURES:
- triple = ndk.abis.arch_to_triple(arch)
- (abi,) = ndk.abis.arch_to_abis(arch)
- sysroot_dst = install_dir / "sysroot/usr/lib" / triple
-
- shutil.copy2(
- self.out_dir / "libcxx" / "libs" / abi / "libc++_shared.so", sysroot_dst
- )
- static_libs = [
- "libc++_static.a",
- "libc++abi.a",
- ]
-
- for lib in static_libs:
- shutil.copy2(
- self.out_dir / "libcxx" / "obj" / "local" / abi / lib, sysroot_dst
+ That header exists per-ABI in the android_libc++ directory, but they should all
+ be identical and the driver doesn't search per-ABI include directories for
+ libc++. Verify that they are actually identical and return one of them
+ arbitrarily.
+ """
+ config_sites: list[Path] = []
+ for abi in ALL_ABIS:
+ includes = self.toolchain_libcxx_path_for(abi) / "include"
+ config_sites.extend(includes.glob("**/__config_site"))
+ first = config_sites[0]
+ contents = first.read_bytes()
+ for config_site in config_sites[1:]:
+ if config_site.read_bytes() != contents:
+ raise RuntimeError(
+ f"Expected all NDK __config_site files to be identical. {first} "
+ f"and {config_site} have different contents."
)
+ return first
- platforms = self.get_dep("platforms")
- assert isinstance(platforms, Platforms)
- # Also install a libc++.so and libc++.a linker script per API level.
- for api in platforms.get_apis():
- if api in Platforms.skip_apis:
- continue
+ def create_libcxx_linker_scripts(self) -> None:
+ """Install per-target linker scripts for libc++.so and libc++.a.
- for arch in platforms.get_arches(api):
- triple = ndk.abis.arch_to_triple(arch)
+ Clang is going to try to use `-lc++`, not `-lc++_shared` or
+ `-lc++_static -lc++abi`. Linker scripts paper over those details.
+
+ These are per-target for historical reasons (pre-21 needed libandroid_support,
+ arm32 needed libunwind). These could probably be reduced to a single linker
+ script now.
+ """
+ install_dir = self.get_install_path()
+ for api in ALL_API_LEVELS:
+ for abi in ndk.abis.iter_abis_for_api(api):
+ triple = ndk.abis.abi_to_triple(abi)
dst_dir = install_dir / "sysroot/usr/lib" / triple / str(api)
- static_script = ["-lc++_static", "-lc++abi"]
- shared_script = ["-lc++_shared"]
-
- libcxx_so_path = dst_dir / "libc++.so"
- with open(libcxx_so_path, "w") as script:
- script.write("INPUT({})".format(" ".join(shared_script)))
-
- libcxx_a_path = dst_dir / "libc++.a"
- with open(libcxx_a_path, "w") as script:
- script.write("INPUT({})".format(" ".join(static_script)))
+ (dst_dir / "libc++.so").write_text("INPUT(-lc++_shared)")
+ (dst_dir / "libc++.a").write_text("INPUT(-lc++_static -lc++abi)")
def make_format_value(value: Any) -> Any:
@@ -1744,8 +1496,7 @@
abi_infos[f"NDK_ABI_{abi}_ARCH"] = arch
abi_infos[f"NDK_ABI_{abi}_TRIPLE"] = triple
abi_infos[f"NDK_ABI_{abi}_LLVM_TRIPLE"] = llvm_triple
- abi_infos[f"NDK_PROC_{proc}_ABI"] = abi
- abi_infos[f"NDK_ARCH_{arch}_ABI"] = abi
+ abi_infos[f"NDK_ABI_{abi}_MIN_OS_VERSION"] = int(abi_data["min_os_version"])
meta_vars = {
"NDK_DEFAULT_ABIS": sorted(default_abis),
@@ -1856,17 +1607,12 @@
) -> None:
install_path = self.get_install_path()
json_path = self.get_dep("meta").get_install_path() / (name + ".json")
- meta = json.loads(ndk.file.read_file(json_path))
+ with json_path.open(encoding="utf-8") as json_file:
+ meta = json.load(json_file)
meta_vars = func(meta)
- ndk.file.write_file(
- install_path / "core/{}.mk".format(name),
- var_dict_to_make(meta_vars),
- )
- ndk.file.write_file(
- install_path / "cmake/{}.cmake".format(name),
- var_dict_to_cmake(meta_vars),
- )
+ (install_path / f"core/{name}.mk").write_text(var_dict_to_make(meta_vars))
+ (install_path / f"cmake/{name}.cmake").write_text(var_dict_to_cmake(meta_vars))
@register
@@ -1884,13 +1630,6 @@
@register
-class Libcxxabi(ndk.builds.PackageModule):
- name = "libc++abi"
- install_path = Path("sources/cxx-stl/llvm-libc++abi")
- src = ANDROID_DIR / "toolchain/llvm-project/libcxxabi"
-
-
-@register
class SimplePerf(ndk.builds.Module):
name = "simpleperf"
install_path = Path("simpleperf")
@@ -1945,18 +1684,17 @@
@register
-class NdkGdb(ndk.builds.MultiFileModule):
+class NdkGdb(ndk.builds.PythonApplication):
name = "ndk-gdb"
- install_path = Path("prebuilt/{host}/bin")
+ install_path = Path("prebuilt/{host}/bin/ndkgdb.pyz")
notice = NDK_DIR / "NOTICE"
-
- @property
- def files(self) -> Iterator[Path]:
- yield NDK_DIR / "ndk-gdb"
- yield NDK_DIR / "ndk-gdb.py"
-
- if self.host.is_windows:
- yield NDK_DIR / "ndk-gdb.cmd"
+ package = NDK_DIR / "ndkgdb.py"
+ main = "ndkgdb:main"
+ pip_dependencies = [
+ ANDROID_DIR / "development/python-packages/adb",
+ ANDROID_DIR / "development/python-packages/gdbrunner",
+ ]
+ deps = {"ndk-gdb-shortcut", "ndk-lldb-shortcut"}
@register
@@ -1976,18 +1714,19 @@
@register
-class NdkStack(ndk.builds.MultiFileModule):
+class NdkStack(ndk.builds.PythonApplication):
name = "ndk-stack"
- install_path = Path("prebuilt/{host}/bin")
+ install_path = Path("prebuilt/{host}/bin/ndkstack.pyz")
notice = NDK_DIR / "NOTICE"
-
- @property
- def files(self) -> Iterator[Path]:
- yield NDK_DIR / "ndk-stack"
- yield NDK_DIR / "ndk-stack.py"
-
- if self.host.is_windows:
- yield NDK_DIR / "ndk-stack.cmd"
+ package = NDK_DIR / "ndkstack.py"
+ main = "ndkstack:main"
+ deps = {
+ # PythonApplication depends on build/tools/ndk_bin_common.sh.
+ "ndk-build",
+ "ndk-stack-shortcut",
+ # PythonApplication depends on Python, which is bundled with Clang.
+ "toolchain",
+ }
@register
@@ -2012,6 +1751,7 @@
install_path = Path("ndk-build")
script = Path("build/ndk-build")
windows_ext = ".cmd"
+ disallow_windows_install_path_with_spaces = True
@register
@@ -2055,19 +1795,58 @@
no_notice = True
deps = {
- "base-toolchain",
+ "toolchain",
}
+ @staticmethod
+ def find_max_api_level_in_prebuilts() -> int:
+ max_api = 0
+ for path in PREBUILT_SYSROOT.glob("usr/lib/*/*"):
+ if not path.is_dir():
+ continue
+
+ try:
+ api = int(path.name)
+ max_api = max(max_api, api)
+ except ValueError as ex:
+ # Codenamed releases like android-O, android-O-MR1, etc.
+ # Codenamed APIs are not supported: non-integer API
+ # directories break all kinds of tools, so we rename them
+ # when we check them in.
+ raise ValueError(
+ f"Codenamed APIs are not allowed: {path}\n"
+ "Use the update_platform.py tool from the "
+ "platform/prebuilts/ndk dev branch to remove or rename it."
+ ) from ex
+
+ return max_api
+
+ def validate(self) -> None:
+ super().validate()
+
+ max_sysroot_api = self.find_max_api_level_in_prebuilts()
+ if max_sysroot_api != MAX_API_LEVEL:
+ raise RuntimeError(
+ f"API {max_sysroot_api} is the newest API level in {PREBUILT_SYSROOT} "
+ f"sysroot but does not match meta/platforms.json max of {MAX_API_LEVEL}"
+ )
+ if max_sysroot_api not in API_LEVEL_ALIASES.values():
+ raise RuntimeError(
+ f"API {max_sysroot_api} is the newest API level in {PREBUILT_SYSROOT} "
+ "but has no alias in meta/platforms.json."
+ )
+
def install(self) -> None:
super().install()
self.create_system_libs_meta()
+ self.add_min_api_data_to_abis()
def create_system_libs_meta(self) -> None:
# Build system_libs.json based on what we find in the toolchain. We
# only need to scan a single 32-bit architecture since these libraries
# do not vary in availability across architectures.
sysroot_base = (
- self.get_dep("base-toolchain").get_install_path()
+ self.get_dep("toolchain").get_install_path()
/ "sysroot/usr/lib/arm-linux-androideabi"
)
@@ -2104,9 +1883,20 @@
system_libs = collections.OrderedDict(sorted(system_libs.items()))
json_path = self.get_install_path() / "system_libs.json"
- with open(json_path, "w") as json_file:
+ with json_path.open("w", encoding="utf-8") as json_file:
json.dump(system_libs, json_file, indent=2, separators=(",", ": "))
+ def add_min_api_data_to_abis(self) -> None:
+ json_path = self.get_install_path() / "abis.json"
+ with json_path.open(encoding="utf-8") as json_file:
+ data = json.load(json_file)
+
+ for abi_name, abi_data in data.items():
+ abi_data["min_os_version"] = ndk.abis.min_api_for_abi(Abi(abi_name))
+
+ with json_path.open("w", encoding="utf-8") as json_file:
+ json.dump(data, json_file, indent=2, separators=(",", ": "))
+
@register
class WrapSh(ndk.builds.PackageModule):
@@ -2127,14 +1917,18 @@
def install(self) -> None:
path = self.get_install_path()
- with open(path, "w") as source_properties:
- assert self.context is not None
- version = get_version_string(self.context.build_number)
- if ndk.config.beta > 0:
- version += "-beta{}".format(ndk.config.beta)
- source_properties.writelines(
- ["Pkg.Desc = Android NDK\n", "Pkg.Revision = {}\n".format(version)]
+ assert self.context is not None
+ version = get_version_string(self.context.build_number)
+ if ndk.config.beta > 0:
+ version += "-beta{}".format(ndk.config.beta)
+ path.write_text(
+ textwrap.dedent(
+ f"""\
+ Pkg.Desc = Android NDK
+ Pkg.Revision = {version}
+ """
)
+ )
def create_notice_file(path: Path, for_group: ndk.builds.NoticeGroup) -> None:
@@ -2287,15 +2081,22 @@
NAMES_TO_MODULES = {m.name: m for m in ALL_MODULES}
+def iter_python_app_modules() -> Iterator[ndk.builds.PythonApplication]:
+ """Returns an Iterator over all Python applications."""
+ for module in ALL_MODULES:
+ if isinstance(module, ndk.builds.PythonApplication):
+ yield module
+
+
def get_all_module_names() -> List[str]:
return [m.name for m in ALL_MODULES if m.enabled]
-def build_number_arg(value: str) -> str:
+def build_number_arg(value: str) -> int:
if value.startswith("P"):
# Treehugger build. Treat as a local development build.
- return "0"
- return value
+ return 0
+ return int(value)
def parse_args() -> Tuple[argparse.Namespace, List[str]]:
@@ -2426,17 +2227,16 @@
def log_build_failure(log_path: Path, dist_dir: Path) -> None:
- with open(log_path, "r") as log_file:
- contents = log_file.read()
- print(contents)
+ contents = log_path.read_text()
+ print(contents)
- # The build server has a build_error.log file that is supposed to be
- # the short log of the failure that stopped the build. Append our
- # failing log to that.
- build_error_log = dist_dir / "logs/build_error.log"
- with open(build_error_log, "a") as error_log:
- error_log.write("\n")
- error_log.write(contents)
+ # The build server has a build_error.log file that is supposed to be
+ # the short log of the failure that stopped the build. Append our
+ # failing log to that.
+ build_error_log = dist_dir / "logs/build_error.log"
+ with build_error_log.open("a", encoding="utf-8") as error_log:
+ error_log.write("\n")
+ error_log.write(contents)
def launch_buildable(
@@ -2718,6 +2518,7 @@
test_timer = ndk.timer.Timer()
with test_timer:
if args.build_tests:
+ print("Building tests...")
purge_unwanted_files(ndk_dir)
good = build_ndk_tests(out_dir, dist_dir, args)
print() # Blank line between test results and timing data.
diff --git a/ndk/cmake.py b/ndk/cmake.py
index f148d31..29555a7 100644
--- a/ndk/cmake.py
+++ b/ndk/cmake.py
@@ -15,18 +15,18 @@
#
"""APIs for dealing with cmake scripts."""
-from functools import cached_property
import os
-from pathlib import Path
import pprint
import shlex
import shutil
import subprocess
+from functools import cached_property
+from pathlib import Path
from typing import Dict, List, Optional
-from ndk.hosts import Host
import ndk.paths
import ndk.toolchains
+from ndk.hosts import Host
SYSTEM_NAME_MAP = {
Host.Darwin: "Darwin",
diff --git a/ndk/config.py b/ndk/config.py
index 955635d..0e3d80e 100644
--- a/ndk/config.py
+++ b/ndk/config.py
@@ -1,7 +1,6 @@
from __future__ import print_function
-
-major = 26
+major = 27
hotfix = 0
hotfix_str = chr(ord("a") + hotfix) if hotfix else ""
beta = 0
diff --git a/ndk/crtobjectbuilder.py b/ndk/crtobjectbuilder.py
new file mode 100644
index 0000000..09f1c96
--- /dev/null
+++ b/ndk/crtobjectbuilder.py
@@ -0,0 +1,166 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Helper class for building CRT objects."""
+import shlex
+import shutil
+import subprocess
+from pathlib import Path
+
+import ndk.config
+from ndk.platforms import ALL_API_LEVELS
+
+from .abis import Abi, abi_to_triple, clang_target, iter_abis_for_api
+from .paths import ANDROID_DIR, NDK_DIR
+
+
+class CrtObjectBuilder:
+ """Builder for NDK CRT objects."""
+
+ PREBUILTS_PATH = ANDROID_DIR / "prebuilts/ndk/platform"
+
+ def __init__(self, llvm_path: Path, build_dir: Path, build_id: int) -> None:
+ self.llvm_path = llvm_path
+ self.build_dir = build_dir
+ self.build_id = build_id
+ self.artifacts: list[tuple[Abi, int, Path]] = []
+
+ def llvm_tool(self, tool: str) -> Path:
+ """Returns the path to the given LLVM tool."""
+ return self.llvm_path / "bin" / tool
+
+ def get_build_cmd(
+ self,
+ dst: Path,
+ srcs: list[Path],
+ api: int,
+ abi: Abi,
+ build_number: int,
+ ) -> list[str]:
+ """Returns the build command for creating a CRT object."""
+ libc_includes = ANDROID_DIR / "bionic/libc"
+ arch_common_includes = libc_includes / "arch-common/bionic"
+
+ cc = self.llvm_tool("clang")
+
+ args = [
+ str(cc),
+ "-target",
+ clang_target(abi, api),
+ "--sysroot",
+ str(self.PREBUILTS_PATH / "sysroot"),
+ "-fuse-ld=lld",
+ f"-I{libc_includes}",
+ f"-I{arch_common_includes}",
+ f"-DPLATFORM_SDK_VERSION={api}",
+ f'-DABI_NDK_VERSION="{ndk.config.release}"',
+ f'-DABI_NDK_BUILD_NUMBER="{build_number}"',
+ "-O2",
+ "-fpic",
+ "-Wl,-r",
+ "-no-pie",
+ "-nostdlib",
+ "-Wa,--noexecstack",
+ "-Wl,-z,noexecstack",
+ "-o",
+ str(dst),
+ ] + [str(src) for src in srcs]
+
+ if abi == Abi("arm64-v8a"):
+ args.append("-mbranch-protection=standard")
+
+ if dst.name == "crtbegin_static.o":
+ args.append("-DCRTBEGIN_STATIC")
+
+ return args
+
+ def check_elf_note(self, obj_file: Path) -> None:
+ """Verifies that the object file contains the expected note."""
+ # readelf is a cross platform tool, so arch doesn't matter.
+ readelf = self.llvm_tool("llvm-readelf")
+ out = subprocess.run(
+ [readelf, "--notes", obj_file], check=True, text=True, capture_output=True
+ ).stdout
+ if "Android" not in out:
+ raise RuntimeError(f"{obj_file} does not contain NDK ELF note")
+
+ def build_crt_object(
+ self,
+ dst: Path,
+ srcs: list[Path],
+ api: int,
+ abi: Abi,
+ build_number: int,
+ defines: list[str],
+ ) -> None:
+ cc_args = self.get_build_cmd(dst, srcs, api, abi, build_number)
+ cc_args.extend(defines)
+
+ print(f"Running: {shlex.join(cc_args)}")
+ subprocess.check_call(cc_args)
+
+ def build_crt_objects(
+ self,
+ dst_dir: Path,
+ api: int,
+ abi: Abi,
+ build_number: int,
+ ) -> None:
+ src_dir = ANDROID_DIR / "bionic/libc/arch-common/bionic"
+ crt_brand = NDK_DIR / "sources/crt/crtbrand.S"
+
+ objects = {
+ "crtbegin_dynamic.o": [
+ src_dir / "crtbegin.c",
+ crt_brand,
+ ],
+ "crtbegin_so.o": [
+ src_dir / "crtbegin_so.c",
+ crt_brand,
+ ],
+ "crtbegin_static.o": [
+ src_dir / "crtbegin.c",
+ crt_brand,
+ ],
+ "crtend_android.o": [
+ src_dir / "crtend.S",
+ ],
+ "crtend_so.o": [
+ src_dir / "crtend_so.S",
+ ],
+ }
+
+ for name, srcs in objects.items():
+ dst_path = dst_dir / name
+ defs = []
+ if name == "crtbegin_static.o":
+ # libc.a is always the latest version, so ignore the API level
+ # setting for crtbegin_static.
+ defs.append("-D_FORCE_CRT_ATFORK")
+ self.build_crt_object(dst_path, srcs, api, abi, build_number, defs)
+ if name.startswith("crtbegin"):
+ self.check_elf_note(dst_path)
+ self.artifacts.append((abi, api, dst_path))
+
+ def build(self) -> None:
+ self.artifacts = []
+ if self.build_dir.exists():
+ shutil.rmtree(self.build_dir)
+
+ for api in ALL_API_LEVELS:
+ for abi in iter_abis_for_api(api):
+ dst_dir = self.build_dir / abi_to_triple(abi) / str(api)
+ dst_dir.mkdir(parents=True, exist_ok=True)
+ self.build_crt_objects(dst_dir, api, abi, self.build_id)
diff --git a/ndk/deps.py b/ndk/deps.py
index 43fee41..adbdfb8 100644
--- a/ndk/deps.py
+++ b/ndk/deps.py
@@ -16,8 +16,8 @@
"""Performs dependency tracking for ndk.builds modules."""
from typing import Dict, Iterable, List, Set
-from ndk.builds import Module
import ndk.graph
+from ndk.builds import Module
class CyclicDependencyError(RuntimeError):
diff --git a/ndk/ext/os.py b/ndk/ext/os.py
index 4db888d..8239012 100644
--- a/ndk/ext/os.py
+++ b/ndk/ext/os.py
@@ -19,7 +19,7 @@
import contextlib
import os
from pathlib import Path
-from typing import ContextManager, MutableMapping, Iterator
+from typing import ContextManager, Iterator, MutableMapping
@contextlib.contextmanager
diff --git a/ndk/ext/subprocess.py b/ndk/ext/subprocess.py
index cdb5e7f..f21772e 100644
--- a/ndk/ext/subprocess.py
+++ b/ndk/ext/subprocess.py
@@ -21,7 +21,6 @@
import sys
from typing import Any, Sequence, Tuple
-
# TODO: Remove in favor of subprocess.run.
diff --git a/ndk/file.py b/ndk/file.py
deleted file mode 100644
index 57a1ab7..0000000
--- a/ndk/file.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-# Copyright (C) 2018 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Contains file I/O APIs."""
-from pathlib import Path
-
-
-def read_file(path: Path) -> str:
- """Reads the contents of a file into a string, closing the file."""
- with open(path) as the_file:
- return the_file.read()
-
-
-def write_file(path: Path, contents: str) -> None:
- """Writes the given string to the path specified, closing the file."""
- with open(path, "w") as the_file:
- the_file.write(contents)
diff --git a/ndk/ndkbuild.py b/ndk/ndkbuild.py
index ad98d1a..3e477e2 100644
--- a/ndk/ndkbuild.py
+++ b/ndk/ndkbuild.py
@@ -17,8 +17,8 @@
from __future__ import absolute_import
import os
-from pathlib import Path
import subprocess
+from pathlib import Path
from subprocess import CompletedProcess
diff --git a/ndk/ndkversionheadergenerator.py b/ndk/ndkversionheadergenerator.py
new file mode 100644
index 0000000..487f16d
--- /dev/null
+++ b/ndk/ndkversionheadergenerator.py
@@ -0,0 +1,77 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import textwrap
+from pathlib import Path
+
+
+class NdkVersionHeaderGenerator:
+ def __init__(
+ self, major: int, minor: int, beta: int, build_number: int, canary: bool
+ ) -> None:
+ self.major = major
+ self.minor = minor
+ self.beta = beta
+ self.build_number = build_number
+ self.canary = canary
+
+ def generate_str(self) -> str:
+ canary = 1 if self.canary else 0
+ return textwrap.dedent(
+ f"""\
+ #pragma once
+
+ /**
+ * Set to 1 if this is an NDK, unset otherwise. See
+ * https://android.googlesource.com/platform/bionic/+/master/docs/defines.md.
+ */
+ #define __ANDROID_NDK__ 1
+
+ /**
+ * Major version of this NDK.
+ *
+ * For example: 16 for r16.
+ */
+ #define __NDK_MAJOR__ {self.major}
+
+ /**
+ * Minor version of this NDK.
+ *
+ * For example: 0 for r16 and 1 for r16b.
+ */
+ #define __NDK_MINOR__ {self.minor}
+
+ /**
+ * Set to 0 if this is a release build, or 1 for beta 1,
+ * 2 for beta 2, and so on.
+ */
+ #define __NDK_BETA__ {self.beta}
+
+ /**
+ * Build number for this NDK.
+ *
+ * For a local development build of the NDK, this is 0.
+ */
+ #define __NDK_BUILD__ {self.build_number}
+
+ /**
+ * Set to 1 if this is a canary build, 0 if not.
+ */
+ #define __NDK_CANARY__ {canary}
+ """
+ )
+
+ def write(self, output: Path) -> None:
+ output.write_text(self.generate_str())
diff --git a/ndk/paths.py b/ndk/paths.py
index e6140bc..5cb78a2 100644
--- a/ndk/paths.py
+++ b/ndk/paths.py
@@ -21,9 +21,9 @@
import ndk.config
import ndk.hosts
-
ANDROID_DIR = Path(__file__).resolve().parents[2]
NDK_DIR = ANDROID_DIR / "ndk"
+PREBUILT_SYSROOT = ANDROID_DIR / "prebuilts/ndk/platform/sysroot"
DEVICE_TEST_BASE_DIR = PurePosixPath("/data/local/tmp/tests")
diff --git a/ndk/platforms.py b/ndk/platforms.py
index 9b32681..75d161d 100644
--- a/ndk/platforms.py
+++ b/ndk/platforms.py
@@ -19,12 +19,14 @@
from .paths import NDK_DIR
-def _load_data() -> tuple[int, int]:
+def _load_data() -> tuple[int, int, dict[str, int]]:
"""Loads and returns the min and max supported versions."""
with (NDK_DIR / "meta/platforms.json").open() as platforms:
data = json.load(platforms)
- return data["min"], data["max"]
+ return data["min"], data["max"], data["aliases"]
-MIN_API_LEVEL, MAX_API_LEVEL = _load_data()
+MIN_API_LEVEL, MAX_API_LEVEL, API_LEVEL_ALIASES = _load_data()
+ALL_API_LEVELS = list(range(MIN_API_LEVEL, MAX_API_LEVEL + 1))
FIRST_LP64_API_LEVEL = 21
+FIRST_RISCV64_API_LEVEL = 35
diff --git a/ndk/pythonenv.py b/ndk/pythonenv.py
index 881ceba..4313ceb 100644
--- a/ndk/pythonenv.py
+++ b/ndk/pythonenv.py
@@ -14,16 +14,15 @@
# limitations under the License.
#
"""Tools for verifying and fixing our Python environment."""
-from pathlib import Path
import shutil
import site
import sys
import textwrap
+from pathlib import Path
from .hosts import Host
from .paths import ANDROID_DIR
-
PYTHON_DOCS = "https://android.googlesource.com/platform/ndk/+/master/docs/Building.md#python-environment-setup"
@@ -44,6 +43,11 @@
# actually is the one from prebuilts. We still want to verify this when we're
# running on a machine without Poetry because that's probably a build server.
return
+ if Host.current() is Host.Darwin and shutil.which("poetry") is not None:
+ # On macOS the prebuilt python can't be used for the poetry environment because
+ # our python doesn't have support for the ssl module, and for whatever reason
+ # that's consistently required on macOS but not on other platforms.
+ return
prebuilt = python_path()
if interp != prebuilt:
sys.exit(
diff --git a/ndk/run_tests.py b/ndk/run_tests.py
index a5fea21..bc48600 100755
--- a/ndk/run_tests.py
+++ b/ndk/run_tests.py
@@ -15,46 +15,41 @@
# limitations under the License.
#
"""Runs the tests built by make_tests.py."""
-from __future__ import absolute_import
-from __future__ import print_function
+from __future__ import absolute_import, print_function
import argparse
import collections
-from collections.abc import Iterator
-from contextlib import contextmanager
import datetime
import logging
-from pathlib import Path
import random
import shutil
import site
import subprocess
import sys
import time
-from typing import (
- Dict,
- Iterable,
- List,
- Mapping,
- Optional,
-)
+from collections.abc import Iterator
+from contextlib import contextmanager
+from pathlib import Path, PurePosixPath
+from typing import Dict, Iterable, List, Mapping, Optional
import ndk.ansi
import ndk.archive
import ndk.ext.subprocess
import ndk.notify
import ndk.paths
-import ndk.test.buildtest.case
import ndk.test.builder
-from ndk.test.devicetest.case import TestCase
-from ndk.test.devicetest.scanner import ConfigFilter, enumerate_tests
+import ndk.test.buildtest.case
+import ndk.test.ui
+import ndk.ui
from ndk.test.devices import (
Device,
+ DeviceConfig,
DeviceFleet,
DeviceShardingGroup,
find_devices,
- DeviceConfig,
)
+from ndk.test.devicetest.case import TestCase
+from ndk.test.devicetest.scanner import ConfigFilter, enumerate_tests
from ndk.test.filters import TestFilter
from ndk.test.printers import Printer, StdoutPrinter
from ndk.test.report import Report
@@ -68,12 +63,10 @@
UnexpectedSuccess,
)
from ndk.test.spec import BuildConfiguration, TestSpec
-import ndk.test.ui
from ndk.timer import Timer
-import ndk.ui
from ndk.workqueue import ShardingWorkQueue, Worker, WorkQueue
-from .pythonenv import ensure_python_environment
+from .pythonenv import ensure_python_environment
AdbResult = tuple[int, str, str, str]
@@ -162,7 +155,7 @@
def push_tests_to_device(
worker: Worker,
src_dir: Path,
- dest_dir: Path,
+ dest_dir: PurePosixPath,
config: BuildConfiguration,
device: Device,
use_sync: bool,
@@ -198,18 +191,6 @@
device.shell(["chmod", "-R", "777", str(dest_dir)])
-def finish_workqueue_with_ui(workqueue: WorkQueue) -> None:
- console = ndk.ansi.get_console()
- ui = ndk.ui.get_work_queue_ui(console, workqueue)
- with ndk.ansi.disable_terminal_echo(sys.stdin):
- with console.cursor_hide_context():
- ui.draw()
- while not workqueue.finished():
- workqueue.get_result()
- ui.draw()
- ui.clear()
-
-
def push_tests_to_devices(
workqueue: WorkQueue,
test_dir: Path,
@@ -225,7 +206,7 @@
push_tests_to_device, src_dir, dest_dir, config, device, use_sync
)
- finish_workqueue_with_ui(workqueue)
+ ndk.ui.finish_workqueue_with_ui(workqueue, ndk.ui.get_work_queue_ui)
print("Finished pushing tests")
@@ -311,7 +292,7 @@
test_cases: Iterable[TestCase],
) -> None:
for group in fleet.get_missing():
- device_config = DeviceConfig(group.abis, group.version)
+ device_config = DeviceConfig(group.abis, group.version, group.supports_mte)
if not device_config.can_run_build_config(build_config):
# These are a configuration that will never be valid, like a minSdkVersion
# 30 test on an API 21 device. No need to report these.
@@ -325,7 +306,7 @@
def wait_for_results(
report: Report[DeviceShardingGroup],
- workqueue: ShardingWorkQueue[TestResult, DeviceShardingGroup],
+ workqueue: ShardingWorkQueue[TestResult, Device],
printer: Printer,
) -> None:
console = ndk.ansi.get_console()
@@ -357,18 +338,12 @@
if "Could not find exit status in shell output." in result.message:
return True
- # These libc++ tests expect to complete in a specific amount of time,
- # and commonly fail under high load.
- name = result.test.name
- if "libc++.libcxx/thread" in name or "libc++.std/thread" in name:
- return True
-
return False
def restart_flaky_tests(
report: Report[DeviceShardingGroup],
- workqueue: ShardingWorkQueue[TestResult, DeviceShardingGroup],
+ workqueue: ShardingWorkQueue[TestResult, Device],
) -> None:
"""Finds and restarts any failing flaky tests."""
rerun_tests = report.remove_all_failing_flaky(flake_filter)
@@ -414,7 +389,7 @@
# Have to use max of one worker per re-run to ensure that the logs we collect do not
# conflate with other tests.
- queue: ShardingWorkQueue[TestResult, DeviceShardingGroup] = ShardingWorkQueue(
+ queue: ShardingWorkQueue[TestResult, Device] = ShardingWorkQueue(
fleet.get_unique_device_groups(), 1
)
try:
@@ -769,7 +744,7 @@
workqueue.join()
report = Report[DeviceShardingGroup]()
- shard_queue: ShardingWorkQueue[TestResult, DeviceShardingGroup] = ShardingWorkQueue(
+ shard_queue: ShardingWorkQueue[TestResult, Device] = ShardingWorkQueue(
fleet.get_unique_device_groups(), 4
)
try:
diff --git a/ndk/test/builder.py b/ndk/test/builder.py
index 2f445be..e62f72e 100644
--- a/ndk/test/builder.py
+++ b/ndk/test/builder.py
@@ -16,35 +16,32 @@
"""APIs for enumerating and building NDK tests."""
from __future__ import absolute_import
+import json
import logging
import os
-from pathlib import Path
import pickle
import random
import shutil
import sys
import traceback
-from typing import (
- Dict,
- List,
- Tuple,
-)
-from xml.etree import ElementTree
+from pathlib import Path
+from typing import Dict, List, Tuple
import ndk.abis
import ndk.archive
import ndk.paths
+import ndk.test.devicetest.scanner
+import ndk.test.spec
+import ndk.test.suites
+import ndk.test.ui
+import ndk.ui
from ndk.test.buildtest.case import Test
from ndk.test.buildtest.scanner import TestScanner
-import ndk.test.devicetest.scanner
from ndk.test.devices import DeviceConfig
from ndk.test.filters import TestFilter
from ndk.test.printers import Printer
from ndk.test.report import Report
-import ndk.test.spec
-import ndk.test.suites
-import ndk.test.ui
-from ndk.workqueue import LoadRestrictingWorkQueue, Worker, WorkQueue
+from ndk.workqueue import AnyWorkQueue, Worker, WorkQueue
def logger() -> logging.Logger:
@@ -162,18 +159,17 @@
nodist_scanner = ndk.test.buildtest.scanner.BuildTestScanner(
self.test_options.ndk_path, dist=False
)
- libcxx_scanner = ndk.test.buildtest.scanner.LibcxxTestScanner(
- self.test_options.ndk_path
- )
- build_api_level = None # Always use the default.
+ # This is always None for the global config while building. See the comment in
+ # the definition of BuildConfiguration.
+ build_api_level = None
for abi in self.test_spec.abis:
for toolchain_file in ndk.test.spec.CMakeToolchainFile:
- config = ndk.test.spec.BuildConfiguration(
- abi, build_api_level, toolchain_file
- )
- scanner.add_build_configuration(config)
- nodist_scanner.add_build_configuration(config)
- libcxx_scanner.add_build_configuration(config)
+ for weak_symbols in ndk.test.spec.WeakSymbolsConfig:
+ config = ndk.test.spec.BuildConfiguration(
+ abi, build_api_level, toolchain_file, weak_symbols
+ )
+ scanner.add_build_configuration(config)
+ nodist_scanner.add_build_configuration(config)
if "build" in self.test_spec.suites:
test_src = self.test_options.src_dir / "build"
@@ -181,9 +177,6 @@
if "device" in self.test_spec.suites:
test_src = self.test_options.src_dir / "device"
self.add_suite("device", test_src, scanner)
- if "libc++" in self.test_spec.suites:
- test_src = self.test_options.src_dir / "libc++"
- self.add_suite("libc++", test_src, libcxx_scanner)
def add_suite(self, name: str, path: Path, test_scanner: TestScanner) -> None:
if name in self.tests:
@@ -230,7 +223,7 @@
return result
def do_build(self, test_filters: TestFilter) -> Report[None]:
- workqueue: LoadRestrictingWorkQueue[RunTestResult] = LoadRestrictingWorkQueue()
+ workqueue = WorkQueue()
try:
for suite, tests in self.tests.items():
# Each test configuration was expanded when each test was
@@ -241,25 +234,14 @@
for test in tests:
if not test_filters.filter(test.name):
continue
-
- if test.name == "libc++":
- workqueue.add_load_restricted_task(
- _run_test,
- suite,
- test,
- self.obj_dir,
- self.dist_dir,
- test_filters,
- )
- else:
- workqueue.add_task(
- _run_test,
- suite,
- test,
- self.obj_dir,
- self.dist_dir,
- test_filters,
- )
+ workqueue.add_task(
+ _run_test,
+ suite,
+ test,
+ self.obj_dir,
+ self.dist_dir,
+ test_filters,
+ )
report = Report[None]()
self.wait_for_results(report, workqueue, test_filters)
@@ -272,11 +254,11 @@
def wait_for_results(
self,
report: Report[None],
- workqueue: LoadRestrictingWorkQueue[RunTestResult],
+ workqueue: AnyWorkQueue,
test_filters: TestFilter,
) -> None:
console = ndk.ansi.get_console()
- ui = ndk.test.ui.get_test_build_progress_ui(console, workqueue)
+ ui = ndk.ui.get_work_queue_ui(console, workqueue)
with ndk.ansi.disable_terminal_echo(sys.stdin):
with console.cursor_hide_context():
while not workqueue.finished():
@@ -318,135 +300,38 @@
TestFilter.from_string(self.test_options.test_filter),
ndk.test.devicetest.scanner.ConfigFilter(self.test_spec),
)
- workqueue: WorkQueue = WorkQueue()
- try:
- for device_version, abis in self.test_spec.devices.items():
- for abi in abis:
- workqueue.add_task(
- _make_tradefed_zip,
- self.test_options,
- test_groups,
- DeviceConfig([abi], device_version),
- )
- while not workqueue.finished():
- workqueue.get_result()
- finally:
- workqueue.terminate()
- workqueue.join()
-
-
-def _make_tradefed_zip(
- _worker: Worker,
- test_options: ndk.test.spec.TestOptions,
- test_groups: dict[
- ndk.test.spec.BuildConfiguration, list[ndk.test.devicetest.case.TestCase]
- ],
- device_config: DeviceConfig,
-) -> None:
- """Creates a TradeFed .zip file for the specified device config.
-
- Args:
- worker: The worker that invoked this task.
- test_options: Paths and other overall options for the tests.
- test_groups: The set of available test groups from which to choose matching tests.
- device_config: The device we will run the test on.
-
- Returns: Nothing.
- """
- abi = device_config.abis[0]
- api = device_config.version
-
- tree = ElementTree.parse(test_options.src_dir / "device/tradefed-template.xml")
- root = tree.getroot()
- root.attrib["description"] = f"NDK Tests for API {api}, {abi}"
-
- preparer = root.find("./target_preparer")
- assert preparer is not None
-
- arch_elem = root.find(
- "./object[@class='com.android.tradefed.testtype.suite.module.ArchModuleController']"
- )
- assert arch_elem is not None
- ElementTree.SubElement(
- arch_elem,
- "option",
- {
- "name": "arch",
- "value": ndk.abis.abi_to_arch(abi),
- },
- )
-
- test_elem = root.find("./test")
- assert test_elem is not None
-
- ElementTree.SubElement(
- test_elem,
- "option",
- {
- "name": "test-command-line",
- "key": "device-version",
- "value": f"echo ro.build.version.sdk = `getprop ro.build.version.sdk`; test `getprop ro.build.version.sdk` = {device_config.version}",
- },
- )
-
- tradefed_config_filename = f"api-{api}-{abi}-AndroidTest.config"
- files_to_zip = [tradefed_config_filename]
- for config, tests in test_groups.items():
- if config.abi != abi:
- continue
- assert config.api is not None
- if config.api > api:
- continue
- files_to_zip.append(str(config))
- ElementTree.SubElement(
- preparer,
- "option",
- {
- "name": "push-file",
- "key": str(config),
- "value": str(ndk.paths.DEVICE_TEST_BASE_DIR / str(config)),
- },
+ tests_json: dict[str, list[dict[str, str | list[int]]]] = {}
+ for config, tests in test_groups.items():
+ testlist: list[dict[str, str | list[int]]] = []
+ for test in tests:
+ testobj: dict[str, str | list[int]] = {
+ "cmd": test.cmd,
+ "name": f"{config}.{test.build_system}.{test.name}",
+ }
+ unsupported: list[int] = []
+ broken: list[int] = []
+ for device_version, abis in self.test_spec.devices.items():
+ if config.abi not in abis:
+ continue
+ # Pretend device doesn't support MTE which is the safer bet.
+ device_config = DeviceConfig([config.abi], device_version, False)
+ if test.check_unsupported(device_config) is not None:
+ unsupported.append(device_version)
+ else:
+ broken_config, _bug = test.check_broken(device_config)
+ if broken_config is not None:
+ broken.append(device_version)
+ if unsupported:
+ testobj["unsupported"] = unsupported
+ if broken:
+ testobj["broken"] = broken
+ testlist.append(testobj)
+ tests_json[str(config)] = testlist
+ json_config_path = self.test_options.out_dir / "dist" / "tests.json"
+ with json_config_path.open("w", encoding="utf-8") as outfile:
+ json.dump(tests_json, outfile, indent=2)
+ shutil.copy2(json_config_path, self.test_options.package_path.parent)
+ shutil.copy2(
+ self.test_options.src_dir.parent / "qa_config.json",
+ self.test_options.package_path.parent,
)
-
- # There's no executable bit on Windows. Mark everything executable after copying to the device.
- if sys.platform == "win32":
- ElementTree.SubElement(
- preparer,
- "option",
- {
- "name": "post-push",
- "value": "chmod -R 777 {}".format(
- str(ndk.paths.DEVICE_TEST_BASE_DIR / str(config))
- ),
- },
- )
-
- for test in tests:
- if test.check_unsupported(device_config):
- continue
- # TODO: Keep a count of the number of tests and add that to the XML file so we can manually double-check.
- broken_config, _bug = test.check_broken(device_config)
- ElementTree.SubElement(
- test_elem,
- "option",
- {
- "name": "test-command-line",
- "key": f"{config}.{test.build_system}.{test.name}",
- "value": test.cmd if broken_config is None else test.negated_cmd,
- },
- )
-
- ElementTree.indent(tree, space=" ", level=0)
-
- tradefed_config_path = test_options.out_dir / "dist" / tradefed_config_filename
- tree.write(tradefed_config_path, encoding="utf-8", xml_declaration=True)
- assert test_options.package_path is not None
- zipfile = test_options.package_path.parent / f"api-{api}-{abi}-androidTest.zip"
- if zipfile.exists():
- zipfile.unlink()
- ndk.archive.make_zip(
- zipfile,
- test_options.out_dir / "dist",
- files_to_zip,
- preserve_symlinks=True,
- )
diff --git a/tests/build/link_order/__init__.py b/ndk/test/buildtest/__init__.py
similarity index 100%
copy from tests/build/link_order/__init__.py
copy to ndk/test/buildtest/__init__.py
diff --git a/ndk/test/buildtest/case.py b/ndk/test/buildtest/case.py
index 549a6da..8313b2d 100644
--- a/ndk/test/buildtest/case.py
+++ b/ndk/test/buildtest/case.py
@@ -15,38 +15,31 @@
#
"""Build test cases."""
-import fnmatch
-from importlib.abc import Loader
import importlib.util
import logging
import multiprocessing
import os
-from pathlib import Path, PurePosixPath
import shlex
import shutil
import subprocess
+from abc import ABC, abstractmethod
+from importlib.abc import Loader
+from pathlib import Path
from subprocess import CompletedProcess
-from typing import (
- List,
- Optional,
- TextIO,
- Tuple,
- Union,
-)
-import xml.etree.ElementTree
+from typing import List, Optional, Tuple
-from ndk.abis import Abi
import ndk.ansi
-from ndk.cmake import find_cmake, find_ninja
import ndk.ext.os
import ndk.ext.subprocess
import ndk.hosts
import ndk.ndkbuild
import ndk.paths
-from ndk.test.config import LibcxxTestConfig, TestConfig
+from ndk.abis import Abi
+from ndk.cmake import find_cmake, find_ninja
+from ndk.test.config import TestConfig
from ndk.test.filters import TestFilter
-from ndk.test.spec import BuildConfiguration, CMakeToolchainFile
from ndk.test.result import Failure, Skipped, Success, TestResult
+from ndk.test.spec import BuildConfiguration, CMakeToolchainFile
def logger() -> logging.Logger:
@@ -65,7 +58,7 @@
shutil.copytree(src_dir, out_dir, ignore=shutil.ignore_patterns("__pycache__"))
-class Test:
+class Test(ABC):
def __init__(
self, name: str, test_dir: Path, config: BuildConfiguration, ndk_path: Path
) -> None:
@@ -73,6 +66,11 @@
self.test_dir = test_dir
self.config = config
self.ndk_path = ndk_path
+ self.config = self.config.with_api(self.determine_api_level_for_config())
+
+ @abstractmethod
+ def determine_api_level_for_config(self) -> int:
+ ...
def get_test_config(self) -> TestConfig:
return TestConfig.from_test_dir(self.test_dir)
@@ -85,7 +83,7 @@
def is_negative_test(self) -> bool:
raise NotImplementedError
- def check_broken(self) -> Union[Tuple[None, None], Tuple[str, str]]:
+ def check_broken(self) -> tuple[None, None] | tuple[str, str]:
return self.get_test_config().build_broken(self)
def check_unsupported(self) -> Optional[str]:
@@ -153,7 +151,7 @@
) -> Tuple[TestResult, List[Test]]:
raise NotImplementedError
- def check_broken(self) -> Union[Tuple[None, None], Tuple[str, str]]:
+ def check_broken(self) -> tuple[None, None] | tuple[str, str]:
return self.get_test_config().build_broken(self)
def check_unsupported(self) -> Optional[str]:
@@ -168,6 +166,9 @@
def get_extra_ndk_build_flags(self) -> List[str]:
return self.get_test_config().extra_ndk_build_flags()
+ def get_overridden_runtime_minsdkversion(self) -> int | None:
+ return self.get_test_config().override_runtime_minsdkversion(self)
+
class PythonBuildTest(BuildTest):
"""A test that is implemented by test.py.
@@ -190,8 +191,6 @@
def __init__(
self, name: str, test_dir: Path, config: BuildConfiguration, ndk_path: Path
) -> None:
- if config.api is None:
- config = config.with_api(ndk.abis.min_api_for_abi(config.abi))
super().__init__(name, test_dir, config, ndk_path)
if self.abi not in ndk.abis.ALL_ABIS:
@@ -203,6 +202,9 @@
except ValueError as ex:
raise ValueError(f"{self.api} is not a valid API number") from ex
+ def determine_api_level_for_config(self) -> int:
+ return ndk.abis.min_api_for_abi(self.config.abi)
+
def get_build_dir(self, out_dir: Path) -> Path:
return out_dir / str(self.config) / "test.py" / self.name
@@ -228,12 +230,8 @@
class ShellBuildTest(BuildTest):
- def __init__(
- self, name: str, test_dir: Path, config: BuildConfiguration, ndk_path: Path
- ) -> None:
- if config.api is None:
- config = config.with_api(ndk.abis.min_api_for_abi(config.abi))
- super().__init__(name, test_dir, config, ndk_path)
+ def determine_api_level_for_config(self) -> int:
+ return ndk.abis.min_api_for_abi(self.config.abi)
def get_build_dir(self, out_dir: Path) -> Path:
return out_dir / str(self.config) / "build.sh" / self.name
@@ -300,7 +298,7 @@
if not application_mk.exists():
return None
- with open(application_mk) as application_mk_file:
+ with application_mk.open(encoding="utf-8") as application_mk_file:
for line in application_mk_file:
if line.startswith("APP_PLATFORM"):
_, platform_str = line.split(":=")
@@ -317,25 +315,29 @@
def _get_or_infer_app_platform(
- platform_from_user: Optional[int], test_dir: Path, abi: Abi
+ overridden_runtime_minsdkversion: int | None,
+ test_dir: Path,
+ abi: Abi,
) -> int:
"""Determines the platform level to use for a test using ndk-build.
Choose the platform level from, in order of preference:
- 1. Value given as argument.
+ 1. The value forced by the test_config.py using override_runtime_minsdkversion.
2. APP_PLATFORM from jni/Application.mk.
3. Default value for the target ABI.
Args:
- platform_from_user: A user provided platform level or None.
+ overridden_runtime_minsdkversion: The test's forced runtime minSdkVersion. Might
+ differ from the build API level. This is rare (probably only static
+ executables).
test_dir: The directory containing the ndk-build project.
abi: The ABI being targeted.
Returns:
The platform version the test should build against.
"""
- if platform_from_user is not None:
- return platform_from_user
+ if overridden_runtime_minsdkversion is not None:
+ return overridden_runtime_minsdkversion
minimum_version = ndk.abis.min_api_for_abi(abi)
platform_from_application_mk = _platform_from_application_mk(test_dir)
@@ -355,13 +357,16 @@
ndk_path: Path,
dist: bool,
) -> None:
- if config.api is None:
- config = config.with_api(
- _get_or_infer_app_platform(config.api, test_dir, config.abi)
- )
super().__init__(name, test_dir, config, ndk_path)
self.dist = dist
+ def determine_api_level_for_config(self) -> int:
+ return _get_or_infer_app_platform(
+ self.get_overridden_runtime_minsdkversion(),
+ self.test_dir,
+ self.config.abi,
+ )
+
def get_dist_dir(self, obj_dir: Path, dist_dir: Path) -> Path:
if self.dist:
return self.get_build_dir(dist_dir)
@@ -384,7 +389,6 @@
self.ndk_path,
self.ndk_build_flags,
self.abi,
- self.api,
)
if (failure := self.verify_no_cruft_in_dist(dist_dir, proc.args)) is not None:
return failure, []
@@ -398,13 +402,11 @@
ndk_path: Path,
ndk_build_flags: List[str],
abi: Abi,
- platform: int,
) -> CompletedProcess[str]:
_prep_build_dir(test_dir, obj_dir)
with ndk.ext.os.cd(obj_dir):
args = [
f"APP_ABI={abi}",
- f"APP_PLATFORM=android-{platform}",
f"NDK_LIBS_OUT={dist_dir}",
] + _get_jobs_args()
return ndk.ndkbuild.build(ndk_path, args + ndk_build_flags)
@@ -419,13 +421,16 @@
ndk_path: Path,
dist: bool,
) -> None:
- if config.api is None:
- config = config.with_api(
- _get_or_infer_app_platform(config.api, test_dir, config.abi)
- )
super().__init__(name, test_dir, config, ndk_path)
self.dist = dist
+ def determine_api_level_for_config(self) -> int:
+ return _get_or_infer_app_platform(
+ self.get_overridden_runtime_minsdkversion(),
+ self.test_dir,
+ self.config.abi,
+ )
+
def get_dist_dir(self, obj_dir: Path, dist_dir: Path) -> Path:
if self.dist:
return self.get_build_dir(dist_dir)
@@ -448,7 +453,6 @@
self.ndk_path,
self.cmake_flags,
self.abi,
- self.api,
self.config.toolchain_file == CMakeToolchainFile.Legacy,
)
if (failure := self.verify_no_cruft_in_dist(dist_dir, proc.args)) is not None:
@@ -463,7 +467,6 @@
ndk_path: Path,
cmake_flags: List[str],
abi: str,
- platform: int,
use_legacy_toolchain_file: bool,
) -> CompletedProcess[str]:
_prep_build_dir(test_dir, obj_dir)
@@ -484,14 +487,12 @@
"-GNinja",
f"-DCMAKE_MAKE_PROGRAM={ninja_bin}",
]
- if platform is not None:
- args.append("-DANDROID_PLATFORM=android-{}".format(platform))
if use_legacy_toolchain_file:
args.append("-DANDROID_USE_LEGACY_TOOLCHAIN_FILE=ON")
else:
args.append("-DANDROID_USE_LEGACY_TOOLCHAIN_FILE=OFF")
proc = subprocess.run(
- [str(cmake_bin)] + cmake_flags + args,
+ [str(cmake_bin)] + args + cmake_flags,
check=False,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
@@ -506,371 +507,3 @@
stderr=subprocess.PIPE,
encoding="utf-8",
)
-
-
-def get_xunit_reports(
- xunit_file: Path, test_base_dir: Path, config: BuildConfiguration, ndk_path: Path
-) -> List[Test]:
- tree = xml.etree.ElementTree.parse(str(xunit_file))
- root = tree.getroot()
- cases = root.findall(".//testcase")
-
- reports: List[Test] = []
- for test_case in cases:
- mangled_test_dir = test_case.get("classname")
- assert mangled_test_dir is not None
-
- case_name = test_case.get("name")
- assert case_name is not None
-
- # The classname is the path from the root of the libc++ test directory
- # to the directory containing the test (prefixed with 'libc++.')...
- mangled_path = "/".join([mangled_test_dir, case_name])
-
- # ... that has had '.' in its path replaced with '_' because xunit.
- test_matches = find_original_libcxx_test(mangled_path)
- if not test_matches:
- raise RuntimeError(f"Found no matches for test {mangled_path}")
- if len(test_matches) > 1:
- raise RuntimeError(
- "Found multiple matches for test {}: {}".format(
- mangled_path, test_matches
- )
- )
- assert len(test_matches) == 1
-
- # We found a unique path matching the xunit class/test name.
- name = test_matches[0]
- test_dir = Path(os.path.dirname(name)[len("libc++.") :])
-
- failure_nodes = test_case.findall("failure")
- if not failure_nodes:
- reports.append(
- XunitSuccess(name, test_base_dir, test_dir, config, ndk_path)
- )
- continue
-
- if len(failure_nodes) != 1:
- msg = (
- "Could not parse XUnit output: test case does not have a "
- "unique failure node: {}".format(name)
- )
- raise RuntimeError(msg)
-
- failure_node = failure_nodes[0]
- failure_text = failure_node.text
- assert failure_text is not None
- reports.append(
- XunitFailure(name, test_base_dir, test_dir, failure_text, config, ndk_path)
- )
- return reports
-
-
-def get_lit_cmd() -> Optional[List[str]]:
- # The build server doesn't install lit to a virtualenv, so use it from the
- # source location if possible.
- lit_path = ndk.paths.android_path("toolchain/llvm-project/llvm/utils/lit/lit.py")
- if lit_path.exists():
- return ["python", str(lit_path)]
- if shutil.which("lit"):
- return ["lit"]
- return None
-
-
-def find_original_libcxx_test(name: str) -> List[str]:
- """Finds the original libc++ test file given the xunit test name.
-
- LIT mangles test names to replace all periods with underscores because
- xunit. This returns all tests that could possibly match the xunit test
- name.
- """
-
- name = str(PurePosixPath(name))
-
- # LIT special cases tests in the root of the test directory (such as
- # test/nothing_to_do.pass.cpp) as "libc++.libc++/$TEST_FILE.pass.cpp" for
- # some reason. Strip it off so we can find the tests.
- if name.startswith("libc++.libc++/"):
- name = "libc++." + name[len("libc++.libc++/") :]
-
- test_prefix = "libc++."
- if not name.startswith(test_prefix):
- raise ValueError('libc++ test name must begin with "libc++."')
-
- name = name[len(test_prefix) :]
- test_pattern = name.replace("_", "?")
- matches = []
-
- # On Windows, a multiprocessing worker process does not inherit ALL_TESTS,
- # so we must scan libc++ tests in each worker.
- from ndk.test.buildtest.scanner import ( # pylint: disable=import-outside-toplevel
- LibcxxTestScanner,
- )
-
- LibcxxTestScanner.find_all_libcxx_tests()
- all_libcxx_tests = LibcxxTestScanner.ALL_TESTS
- for match in fnmatch.filter(all_libcxx_tests, test_pattern):
- matches.append(test_prefix + match)
- return matches
-
-
-class LibcxxTest(Test):
- def __init__(
- self, name: str, test_dir: Path, config: BuildConfiguration, ndk_path: Path
- ) -> None:
- if config.api is None:
- config = config.with_api(ndk.abis.min_api_for_abi(config.abi))
- super().__init__(name, test_dir, config, ndk_path)
-
- @property
- def abi(self) -> Abi:
- return self.config.abi
-
- @property
- def api(self) -> Optional[int]:
- return self.config.api
-
- def get_build_dir(self, out_dir: Path) -> Path:
- return out_dir / str(self.config) / "libcxx" / self.name
-
- def run_lit(
- self,
- lit: List[str],
- ndk_path: Path,
- libcxx_src: Path,
- libcxx_install: Path,
- build_dir: Path,
- filters: List[str],
- ) -> None:
- arch = ndk.abis.abi_to_arch(self.abi)
- host_tag = ndk.hosts.get_host_tag()
- target = ndk.abis.clang_target(arch, self.api)
- toolchain = ndk.abis.arch_to_toolchain(arch)
-
- replacements = [
- ("abi", self.abi),
- ("api", self.api),
- ("arch", arch),
- ("host_tag", host_tag),
- ("libcxx_install", libcxx_install),
- ("libcxx_src", libcxx_src),
- ("ndk_path", ndk_path),
- ("toolchain", toolchain),
- ("triple", target),
- ("build_dir", build_dir),
- # TODO(danalbert): Migrate to the new test format.
- ("use_old_format", "true"),
- ]
- lit_cfg_args = []
- for key, value in replacements:
- lit_cfg_args.append(f"--param={key}={value}")
-
- xunit_output = build_dir / "xunit.xml"
- # Remove the xunit output so we don't wrongly report stale results when
- # the test runner itself is broken. We ignore the exit status of the
- # test runner since we handle test failure reporting ourselves, so if
- # there's an error in the test runner itself it will be ignored and the
- # previous report will be reused.
- if xunit_output.exists():
- os.remove(xunit_output)
-
- lit_args = (
- lit
- + [
- "-sv",
- "--param=build_only=True",
- "--no-progress-bar",
- "--show-all",
- f"--xunit-xml-output={xunit_output}",
- ]
- + lit_cfg_args
- )
-
- default_test_path = libcxx_src / "test"
- test_paths = list(filters)
- if not test_paths:
- test_paths.append(str(default_test_path))
- for test_path in test_paths:
- lit_args.append(test_path)
-
- # Ignore the exit code. We do most XFAIL processing outside the test
- # runner so expected failures in the test runner will still cause a
- # non-zero exit status. This "test" only fails if we encounter a Python
- # exception. Exceptions raised from our code are already caught by the
- # test runner. If that happens in LIT, the xunit output will not be
- # valid and we'll fail get_xunit_reports and raise an exception anyway.
- with open(os.devnull, "w") as dev_null:
- stdout: Optional[TextIO] = dev_null
- stderr: Optional[TextIO] = dev_null
- if logger().isEnabledFor(logging.INFO):
- stdout = None
- stderr = None
- subprocess.call(lit_args, stdout=stdout, stderr=stderr)
-
- def run(
- self, obj_dir: Path, dist_dir: Path, test_filters: TestFilter
- ) -> Tuple[TestResult, List[Test]]:
- lit = get_lit_cmd()
- if lit is None:
- return Failure(self, "Could not find lit"), []
-
- libcxx_src = ndk.paths.ANDROID_DIR / "toolchain/llvm-project/libcxx"
- if not libcxx_src.exists():
- return Failure(self, f"Expected libc++ directory at {libcxx_src}"), []
-
- build_dir = self.get_build_dir(dist_dir)
-
- if not build_dir.exists():
- build_dir.mkdir(parents=True)
-
- xunit_output = build_dir / "xunit.xml"
- libcxx_test_path = libcxx_src / "test"
- libcxx_install = (
- self.ndk_path
- / "toolchains/llvm/prebuilt"
- / ndk.hosts.get_host_tag()
- / "sysroot/usr/lib"
- / ndk.abis.arch_to_triple(ndk.abis.abi_to_arch(self.config.abi))
- )
- libcxx_so_path = libcxx_install / "libc++_shared.so"
- shutil.copy2(str(libcxx_so_path), build_dir)
-
- # The libc++ test runner's filters are path based. Assemble the path to
- # the test based on the late_filters (early filters for a libc++ test
- # would be simply "libc++", so that's not interesting at this stage).
- filters = []
- for late_filter in test_filters.late_filters:
- filter_pattern = late_filter.pattern
- if not filter_pattern.startswith("libc++."):
- continue
-
- _, _, path = filter_pattern.partition(".")
- if not os.path.isabs(path):
- path = os.path.join(libcxx_test_path, path)
-
- # If we have a filter like "libc++.std", we'll run everything in
- # std, but all our XunitReport "tests" will be filtered out. Make
- # sure we have something usable.
- if path.endswith("*"):
- # But the libc++ test runner won't like that, so strip it.
- path = path[:-1]
- elif not os.path.isfile(path):
- raise RuntimeError(f"{path} does not exist")
-
- filters.append(path)
- self.run_lit(lit, self.ndk_path, libcxx_src, libcxx_install, build_dir, filters)
-
- for root, _, files in os.walk(libcxx_test_path):
- for test_file in files:
- if not test_file.endswith(".dat"):
- continue
- test_relpath = os.path.relpath(root, libcxx_test_path)
- dest_dir = build_dir / test_relpath
- if not dest_dir.exists():
- continue
-
- shutil.copy2(str(Path(root) / test_file), dest_dir)
-
- # We create a bunch of fake tests that report the status of each
- # individual test in the xunit report.
- test_reports = get_xunit_reports(
- xunit_output, self.test_dir, self.config, self.ndk_path
- )
-
- return Success(self), test_reports
-
- def check_broken(self) -> Union[Tuple[None, None], Tuple[str, str]]:
- # Actual results are reported individually by pulling them out of the
- # xunit output. This just reports the status of the overall test run,
- # which should be passing.
- return None, None
-
- def check_unsupported(self) -> Optional[str]:
- return None
-
- def is_negative_test(self) -> bool:
- return False
-
-
-class XunitResult(Test):
- """Fake tests so we can show a result for each libc++ test.
-
- We create these by parsing the xunit XML output from the libc++ test
- runner. For each result, we create an XunitResult "test" that simply
- returns a result for the xunit status.
-
- We don't have an ExpectedFailure form of the XunitResult because that is
- already handled for us by the libc++ test runner.
- """
-
- def __init__(
- self,
- name: str,
- test_base_dir: Path,
- test_dir: Path,
- config: BuildConfiguration,
- ndk_path: Path,
- ) -> None:
- super().__init__(name, test_dir, config, ndk_path)
- self.test_base_dir = test_base_dir
-
- @property
- def case_name(self) -> str:
- return os.path.splitext(os.path.basename(self.name))[0]
-
- def run(
- self, _out_dir: Path, _dist_dir: Path, _test_filters: TestFilter
- ) -> Tuple[TestResult, List[Test]]:
- raise NotImplementedError
-
- def get_test_config(self) -> TestConfig:
- test_config_dir = self.test_base_dir / self.test_dir
- return LibcxxTestConfig.from_test_dir(test_config_dir)
-
- def check_broken(self) -> Union[Tuple[None, None], Tuple[str, str]]:
- config, bug = self.get_test_config().build_broken(self)
- if config is not None:
- assert bug is not None
- return config, bug
- return None, None
-
- # pylint: disable=no-self-use
- def check_unsupported(self) -> Optional[str]:
- return None
-
- def is_negative_test(self) -> bool:
- return False
-
- # pylint: enable=no-self-use
-
-
-class XunitSuccess(XunitResult):
- def get_build_dir(self, out_dir: Path) -> Path:
- raise NotImplementedError
-
- def run(
- self, _out_dir: Path, _dist_dir: Path, _test_filters: TestFilter
- ) -> Tuple[TestResult, List[Test]]:
- return Success(self), []
-
-
-class XunitFailure(XunitResult):
- def __init__(
- self,
- name: str,
- test_base_dir: Path,
- test_dir: Path,
- text: str,
- config: BuildConfiguration,
- ndk_path: Path,
- ) -> None:
- super().__init__(name, test_base_dir, test_dir, config, ndk_path)
- self.text = text
-
- def get_build_dir(self, out_dir: Path) -> Path:
- raise NotImplementedError
-
- def run(
- self, _out_dir: Path, _dist_dir: Path, _test_filters: TestFilter
- ) -> Tuple[TestResult, List[Test]]:
- return Failure(self, self.text), []
diff --git a/ndk/test/buildtest/scanner.py b/ndk/test/buildtest/scanner.py
index 09f0861..1582145 100644
--- a/ndk/test/buildtest/scanner.py
+++ b/ndk/test/buildtest/scanner.py
@@ -16,14 +16,11 @@
from __future__ import absolute_import
import glob
-import os
-from pathlib import Path, PurePosixPath
+from pathlib import Path
from typing import List, Set
-import ndk.paths
from ndk.test.buildtest.case import (
CMakeBuildTest,
- LibcxxTest,
NdkBuildTest,
PythonBuildTest,
ShellBuildTest,
@@ -111,43 +108,3 @@
CMakeBuildTest(name, path, config, self.ndk_path, self.dist)
for config in self.build_configurations
]
-
-
-class LibcxxTestScanner(TestScanner):
- ALL_TESTS: List[str] = []
- LIBCXX_SRC = ndk.paths.ANDROID_DIR / "toolchain/llvm-project/libcxx"
-
- def __init__(self, ndk_path: Path) -> None:
- self.ndk_path = ndk_path
- self.build_configurations: Set[BuildConfiguration] = set()
- LibcxxTestScanner.find_all_libcxx_tests()
-
- def add_build_configuration(self, spec: BuildConfiguration) -> None:
- self.build_configurations.add(spec)
-
- def find_tests(self, path: Path, name: str) -> List[Test]:
- return [
- LibcxxTest("libc++", path, config, self.ndk_path)
- for config in self.build_configurations
- if config.toolchain_file == CMakeToolchainFile.Default
- ]
-
- @classmethod
- def find_all_libcxx_tests(cls) -> None:
- # If we instantiate multiple LibcxxTestScanners, we still only need to
- # initialize this once. We only create these in the main thread, so
- # there's no risk of race.
- if cls.ALL_TESTS:
- return
-
- test_base_dir = cls.LIBCXX_SRC / "test"
-
- for root, _dirs, files in os.walk(test_base_dir, followlinks=True):
- for test_file in files:
- if test_file.endswith(".cpp") or test_file.endswith(".mm"):
- test_path = str(
- PurePosixPath(
- os.path.relpath(Path(root) / test_file, test_base_dir)
- )
- )
- cls.ALL_TESTS.append(test_path)
diff --git a/ndk/test/config.py b/ndk/test/config.py
index 5a46a91..b033202 100644
--- a/ndk/test/config.py
+++ b/ndk/test/config.py
@@ -13,15 +13,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from importlib.abc import Loader
import importlib.util
+from importlib.abc import Loader
from pathlib import Path
from types import ModuleType
from typing import Any, Callable, List, Optional, Tuple, Union
from ndk.test.devices import DeviceConfig
-
# Need to refactor to resolve the circular import between this module and
# ndk.test.types.
Test = Any
@@ -116,6 +115,20 @@
"""
return False
+ @staticmethod
+ def override_runtime_minsdkversion(test: Test) -> int | None:
+ """Overrides the minSdkVersion that will be used for determining OS compat.
+
+ Static executables have the unusual build requirement that they always be
+ built with the latest API level, but are compatible with old devices. We
+ need to specify `APP_PLATFORM := latest` for those tests, but the test
+ runner needs to run them on old devices. There isn't an easy way to infer
+ this, nor are there many static executable tests, so those tests instead
+ override their minSdkVersion rather than letting the test builder infer it
+ from APP_PLATFORM.
+ """
+ return None
+
# pylint: enable=unused-argument
def __init__(self, test_config_py: Path) -> None:
@@ -166,6 +179,15 @@
except AttributeError:
self.is_negative_test = self.NullTestConfig.is_negative_test
+ try:
+ self.override_runtime_minsdkversion: Callable[
+ [Test], int | None
+ ] = self.module.override_runtime_minsdkversion # type: ignore
+ except AttributeError:
+ self.override_runtime_minsdkversion = (
+ self.NullTestConfig.override_runtime_minsdkversion
+ )
+
@classmethod
def from_test_dir(cls, test_dir: Path) -> "TestConfig":
return cls(test_dir / "test_config.py")
@@ -240,33 +262,3 @@
@classmethod
def from_test_dir(cls, test_dir: Path) -> "DeviceTestConfig":
return cls(test_dir / "test_config.py")
-
-
-class LibcxxTestConfig(DeviceTestConfig):
- """Specialization of test_config.py for libc++.
-
- The libc++ tests have multiple tests in a single directory, so we need to
- pass the test name for build_broken too.
- """
-
- class NullTestConfig(TestConfig.NullTestConfig):
- # pylint: disable=unused-argument,arguments-differ
- @staticmethod
- def build_unsupported(test: Test) -> Optional[str]:
- return None
-
- @staticmethod
- def build_broken(test: Test) -> Union[Tuple[None, None], Tuple[str, str]]:
- return None, None
-
- @staticmethod
- def run_unsupported(test: Test, device: DeviceConfig) -> Optional[str]:
- return None
-
- @staticmethod
- def run_broken(
- test: Test, device: DeviceConfig
- ) -> Union[Tuple[None, None], Tuple[str, str]]:
- return None, None
-
- # pylint: enable=unused-argument,arguments-differ
diff --git a/ndk/test/devices.py b/ndk/test/devices.py
index 4984228..d83c9b6 100644
--- a/ndk/test/devices.py
+++ b/ndk/test/devices.py
@@ -16,17 +16,17 @@
"""Device wrappers and device fleet management."""
from __future__ import annotations
-from dataclasses import dataclass
import logging
import os
-from pathlib import Path
import re
import shutil
import subprocess
-from typing import Any, Dict, List, Optional, Set
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Dict, List, Optional, Set
-from ndk.abis import Abi
import ndk.paths
+from ndk.abis import Abi
from ndk.test.spec import BuildConfiguration
from ndk.workqueue import ShardingGroup, Worker, WorkQueue
@@ -35,7 +35,7 @@
except ImportError:
import site
- site.addsitedir(str(ndk.paths.android_path("development/python-packages")))
+ site.addsitedir(str(ndk.paths.android_path("development/python-packages/adb")))
import adb # pylint: disable=import-error,ungrouped-imports
@@ -48,6 +48,7 @@
class DeviceConfig:
abis: list[Abi]
version: int
+ supports_mte: bool
def can_run_build_config(self, config: BuildConfiguration) -> bool:
assert config.api is not None
@@ -77,14 +78,15 @@
self._ro_build_id: Optional[str] = None
self._ro_build_version_sdk: Optional[str] = None
self._ro_build_version_codename: Optional[str] = None
- self._ro_debuggable: Optional[bool] = None
+ self._ro_debuggable: Optional[str] = None
self._ro_product_name: Optional[str] = None
+ self._supports_mte: bool = False
if precache:
self.cache_properties()
def config(self) -> DeviceConfig:
- return DeviceConfig(self.abis, self.version)
+ return DeviceConfig(self.abis, self.version, self.supports_mte)
def cache_properties(self) -> None:
"""Caches the device's system properties."""
@@ -109,7 +111,7 @@
for abi_prop in abi_properties:
value = self.get_prop(abi_prop)
if value is not None:
- abis.update(value.split(","))
+ abis.update([Abi(s) for s in value.split(",")])
if "x86_64" in abis:
# Don't allow ndk_translation to count as an arm test device.
@@ -118,6 +120,9 @@
abis.difference_update({"arm64-v8a", "armeabi-v7a"})
self._cached_abis = sorted(list(abis))
+ self._supports_mte = (
+ self.shell_nocheck(["grep", " mte", "/proc/cpuinfo"])[0] == 0
+ )
@property
def name(self) -> str:
@@ -169,6 +174,12 @@
def supports_pie(self) -> bool:
return self.version >= 16
+ @property
+ def supports_mte(self) -> bool:
+ self.cache_properties()
+ assert self._supports_mte is not None
+ return self._supports_mte
+
def __str__(self) -> str:
return f"android-{self.version} {self.name} {self.serial} {self.build_id}"
@@ -180,7 +191,7 @@
return hash(self.serial)
-class DeviceShardingGroup(ShardingGroup):
+class DeviceShardingGroup(ShardingGroup[Device]):
"""A collection of devices that should be identical for testing purposes.
For the moment, devices are only identical for testing purposes if they are
@@ -195,6 +206,7 @@
is_emulator: bool,
is_release: bool,
is_debuggable: bool,
+ supports_mte: bool,
) -> None:
self.devices = devices
self.abis = abis
@@ -202,6 +214,7 @@
self.is_emulator = is_emulator
self.is_release = is_release
self.is_debuggable = is_debuggable
+ self.supports_mte = supports_mte
@classmethod
def with_first_device(cls, first_device: Device) -> DeviceShardingGroup:
@@ -212,13 +225,14 @@
first_device.is_emulator,
first_device.is_release,
first_device.is_debuggable,
+ first_device.supports_mte,
)
def __str__(self) -> str:
return f'android-{self.version} {" ".join(self.abis)}'
@property
- def shards(self) -> List[Any]:
+ def shards(self) -> list[Device]:
return self.devices
def add_device(self, device: Device) -> None:
@@ -238,6 +252,8 @@
return False
if self.is_debuggable != device.is_debuggable:
return False
+ if self.supports_mte != device.supports_mte:
+ return False
return True
def __eq__(self, other: object) -> bool:
@@ -252,15 +268,13 @@
return False
if self.is_debuggable != other.is_debuggable:
return False
+ if self.supports_mte != other.supports_mte:
+ return False
if self.devices != other.devices:
print("devices not equal: {}, {}".format(self.devices, other.devices))
return False
return True
- def __lt__(self, other: object) -> bool:
- assert isinstance(other, DeviceShardingGroup)
- return (self.version, self.abis) < (other.version, other.abis)
-
def __hash__(self) -> int:
return hash(
(
@@ -270,6 +284,7 @@
self.is_debuggable,
tuple(self.abis),
tuple(self.devices),
+ self.supports_mte,
)
)
@@ -334,6 +349,12 @@
abi
] = DeviceShardingGroup.with_first_device(device)
+ # If we have a device that supports MTE, prefer that.
+ if not current_group.supports_mte and device.supports_mte:
+ self.devices[device.version][
+ abi
+ ] = DeviceShardingGroup.with_first_device(device)
+
def get_unique_device_groups(self) -> Set[DeviceShardingGroup]:
groups = set()
for version in self.get_versions():
@@ -365,6 +386,7 @@
is_emulator=False,
is_release=True,
is_debuggable=False,
+ supports_mte=False,
)
)
return missing
@@ -429,7 +451,7 @@
exclusion_list_env = os.getenv("NDK_DEVICE_EXCLUSION_LIST")
if exclusion_list_env is None:
return False
- exclusion_list = Path(exclusion_list_env).read_text().splitlines()
+ exclusion_list = Path(exclusion_list_env).read_text(encoding="utf-8").splitlines()
return device.serial in exclusion_list
diff --git a/tests/build/link_order/__init__.py b/ndk/test/devicetest/__init__.py
similarity index 100%
copy from tests/build/link_order/__init__.py
copy to ndk/test/devicetest/__init__.py
diff --git a/ndk/test/devicetest/case.py b/ndk/test/devicetest/case.py
index 6f63372..0347a64 100644
--- a/ndk/test/devicetest/case.py
+++ b/ndk/test/devicetest/case.py
@@ -14,21 +14,15 @@
# limitations under the License.
#
import logging
-from pathlib import Path, PurePosixPath
-import os
import shlex
import traceback
-from typing import (
- Optional,
- Tuple,
- Union,
-)
+from pathlib import Path, PurePosixPath
+from typing import Optional, Tuple, Union
-from ndk.test.config import DeviceTestConfig, LibcxxTestConfig
+from ndk.test.config import DeviceTestConfig
from ndk.test.devices import Device, DeviceConfig
from ndk.test.spec import BuildConfiguration
-
AdbResult = tuple[int, str, str, str]
@@ -123,9 +117,8 @@
self.executable = executable
def get_test_config(self) -> DeviceTestConfig:
- # We don't run anything in tests/build, and the libc++ tests are
- # handled by a different LibcxxTest. We can safely assume that anything
- # here is in tests/device.
+ # We don't run anything in tests/build. We can safely assume that anything here
+ # is in tests/device.
test_dir = self.test_src_dir / "device" / self.suite
return DeviceTestConfig.from_test_dir(test_dir)
@@ -142,69 +135,3 @@
return "cd {} && LD_LIBRARY_PATH={} ./{} 2>&1".format(
self.device_dir, self.device_dir, self.executable
)
-
-
-class LibcxxTestCase(TestCase):
- """A libc++ test case built by LIT.
-
- LIT's test structure doesn't map cleanly to ours; they have a hierarchical
- test structure. The top level contains a single "libc++" directory. In that
- directory is where shared libraries common to all tests are placed. That
- directory and any under it may contain test executables (always suffixed
- with ".exe") or test data (always suffixed with ".dat").
- """
-
- def __init__(
- self,
- suite: str,
- executable: str,
- test_src_dir: Path,
- config: BuildConfiguration,
- device_dir: PurePosixPath,
- device_base_dir: PurePosixPath,
- ) -> None:
- # Tests in the top level don't need any mangling to match the filters.
- if suite == "libc++":
- filter_name = executable
- else:
- filter_name = os.path.join(suite[len("libc++/") :], executable)
-
- # The executable name ends with .exe. Remove that so it matches the
- # filter that would be used to build the test.
- name = ".".join(["libc++", filter_name[:-4]])
- super().__init__(name, test_src_dir, config, "libc++", device_dir)
- self.suite = suite
- self.executable = executable
- self.device_base_dir = device_base_dir
-
- @property
- def case_name(self) -> str:
- # Executable is foo.pass.cpp.exe, we want foo.pass.
- return os.path.splitext(os.path.splitext(self.executable)[0])[0]
-
- def get_test_config(self) -> DeviceTestConfig:
- _, _, test_subdir = self.suite.partition("/")
- test_dir = self.test_src_dir / "libc++/test" / test_subdir
- return LibcxxTestConfig.from_test_dir(test_dir)
-
- def check_unsupported(self, device: DeviceConfig) -> Optional[str]:
- config = self.get_test_config().run_unsupported(self, device)
- if config is not None:
- return config
- return None
-
- def check_broken(
- self, device: DeviceConfig
- ) -> Union[Tuple[None, None], Tuple[str, str]]:
- config, bug = self.get_test_config().run_broken(self, device)
- if config is not None:
- assert bug is not None
- return config, bug
- return None, None
-
- @property
- def cmd(self) -> str:
- libcxx_so_dir = self.device_base_dir / str(self.config) / "libcxx" / "libc++"
- return "cd {} && LD_LIBRARY_PATH={} ./{} 2>&1".format(
- self.device_dir, libcxx_so_dir, self.executable
- )
diff --git a/ndk/test/devicetest/scanner.py b/ndk/test/devicetest/scanner.py
index d1d9248..30726f1 100644
--- a/ndk/test/devicetest/scanner.py
+++ b/ndk/test/devicetest/scanner.py
@@ -16,15 +16,10 @@
import logging
import os
from pathlib import Path, PurePosixPath
-from typing import (
- Any,
- Callable,
- Dict,
- List,
-)
+from typing import Callable, Dict, List
import ndk.test.builder
-from ndk.test.devicetest.case import TestCase, BasicTestCase, LibcxxTestCase
+from ndk.test.devicetest.case import BasicTestCase, TestCase
from ndk.test.filters import TestFilter
from ndk.test.spec import BuildConfiguration
@@ -82,61 +77,6 @@
return tests
-def _enumerate_libcxx_tests(
- out_dir_base: Path,
- test_src_dir: Path,
- device_base_dir: PurePosixPath,
- build_cfg: BuildConfiguration,
- build_system: str,
- test_filter: TestFilter,
-) -> List[TestCase]:
- tests: List[TestCase] = []
- tests_dir = out_dir_base / str(build_cfg) / build_system
- if not tests_dir.exists():
- return tests
-
- for root, _, files in os.walk(tests_dir):
- for test_file in files:
- if not test_file.endswith(".exe"):
- continue
- test_relpath = Path(root).relative_to(out_dir_base)
- device_dir = device_base_dir / test_relpath
- suite_name = str(PurePosixPath(Path(os.path.relpath(root, tests_dir))))
-
- # Our file has a .exe extension, but the name should match the
- # source file for the filters to work.
- test_name = test_file[:-4]
-
- # Tests in the top level don't need any mangling to match the
- # filters.
- if suite_name != "libc++":
- if not suite_name.startswith("libc++/"):
- raise ValueError(suite_name)
- # According to the test runner, these are all part of the
- # "libc++" test, and the rest of the data is the subtest name.
- # i.e. libc++/foo/bar/baz.cpp.exe is actually
- # libc++.foo/bar/baz.cpp. Matching this expectation here
- # allows us to use the same filter string for running the tests
- # as for building the tests.
- test_path = suite_name[len("libc++/") :]
- test_name = "/".join([test_path, test_name])
-
- filter_name = ".".join(["libc++", test_name])
- if not test_filter.filter(filter_name):
- continue
- tests.append(
- LibcxxTestCase(
- suite_name,
- test_file,
- test_src_dir,
- build_cfg,
- device_dir,
- device_base_dir,
- )
- )
- return tests
-
-
class ConfigFilter:
def __init__(self, test_spec: ndk.test.spec.TestSpec) -> None:
self.spec = test_spec
@@ -158,14 +98,11 @@
#
# * build.sh
# * cmake
- # * libcxx
# * ndk-build
# * test.py
#
# We need to handle some of these differently. The test.py and build.sh
- # type tests are build only, so we don't need to run them. The libc++ tests
- # are built by a test runner we don't control, so its output doesn't quite
- # match what we expect.
+ # type tests are build only, so we don't need to run them.
test_subdir_class_map: Dict[
str,
Callable[
@@ -174,7 +111,6 @@
],
] = {
"cmake": _enumerate_basic_tests,
- "libcxx": _enumerate_libcxx_tests,
"ndk-build": _enumerate_basic_tests,
}
diff --git a/ndk/test/filters.py b/ndk/test/filters.py
index 08013bf..332b044 100644
--- a/ndk/test/filters.py
+++ b/ndk/test/filters.py
@@ -26,6 +26,9 @@
class TestFilter:
+ # Needed to shut up warnings about `Test*` looking like a unittest test case.
+ __test__ = False
+
def __init__(self, patterns: List[str]) -> None:
self.early_filters: List[FilterFunc] = []
self.late_filters: List[FilterFunc] = []
diff --git a/ndk/test/result.py b/ndk/test/result.py
index ebdde7c..e9ef49e 100644
--- a/ndk/test/result.py
+++ b/ndk/test/result.py
@@ -19,7 +19,6 @@
import ndk.termcolor
-
# TODO: Need to resolve the circular import between this and ndk.test.types.
Test = Any
diff --git a/ndk/test/spec.py b/ndk/test/spec.py
index c7ade3e..5c13c86 100644
--- a/ndk/test/spec.py
+++ b/ndk/test/spec.py
@@ -16,14 +16,14 @@
"""Configuration objects for describing test runs."""
from __future__ import annotations
-from dataclasses import dataclass
import enum
import json
+from dataclasses import dataclass
from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional
-from ndk.abis import Abi, LP32_ABIS, LP64_ABIS
import ndk.test.suites
+from ndk.abis import LP32_ABIS, LP64_ABIS, Abi
@enum.unique
@@ -32,6 +32,12 @@
Default = "new"
+@enum.unique
+class WeakSymbolsConfig(enum.Enum):
+ WeakAPI = "weakapi"
+ StrictAPI = "strictapi"
+
+
class TestOptions:
"""Configuration for how tests should be run."""
@@ -77,7 +83,7 @@
@classmethod
def load(cls, path: Path, abis: Optional[Iterable[Abi]] = None) -> TestSpec:
- with open(path) as config_file:
+ with path.open(encoding="utf-8") as config_file:
test_config: dict[str, Any] = json.load(config_file)
if abis is None:
abis = test_config.get("abis", ndk.abis.ALL_ABIS)
@@ -100,8 +106,16 @@
"""
abi: Abi
+ # This is always None for the global config while building. Each test will fill in
+ # the appropriate value for the test (based on `APP_PLATFORM` or similar). It is
+ # still a part of the BuildConfiguration class because we do not have separate
+ # classes for build config *input* (the BuildConfiguration created by
+ # TestBuilder.find_tests) and build config *output* (the result decided and
+ # serialized by the test, which needs to be read when the test is later run by
+ # run_tests.py).
api: Optional[int]
toolchain_file: CMakeToolchainFile
+ weak_symbol: WeakSymbolsConfig
def with_api(self, api: int) -> BuildConfiguration:
"""Creates a copy of this BuildConfiguration with a new API level.
@@ -113,7 +127,10 @@
A copy of this BuildConfiguration with the new API level.
"""
return BuildConfiguration(
- abi=self.abi, api=api, toolchain_file=self.toolchain_file
+ abi=self.abi,
+ api=api,
+ toolchain_file=self.toolchain_file,
+ weak_symbol=self.weak_symbol,
)
def __str__(self) -> str:
@@ -122,6 +139,7 @@
self.abi,
str(self.api),
self.toolchain_file.value,
+ self.weak_symbol.value,
]
)
@@ -154,20 +172,24 @@
abi += "-v8a"
_, _, rest = rest.partition("-")
- api_str, toolchain_file_str = rest.split("-")
+ api_str, toolchain_file_str, weak_symbols_str = rest.split("-")
api = int(api_str)
toolchain_file = CMakeToolchainFile(toolchain_file_str)
+ weak_symbols = WeakSymbolsConfig(weak_symbols_str)
- return BuildConfiguration(Abi(abi), api, toolchain_file)
+ return BuildConfiguration(Abi(abi), api, toolchain_file, weak_symbols)
- @staticmethod
- def get_extra_ndk_build_flags() -> list[str]:
+ def get_extra_ndk_build_flags(self) -> list[str]:
extra_flags = []
extra_flags.append("V=1")
+ if self.weak_symbol == WeakSymbolsConfig.WeakAPI:
+ extra_flags.append("APP_WEAK_API_DEFS=true")
+
return extra_flags
- @staticmethod
- def get_extra_cmake_flags() -> list[str]:
+ def get_extra_cmake_flags(self) -> list[str]:
extra_flags = []
extra_flags.append("-DCMAKE_VERBOSE_MAKEFILE=ON")
+ if self.weak_symbol == WeakSymbolsConfig.WeakAPI:
+ extra_flags.append("-DANDROID_WEAK_API_DEFS=ON")
return extra_flags
diff --git a/ndk/test/suites.py b/ndk/test/suites.py
index d882c81..bb0eb09 100644
--- a/ndk/test/suites.py
+++ b/ndk/test/suites.py
@@ -18,5 +18,4 @@
ALL_SUITES = (
"build",
"device",
- "libc++",
)
diff --git a/ndk/test/test_devices.py b/ndk/test/test_devices.py
index 3ef3ec5..18cb0bc 100644
--- a/ndk/test/test_devices.py
+++ b/ndk/test/test_devices.py
@@ -16,19 +16,20 @@
"""Tests for ndk.test.devices."""
from __future__ import absolute_import
-from typing import List, Optional
import unittest
+from typing import List
-from ndk.abis import Abi
import ndk.test.devices
-from ndk.test.spec import BuildConfiguration, CMakeToolchainFile
+from ndk.abis import Abi
+from ndk.test.spec import BuildConfiguration, CMakeToolchainFile, WeakSymbolsConfig
class MockDevice(ndk.test.devices.Device):
- def __init__(self, version: int, abis: List[Abi]) -> None:
+ def __init__(self, version: int, abis: List[Abi], supports_mte: bool) -> None:
super().__init__("")
self._version = version
self._abis = abis
+ self._supports_mte = supports_mte
@property
def abis(self) -> List[Abi]:
@@ -38,49 +39,54 @@
def version(self) -> int:
return self._version
+ @property
+ def supports_mte(self) -> bool:
+ return self._supports_mte
-class TestBuildConfiguration(BuildConfiguration):
- def __init__(self, abi: Abi, api: Optional[int]):
- # The CMake toolchain file option is irrelevant for determining device
- # compatibility.
- super().__init__(abi, api, CMakeToolchainFile.Default)
+
+def make_test_build_configuration(abi: Abi, api: int) -> BuildConfiguration:
+ # The CMake toolchain file option is irrelevant for determining device
+ # compatibility.
+ return BuildConfiguration(
+ abi, api, CMakeToolchainFile.Default, WeakSymbolsConfig.WeakAPI
+ )
class DeviceTest(unittest.TestCase):
def test_can_run_build_config(self) -> None:
- jb_arm = MockDevice(16, [Abi("armeabi-v7a")])
- n_arm = MockDevice(25, [Abi("armeabi-v7a"), Abi("arm64-v8a")])
- n_intel = MockDevice(25, [Abi("x86"), Abi("x86_64")])
+ jb_arm = MockDevice(16, [Abi("armeabi-v7a")], False)
+ n_arm = MockDevice(25, [Abi("armeabi-v7a"), Abi("arm64-v8a")], False)
+ n_intel = MockDevice(25, [Abi("x86"), Abi("x86_64")], False)
- jb_arm7 = TestBuildConfiguration(Abi("armeabi-v7a"), 16)
+ jb_arm7 = make_test_build_configuration(Abi("armeabi-v7a"), 16)
# Too old, no PIE support.
self.assertTrue(jb_arm.can_run_build_config(jb_arm7))
self.assertTrue(n_arm.can_run_build_config(jb_arm7))
# Wrong ABI.
self.assertFalse(n_intel.can_run_build_config(jb_arm7))
- l_arm7 = TestBuildConfiguration(Abi("armeabi-v7a"), 21)
+ l_arm7 = make_test_build_configuration(Abi("armeabi-v7a"), 21)
# Too old.
self.assertFalse(jb_arm.can_run_build_config(l_arm7))
self.assertTrue(n_arm.can_run_build_config(l_arm7))
# Wrong ABI.
self.assertFalse(n_intel.can_run_build_config(l_arm7))
- l_arm64 = TestBuildConfiguration(Abi("arm64-v8a"), 21)
+ l_arm64 = make_test_build_configuration(Abi("arm64-v8a"), 21)
# Too old, wrong ABI.
self.assertFalse(jb_arm.can_run_build_config(l_arm64))
self.assertTrue(n_arm.can_run_build_config(l_arm64))
# Wrong ABI.
self.assertFalse(n_intel.can_run_build_config(l_arm64))
- l_intel = TestBuildConfiguration(Abi("x86_64"), 21)
+ l_intel = make_test_build_configuration(Abi("x86_64"), 21)
# Too old, wrong ABI.
self.assertFalse(jb_arm.can_run_build_config(l_intel))
# Wrong ABI.
self.assertFalse(n_arm.can_run_build_config(l_intel))
self.assertTrue(n_intel.can_run_build_config(l_intel))
- o_arm7 = TestBuildConfiguration(Abi("armeabi-v7a"), 26)
+ o_arm7 = make_test_build_configuration(Abi("armeabi-v7a"), 26)
# Too old.
self.assertFalse(jb_arm.can_run_build_config(o_arm7))
# Too old.
@@ -88,7 +94,7 @@
# Too old, wrong ABI.
self.assertFalse(n_intel.can_run_build_config(o_arm7))
- o_arm64 = TestBuildConfiguration(Abi("arm64-v8a"), 26)
+ o_arm64 = make_test_build_configuration(Abi("arm64-v8a"), 26)
# Too old.
self.assertFalse(jb_arm.can_run_build_config(o_arm64))
# Too old.
@@ -96,7 +102,7 @@
# Too old, wrong ABI.
self.assertFalse(n_intel.can_run_build_config(o_arm64))
- o_intel = TestBuildConfiguration(Abi("x86_64"), 26)
+ o_intel = make_test_build_configuration(Abi("x86_64"), 26)
# Too old, wrong ABI.
self.assertFalse(jb_arm.can_run_build_config(o_intel))
# Too old, wrong ABI.
diff --git a/ndk/test/test_paths.py b/ndk/test/test_paths.py
index 1444bdb..7459963 100644
--- a/ndk/test/test_paths.py
+++ b/ndk/test/test_paths.py
@@ -16,9 +16,8 @@
"""Tests for ndk.paths."""
from __future__ import absolute_import
-from pathlib import Path
import unittest
-
+from pathlib import Path
from unittest import mock
import ndk.config
diff --git a/ndk/test/test_report.py b/ndk/test/test_report.py
index 6fc6d9a..5e80cae 100644
--- a/ndk/test/test_report.py
+++ b/ndk/test/test_report.py
@@ -14,7 +14,6 @@
# limitations under the License.
#
"""Tests for ndk.test.report."""
-from typing import Any
import unittest
import ndk.run_tests
@@ -28,7 +27,7 @@
class ReportTest(unittest.TestCase):
def test_remove_all_failing_flaky(self) -> None:
- report = ndk.test.report.Report[Any]()
+ report = ndk.test.report.Report[None]()
# Success. Not filtered.
report.add_result("build", ndk.test.result.Success(MockTest()))
@@ -57,13 +56,5 @@
),
)
- # Flaky libc++ tests. Filtered.
- report.add_result(
- "build", ndk.test.result.Failure(MockTest("libc++.libcxx/thread/foo"), "")
- )
- report.add_result(
- "build", ndk.test.result.Failure(MockTest("libc++.std/thread/foo"), "")
- )
-
results = report.remove_all_failing_flaky(ndk.run_tests.flake_filter)
- self.assertEqual(3, len(results))
+ self.assertEqual(1, len(results))
diff --git a/ndk/test/test_spec.py b/ndk/test/test_spec.py
index ba9caa8..bac19ed 100644
--- a/ndk/test/test_spec.py
+++ b/ndk/test/test_spec.py
@@ -15,27 +15,31 @@
#
import unittest
-from ndk.test.spec import BuildConfiguration, CMakeToolchainFile
+from ndk.test.spec import BuildConfiguration, CMakeToolchainFile, WeakSymbolsConfig
class BuildConfigurationTest(unittest.TestCase):
def test_from_string(self) -> None:
- config = BuildConfiguration.from_string("armeabi-v7a-16-legacy")
+ config = BuildConfiguration.from_string("armeabi-v7a-16-legacy-strictapi")
self.assertEqual("armeabi-v7a", config.abi)
self.assertEqual(16, config.api)
self.assertEqual(CMakeToolchainFile.Legacy, config.toolchain_file)
+ self.assertEqual(WeakSymbolsConfig.StrictAPI, config.weak_symbol)
- config = BuildConfiguration.from_string("arm64-v8a-21-new")
+ config = BuildConfiguration.from_string("arm64-v8a-21-new-strictapi")
self.assertEqual("arm64-v8a", config.abi)
self.assertEqual(21, config.api)
self.assertEqual(CMakeToolchainFile.Default, config.toolchain_file)
+ self.assertEqual(WeakSymbolsConfig.StrictAPI, config.weak_symbol)
- config = BuildConfiguration.from_string("x86-16-new")
+ config = BuildConfiguration.from_string("x86-16-new-strictapi")
self.assertEqual("x86", config.abi)
self.assertEqual(16, config.api)
self.assertEqual(CMakeToolchainFile.Default, config.toolchain_file)
+ self.assertEqual(WeakSymbolsConfig.StrictAPI, config.weak_symbol)
- config = BuildConfiguration.from_string("x86_64-21-new")
+ config = BuildConfiguration.from_string("x86_64-21-new-weakapi")
self.assertEqual("x86_64", config.abi)
self.assertEqual(21, config.api)
self.assertEqual(CMakeToolchainFile.Default, config.toolchain_file)
+ self.assertEqual(WeakSymbolsConfig.WeakAPI, config.weak_symbol)
diff --git a/ndk/test/test_workqueue.py b/ndk/test/test_workqueue.py
index 43d73c3..cc65ebf 100644
--- a/ndk/test/test_workqueue.py
+++ b/ndk/test/test_workqueue.py
@@ -16,14 +16,14 @@
"""Tests for ndk.workqueue."""
import multiprocessing
import os
-from queue import Queue
import signal
import sys
+import time
+import unittest
+from queue import Queue
from threading import Event
from types import FrameType
from typing import Optional
-import time
-import unittest
from ndk.workqueue import BasicWorkQueue, TaskError, Worker, WorkQueue
@@ -85,7 +85,7 @@
sleep_until_sigterm(pid_queue)
-def raise_error() -> None:
+def raise_error(_worker: Worker) -> None:
"""Raises a RuntimeError to be re-raised in the caller."""
raise RuntimeError("Error in child")
diff --git a/ndk/test/ui.py b/ndk/test/ui.py
index 08c65db..5694d6e 100644
--- a/ndk/test/ui.py
+++ b/ndk/test/ui.py
@@ -14,16 +14,15 @@
# limitations under the License.
#
"""UI classes for test output."""
-from __future__ import absolute_import
-from __future__ import print_function
+from __future__ import absolute_import, print_function
import os
from typing import Any, List
-from ndk.ansi import AnsiConsole, Console, font_bold, font_faint, font_reset
-from ndk.ui import Ui, UiRenderer, AnsiUiRenderer, NonAnsiUiRenderer, columnate
-from ndk.test.devices import DeviceShardingGroup
-from ndk.workqueue import LoadRestrictingWorkQueue, ShardingWorkQueue, Worker
+from ndk.ansi import Console, font_bold, font_faint, font_reset
+from ndk.test.devices import Device
+from ndk.ui import AnsiUiRenderer, NonAnsiUiRenderer, Ui, UiRenderer
+from ndk.workqueue import ShardingWorkQueue, Worker
class TestProgressUi(Ui):
@@ -34,7 +33,7 @@
ui_renderer: UiRenderer,
show_worker_status: bool,
show_device_groups: bool,
- workqueue: ShardingWorkQueue[Any, DeviceShardingGroup],
+ workqueue: ShardingWorkQueue[Any, Device],
) -> None:
super().__init__(ui_renderer)
self.show_worker_status = show_worker_status
@@ -64,9 +63,8 @@
)
if self.show_device_groups:
- for group in sorted(self.workqueue.task_queues.keys()):
- assert isinstance(group, DeviceShardingGroup)
- group_id = f"{len(group.devices)} devices {group}"
+ for group in sorted(self.workqueue.task_queues.keys(), key=str):
+ group_id = f"{len(group.shards)} devices {group}"
lines.append(
"{: >{width}} {}".format(
self.workqueue.task_queues[group].qsize(),
@@ -79,7 +77,7 @@
def get_test_progress_ui(
- console: Console, workqueue: ShardingWorkQueue[Any, DeviceShardingGroup]
+ console: Console, workqueue: ShardingWorkQueue[Any, Device]
) -> TestProgressUi:
ui_renderer: UiRenderer
if console.smart_console:
@@ -97,53 +95,3 @@
return TestProgressUi(
ui_renderer, show_worker_status, show_device_groups, workqueue
)
-
-
-class TestBuildProgressUi(Ui):
- NUM_TESTS_DIGITS = 6
-
- def __init__(
- self,
- ui_renderer: UiRenderer,
- show_worker_status: bool,
- workqueue: LoadRestrictingWorkQueue[Any],
- ):
- super().__init__(ui_renderer)
- self.show_worker_status = show_worker_status
- self.workqueue = workqueue
-
- def get_ui_lines(self) -> List[str]:
- lines = []
-
- if self.show_worker_status:
- for worker in self.workqueue.main_work_queue.workers:
- lines.append(worker.status)
- for worker in self.workqueue.restricted_work_queue.workers:
- lines.append(worker.status)
-
- if self.ui_renderer.console.smart_console:
- assert isinstance(self.ui_renderer.console, AnsiConsole)
- # Keep some space at the top of the UI so we can see messages.
- ui_height = self.ui_renderer.console.height - 10
- if ui_height > 0:
- lines = columnate(lines, self.ui_renderer.console.width, ui_height)
-
- lines.append(
- "{: >{width}} tests remaining".format(
- self.workqueue.num_tasks, width=self.NUM_TESTS_DIGITS
- )
- )
- return lines
-
-
-def get_test_build_progress_ui(
- console: Console, workqueue: LoadRestrictingWorkQueue[Any]
-) -> TestBuildProgressUi:
- ui_renderer: UiRenderer
- if console.smart_console:
- ui_renderer = AnsiUiRenderer(console)
- show_worker_status = True
- else:
- ui_renderer = NonAnsiUiRenderer(console)
- show_worker_status = False
- return TestBuildProgressUi(ui_renderer, show_worker_status, workqueue)
diff --git a/ndk/test_deps.py b/ndk/test_deps.py
index f23a8c3..5450f8b 100644
--- a/ndk/test_deps.py
+++ b/ndk/test_deps.py
@@ -14,12 +14,11 @@
# limitations under the License.
#
"""Test for ndk.deps."""
-from typing import Set
import unittest
+from typing import Set
-from ndk.deps import CyclicDependencyError
-from ndk.deps import DependencyManager
from ndk.builds import Module
+from ndk.deps import CyclicDependencyError, DependencyManager
class MockModule(Module):
diff --git a/ndk/test_graph.py b/ndk/test_graph.py
index 777142f..a83ddd0 100644
--- a/ndk/test_graph.py
+++ b/ndk/test_graph.py
@@ -14,8 +14,8 @@
# limitations under the License.
#
"""Test for ndk.graph."""
-from typing import cast, List, Optional
import unittest
+from typing import List, Optional, cast
import ndk.graph
diff --git a/ndk/test_ndkversionheadergenerator.py b/ndk/test_ndkversionheadergenerator.py
new file mode 100644
index 0000000..9811536
--- /dev/null
+++ b/ndk/test_ndkversionheadergenerator.py
@@ -0,0 +1,50 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from pathlib import Path
+
+from .ndkversionheadergenerator import NdkVersionHeaderGenerator
+
+
+def test_ndkversionheadergenerator_generate_str() -> None:
+ text = NdkVersionHeaderGenerator(
+ major=26, minor=0, beta=0, build_number=1234, canary=False
+ ).generate_str()
+ lines = text.splitlines()
+ assert "#define __NDK_MAJOR__ 26" in lines
+ assert "#define __NDK_MINOR__ 0" in lines
+ assert "#define __NDK_BETA__ 0" in lines
+ assert "#define __NDK_BUILD__ 1234" in lines
+ assert "#define __NDK_CANARY__ 0" in lines
+
+ text = NdkVersionHeaderGenerator(
+ major=27, minor=1, beta=2, build_number=0, canary=True
+ ).generate_str()
+ lines = text.splitlines()
+ assert "#define __NDK_MAJOR__ 27" in lines
+ assert "#define __NDK_MINOR__ 1" in lines
+ assert "#define __NDK_BETA__ 2" in lines
+ assert "#define __NDK_BUILD__ 0" in lines
+ assert "#define __NDK_CANARY__ 1" in lines
+
+
+def test_ndkversionheader_write(tmp_path: Path) -> None:
+ generator = NdkVersionHeaderGenerator(
+ major=26, minor=0, beta=0, build_number=1234, canary=False
+ )
+ text = generator.generate_str()
+ output = tmp_path / "ndk-version.h"
+ generator.write(output)
+ assert text == output.read_text()
diff --git a/ndk/testing/flag_verifier.py b/ndk/testing/flag_verifier.py
index 12615f7..4e3218e 100644
--- a/ndk/testing/flag_verifier.py
+++ b/ndk/testing/flag_verifier.py
@@ -16,13 +16,13 @@
"""Tools for verifying the presence or absence of flags in builds."""
from __future__ import annotations
-from pathlib import Path
import shutil
import subprocess
+from pathlib import Path
from typing import Optional
-from ndk.hosts import Host
import ndk.paths
+from ndk.hosts import Host
from ndk.test.spec import BuildConfiguration, CMakeToolchainFile
@@ -36,9 +36,15 @@
"""Returns True if verification failed."""
raise NotImplementedError
- def make_test_result_tuple(self) -> tuple[bool, Optional[str]]:
+ def make_test_result_tuple(
+ self, message_prefix: str | None = None
+ ) -> tuple[bool, Optional[str]]:
"""Creates a test result tuple in the format expect by run_test."""
- return not self.failed(), self.error_message
+ if message_prefix is None:
+ message = self.error_message
+ else:
+ message = f"{message_prefix}\n{self.error_message}"
+ return not self.failed(), message
class FlagVerifierSuccess(FlagVerifierResult):
diff --git a/ndk/testing/standalone_toolchain.py b/ndk/testing/standalone_toolchain.py
index 7488c84..23052bf 100644
--- a/ndk/testing/standalone_toolchain.py
+++ b/ndk/testing/standalone_toolchain.py
@@ -15,16 +15,16 @@
#
import logging
import os
-from pathlib import Path
import shutil
import subprocess
import tempfile
import time
+from pathlib import Path
from typing import Any
import ndk.abis
-from ndk.hosts import Host
import ndk.paths
+from ndk.hosts import Host
from ndk.test.spec import BuildConfiguration
@@ -97,7 +97,6 @@
extra_args: list[str],
flags: list[str],
) -> tuple[bool, str]:
-
# On Windows, the default directory for temporary files may have a different
# (slow) configuration for security controls, indexing, etc. So we create
# temporary directories directly in "out".
diff --git a/ndk/toolchains.py b/ndk/toolchains.py
index 66cd8f5..88d7cf3 100644
--- a/ndk/toolchains.py
+++ b/ndk/toolchains.py
@@ -14,15 +14,14 @@
# limitations under the License.
#
"""APIs for accessing toolchains."""
-from pathlib import Path
import subprocess
+from pathlib import Path
from typing import List
-from ndk.hosts import Host, get_default_host
import ndk.paths
+from ndk.hosts import Host, get_default_host
-
-CLANG_VERSION = "clang-r450784d"
+CLANG_VERSION = "clang-r498229b"
HOST_TRIPLE_MAP = {
@@ -324,6 +323,7 @@
if self.target == Host.Darwin:
flags.extend(self.darwin_sdk.flags)
+ flags.append(f"-L{self.path}/lib")
else:
flags.append(f"--sysroot={self.sysroot.sysroot}")
diff --git a/ndk/tools/ndkgitprebuilts.py b/ndk/tools/ndkgitprebuilts.py
new file mode 100644
index 0000000..b403f86
--- /dev/null
+++ b/ndk/tools/ndkgitprebuilts.py
@@ -0,0 +1,403 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Script for updating the prebuilt NDK installed to a git repo.
+
+Run with `poetry run update-prebuilt-ndk`.
+"""
+from __future__ import annotations
+
+import asyncio
+import logging
+import re
+import shlex
+import shutil
+import sys
+import textwrap
+from abc import ABC, abstractmethod
+from contextlib import nullcontext
+from pathlib import Path
+from tempfile import TemporaryDirectory, mkdtemp
+from typing import ContextManager
+
+import click
+from aiohttp import ClientSession
+from fetchartifact import fetch_artifact_chunked
+
+
+def is_filesystem_case_sensitive(path: Path) -> bool:
+ """Returns True if the file system the given path belongs to is case-sensitive."""
+ if not path.exists():
+ path.mkdir(parents=True)
+ elif not path.is_dir():
+ raise ValueError(f"{path} is not a directory")
+
+ temp_dir = Path(mkdtemp(prefix=f"{path}/"))
+ try:
+ (temp_dir / "a").touch()
+ return not (temp_dir / "A").exists()
+ finally:
+ shutil.rmtree(temp_dir)
+
+
+async def run(cmd: list[str], cwd: Path | None = None) -> None:
+ """Runs and logs an asyncio subprocess."""
+ logging.debug("exec CWD=%s %s", cwd or Path.cwd(), shlex.join(cmd))
+ proc = await asyncio.create_subprocess_exec(cmd[0], *cmd[1:], cwd=cwd)
+ await proc.communicate()
+ if proc.returncode != 0:
+ raise RuntimeError(f"Command failed: CWD={cwd or Path.cwd()} {shlex.join(cmd)}")
+
+
+async def run_piped(cmd: list[str], cwd: Path | None = None) -> bytes:
+ """Runs and logs an asyncio subprocess.
+
+ stdout and stderr will be combined and returned as bytes.
+ """
+ logging.debug("exec CWD=%s %s", cwd or Path.cwd(), shlex.join(cmd))
+ proc = await asyncio.create_subprocess_exec(
+ cmd[0],
+        *cmd[1:],
+ cwd=cwd,
+ stdout=asyncio.subprocess.PIPE,
+ stderr=asyncio.subprocess.STDOUT,
+ )
+ stdout, _ = await proc.communicate()
+ return stdout
+
+
+async def run_shell(cmd: str, cwd: Path | None = None) -> None:
+ """Runs and logs an asyncio subprocess."""
+ logging.debug("shell CWD=%s %s", cwd or Path.cwd(), cmd)
+ proc = await asyncio.create_subprocess_shell(cmd, cwd=cwd)
+ await proc.communicate()
+ if proc.returncode != 0:
+ raise RuntimeError(f"Command failed: CWD={cwd or Path.cwd()} {cmd}")
+
+
+class NdkSource(ABC):
+ @abstractmethod
+ def commit_summary(self) -> str:
+ ...
+
+ @abstractmethod
+ async def download_zip(self, destination: Path) -> None:
+ ...
+
+ @abstractmethod
+ def infer_major_version(self) -> int | None:
+ """Infers the major version from the source, if possible."""
+
+ @staticmethod
+ def from_str(ndk_source: str) -> NdkSource:
+ if ndk_source.startswith("r"):
+ return ReleasedNdk(ndk_source)
+ if (path := Path(ndk_source)).exists():
+ return ZippedNdk(path)
+ return CanaryNdk(ndk_source)
+
+
+class ReleasedNdk(NdkSource):
+ def __init__(self, version: str) -> None:
+ super().__init__()
+ self.version = version
+
+ def commit_summary(self) -> str:
+ return f"Update to NDK {self.version}."
+
+ def infer_major_version(self) -> int | None:
+ pattern = r"r(\d+).*"
+ if (match := re.search(pattern, self.version)) is not None:
+ return int(match.group(1))
+ raise ValueError(
+ f"NDK version {self.version} did not match expected pattern {pattern}"
+ )
+
+ @property
+ def url(self) -> str:
+ return f"https://dl.google.com/android/repository/android-ndk-{self.version}-linux.zip"
+
+ async def download_zip(self, destination: Path) -> None:
+ logging.info("Downloading NDK from %s", self.url)
+ async with ClientSession() as session:
+ async with session.get(self.url) as response:
+ with destination.open("wb") as output:
+ async for chunk in response.content.iter_chunked(4 * 1024 * 1024):
+ output.write(chunk)
+
+
+class CanaryNdk(NdkSource):
+ def __init__(self, build_id: str) -> None:
+ super().__init__()
+ self.build_id = build_id
+
+ def commit_summary(self) -> str:
+ return f"Update to canary build {self.build_id}."
+
+ def infer_major_version(self) -> int | None:
+ return None
+
+ async def download_zip(self, destination: Path) -> None:
+ async with ClientSession() as session:
+ with destination.open("wb") as output:
+ async for chunk in fetch_artifact_chunked(
+ "linux",
+ self.build_id,
+ f"android-ndk-{self.build_id}-linux-x86_64.zip",
+ session,
+ ):
+ output.write(chunk)
+
+
+class ZippedNdk(NdkSource):
+ def __init__(self, path: Path) -> None:
+ super().__init__()
+ self.path = path
+
+ def commit_summary(self) -> str:
+        return "(DO NOT SUBMIT) Update with local NDK."
+
+ def infer_major_version(self) -> int | None:
+ return None
+
+ async def download_zip(self, destination: Path) -> None:
+ shutil.copy(self.path, destination)
+
+
+class PrebuiltsRepo:
+ def __init__(
+ self, path: Path, ndk_major_version: int | None, ndk_source: NdkSource
+ ) -> None:
+ self.path = path
+ self.ndk_major_version = ndk_major_version
+ self.ndk_source = ndk_source
+
+ async def prepare_for_install(self, force: bool) -> None:
+ await self.ensure_latest_master(force)
+ await self.remove_contents()
+
+ async def ensure_latest_master(self, force: bool) -> None:
+ """Clones or updates the NDK prebuilt repo in self.git_repo_path."""
+ if (self.path / ".git").exists():
+ await self.update_git_repo(force)
+ else:
+ await self.clone_git_repo()
+
+ async def update_git_repo(self, force: bool) -> None:
+ """Updates the NDK prebuilt repo in self.path."""
+ if not force:
+ await self.check_if_repo_clean()
+ await self.checkout_master(force)
+ if force:
+ await self._git(["clean", "-df"])
+ await self._git(["pull"])
+
+ async def check_if_repo_clean(self) -> None:
+ """Raises if the repository has uncommitted changes."""
+ output = (await self._git_piped(["status", "--porcelain"])).decode("utf-8")
+ if output:
+ raise RuntimeError(
+                f"Cannot update {self.path} because there are uncommitted changes or "
+ f"untracked files:\n{output}"
+ )
+
+ async def checkout_master(self, force: bool) -> None:
+ """Switches to the master branch."""
+ args = ["checkout"]
+ if force:
+ args.append("-f")
+ args.append("master")
+ await self._git(args)
+
+ async def clone_git_repo(self) -> None:
+ """Clones the NDK prebuilt repo in self.git_repo_path."""
+ assert self.ndk_major_version is not None
+ repo_base = "https://android.googlesource.com/toolchain/prebuilts/ndk"
+ await run(
+ [
+ "git",
+ "clone",
+ f"{repo_base}/r{self.ndk_major_version}",
+ str(self.path),
+ ]
+ )
+
+ async def remove_contents(self) -> None:
+ await self._git(["rm", "-rf", "."])
+
+ async def _git(self, cmd: list[str]) -> None:
+ await run(["git", "-C", str(self.path)] + cmd)
+
+ async def _git_piped(self, cmd: list[str]) -> bytes:
+ return await run_piped(["git", "-C", str(self.path)] + cmd)
+
+ async def install_from(self, ndk_zip: Path) -> None:
+ await self.unzip_to_repo(ndk_zip)
+ self.fixup_install()
+ await self.create_commit()
+
+ async def unzip_to_repo(self, ndk_zip: Path) -> None:
+ assert ndk_zip.exists()
+ # Not using TemporaryDirectory because we want to make sure it's on the same
+ # filesystem as the repo so we can mv rather than cp.
+ temp_dir = self.path / ".extract"
+ if temp_dir.exists():
+ shutil.rmtree(temp_dir)
+ temp_dir.mkdir()
+ try:
+ await run(["unzip", "-d", str(temp_dir), str(ndk_zip)])
+ # We should have extracted a single directory.
+ subdirs = list(temp_dir.iterdir())
+ assert len(subdirs) == 1
+ ndk_dir = subdirs[0]
+ for item in ndk_dir.iterdir():
+ item.rename(self.path / item.name)
+ finally:
+ shutil.rmtree(temp_dir)
+
+ def fixup_install(self) -> None:
+ (self.path / "Android.mk").write_text(
+ textwrap.dedent(
+ """\
+ # Intentionally empty to prevent loading subdir Android.mk files.
+ # The distributed NDK includes a handful of Android.mk files for use
+ # with ndk-build via import-module, but without an empty Android.mk at
+ # the top level, the platform build system will try to use them.
+ """
+ )
+ )
+
+ async def create_commit(self) -> None:
+ await self.install_commit_hook()
+ await self._git(["add", "-A"])
+ message = textwrap.dedent(
+ f"""\
+ {self.ndk_source.commit_summary()}
+
+ Test: treehugger
+ Bug: None
+ """
+ )
+ await self._git(["commit", "-a", "-m", message])
+
+ async def install_commit_hook(self) -> None:
+ commit_hook_url = (
+ "https://gerrit-review.googlesource.com/tools/hooks/commit-msg"
+ )
+ await run_shell(
+ "f=`git rev-parse --git-dir`/hooks/commit-msg ; mkdir -p $(dirname $f) ; "
+ f"curl -Lo $f {commit_hook_url} ; chmod +x $f",
+ cwd=self.path,
+ )
+
+ async def upload(self) -> None:
+ await self._git(["push", "origin", "HEAD:refs/for/master"])
+
+
+class App:
+ def __init__(
+ self,
+ ndk_source: NdkSource,
+ ndk_major_version: int | None,
+ working_directory: Path,
+ force_reset_git_repo: bool,
+ ) -> None:
+ self.prebuilts_repo = PrebuiltsRepo(
+ working_directory / "git_repo", ndk_major_version, ndk_source
+ )
+ self.ndk_source = ndk_source
+ self.working_directory = working_directory
+ self.force_reset_git_repo = force_reset_git_repo
+
+ async def run(self) -> None:
+ logging.debug("Updating prebuilt NDK at %s", self.prebuilts_repo.path)
+ dest = self.working_directory / "ndk.zip"
+ await asyncio.gather(
+ self.ndk_source.download_zip(dest),
+ self.prebuilts_repo.prepare_for_install(self.force_reset_git_repo),
+ )
+ await self.prebuilts_repo.install_from(dest)
+ await self.prebuilts_repo.upload()
+
+ @staticmethod
+ @click.command()
+ @click.option(
+ "-v",
+ "--verbose",
+ count=True,
+ default=0,
+ help="Increase verbosity (repeatable).",
+ )
+ @click.option(
+ "--working-directory",
+ type=click.Path(file_okay=False, resolve_path=True, path_type=Path),
+ help=(
+ "Use the given directory as the working directory rather than a temporary "
+ "directory. Will not be cleaned up on program exit."
+ ),
+ )
+ @click.option(
+ "--ndk-major-version",
+ type=int,
+ help=(
+ "Major version of the NDK prebuilts. If --git-repo is not used, this will "
+ "determine which version of the prebuilts to clone."
+ ),
+ )
+ @click.option(
+ "-f", "--force", is_flag=True, help="Forcibly resets the state of --git-repo."
+ )
+ @click.argument("ndk_source")
+ def main(
+ working_directory: Path | None,
+ verbose: int,
+ ndk_source: str,
+ ndk_major_version: int | None,
+ force: bool,
+ ) -> None:
+ """Updates the NDK checked in to toolchain/prebuilts/ndk/$VERSION.
+
+ NDK_SOURCE is the version of the NDK to install to prebuilts. This can be
+ either an NDK version name such as r25c, which will download that release from
+ dl.google.com; a build ID, which will download that canary build from
+ ci.android.com; or a path to a local file, which will be used as-is. A local
+ file should not be used except for testing. Only release or CI artifacts should
+ ever be checked in.
+ """
+ log_levels = [logging.WARNING, logging.INFO, logging.DEBUG]
+ logging.basicConfig(level=log_levels[min(verbose, len(log_levels) - 1)])
+ ndk = NdkSource.from_str(ndk_source)
+ if ndk_major_version is None:
+ ndk_major_version = ndk.infer_major_version()
+ if ndk_major_version is None:
+ sys.exit(
+ "Could not determine NDK major version from NDK_SOURCE "
+                f"({ndk_source}) and neither --git-repo nor --ndk-major-version was "
+ "used."
+ )
+
+ if working_directory is None:
+ working_directory_ctx: ContextManager[Path | str] = TemporaryDirectory()
+ else:
+ working_directory_ctx = nullcontext(working_directory)
+ with working_directory_ctx as temp_dir_str:
+ temp_dir = Path(temp_dir_str)
+ if not is_filesystem_case_sensitive(temp_dir):
+ sys.exit(
+ f"Working directory {temp_dir} is not case-sensitive. If your "
+ "system's temp directory is not case-sensitive, you must use "
+ "--working-directory."
+ )
+ asyncio.run(App(ndk, ndk_major_version, temp_dir, force).run())
diff --git a/ndk/ui.py b/ndk/ui.py
index 3bb87dd..d46c1c8 100644
--- a/ndk/ui.py
+++ b/ndk/ui.py
@@ -14,15 +14,13 @@
# limitations under the License.
#
"""UI classes for build output."""
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import division
+from __future__ import absolute_import, division, print_function
import math
import os
import sys
import time
-from typing import Iterable, List, Optional, Tuple, cast
+from typing import Callable, Iterable, List, Optional, Tuple, cast
import ndk.ansi
from ndk.workqueue import AnyWorkQueue
@@ -262,3 +260,19 @@
)
)
return lines
+
+
+def finish_workqueue_with_ui(
+ workqueue: ndk.workqueue.WorkQueue,
+ ui_fn: Callable[[ndk.ansi.Console, ndk.workqueue.WorkQueue], Ui],
+) -> None:
+ console = ndk.ansi.get_console()
+ ui = ui_fn(console, workqueue)
+ with ndk.ansi.disable_terminal_echo(sys.stdin):
+ with console.cursor_hide_context():
+ ui.draw()
+ while not workqueue.finished():
+ ui.draw()
+ workqueue.get_result()
+ ui.draw()
+ ui.clear()
diff --git a/ndk/win32.py b/ndk/win32.py
index c5502e1..d1c4e86 100644
--- a/ndk/win32.py
+++ b/ndk/win32.py
@@ -15,11 +15,10 @@
#
"""Python interfaces for win32 APIs."""
from __future__ import absolute_import
-from typing import Optional
import ctypes
import ctypes.wintypes
-
+from typing import Optional
# From winnt.h
JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE = 0x00002000
diff --git a/ndk/workqueue.py b/ndk/workqueue.py
index 1fdea3d..73d5e1e 100644
--- a/ndk/workqueue.py
+++ b/ndk/workqueue.py
@@ -16,33 +16,33 @@
"""Defines WorkQueue for delegating asynchronous work to subprocesses."""
from __future__ import annotations
-from abc import ABC, abstractmethod
import collections
-import itertools
import logging
import multiprocessing
import multiprocessing.managers
import os
-from queue import Queue
import signal
import sys
import traceback
+from abc import ABC, abstractmethod
+from collections.abc import Hashable
+from queue import Queue
from types import FrameType
from typing import (
Any,
Callable,
+ Concatenate,
Deque,
Dict,
Generic,
Iterable,
List,
- Mapping,
Optional,
+ ParamSpec,
TypeVar,
Union,
)
-
IS_WINDOWS = sys.platform == "win32"
@@ -214,11 +214,18 @@
logger().debug("worker %d exiting", os.getpid())
+ResultT = TypeVar("ResultT")
+ParamT = ParamSpec("ParamT")
+
+
class Task:
"""A task to be executed by a worker process."""
def __init__(
- self, func: Callable[..., Any], args: Iterable[Any], kwargs: Mapping[Any, Any]
+ self,
+ func: Callable[Concatenate[Worker, ParamT], ResultT],
+ *args: ParamT.args,
+ **kwargs: ParamT.kwargs,
) -> None:
"""Creates a task.
@@ -288,7 +295,12 @@
self.num_tasks = 0
self._spawn_workers(num_workers)
- def add_task(self, func: Callable[..., Any], *args: Any, **kwargs: Any) -> None:
+ def add_task(
+ self,
+ func: Callable[Concatenate[Worker, ParamT], ResultT],
+ *args: ParamT.args,
+ **kwargs: ParamT.kwargs,
+ ) -> None:
"""Queues up a new task for execution.
Tasks are executed in order of insertion as worker processes become
@@ -299,7 +311,7 @@
args: Arguments to be passed to the task.
kwargs: Keyword arguments to be passed to the task.
"""
- self.task_queue.put(Task(func, args, kwargs))
+ self.task_queue.put(Task(func, *args, **kwargs))
self.num_tasks += 1
def get_result(self) -> Any:
@@ -310,6 +322,27 @@
self.num_tasks -= 1
return result
+ @property
+ def has_pending_results(self) -> bool:
+ return not self.result_queue.empty()
+
+ # TODO: Make ProcessPoolWorkQueue a descendant of BaseWorkQueue and dedup.
+ # We can't trivially make this change because ProcessPoolWorkQueue isn't currently
+ # type safe for its task result, and a single instance may be used for multiple
+ # result types.
+ def get_results(self) -> list[ResultT]:
+ """Gets all pending results.
+
+ If no results are available, this will block until at least one is
+ available. It will then continue dequeuing until the queue is empty, and
+ then return.
+ """
+ results: list[ResultT] = []
+ results.append(self.get_result())
+ while self.has_pending_results:
+ results.append(self.get_result())
+ return results
+
def terminate(self) -> None:
"""Terminates all worker processes."""
for worker in self.workers:
@@ -353,9 +386,6 @@
self.data = data
-ResultT = TypeVar("ResultT")
-
-
class BaseWorkQueue(ABC, Generic[ResultT]):
@abstractmethod
def get_result(self) -> ResultT:
@@ -402,7 +432,12 @@
# pylint: enable=unused-argument
- def add_task(self, func: Callable[..., ResultT], *args: Any, **kwargs: Any) -> None:
+ def add_task(
+ self,
+ func: Callable[Concatenate[Worker, ParamT], ResultT],
+ *args: ParamT.args,
+ **kwargs: ParamT.kwargs,
+ ) -> None:
"""Queues up a new task for execution.
Tasks are executed when get_result is called.
@@ -412,7 +447,7 @@
args: Arguments to be passed to the task.
kwargs: Keyword arguments to be passed to the task.
"""
- self.task_queue.append(Task(func, args, kwargs))
+ self.task_queue.append(Task(func, *args, **kwargs))
def get_result(self) -> Any:
"""Executes a task and returns the result."""
@@ -448,97 +483,24 @@
return self.num_tasks == 0
-class LoadRestrictingWorkQueue(BaseWorkQueue[ResultT]):
- """Specialized work queue for building tests.
+ShardT = TypeVar("ShardT")
- Building the libc++ tests is very demanding and we should not be running
- more than one libc++ build at a time. The LoadRestrictingWorkQueue has a
- normal task queue as well as a task queue served by only one worker.
- """
- def __init__(self, num_workers: int = multiprocessing.cpu_count()) -> None:
- self.manager = multiprocessing.Manager()
- self.result_queue = self.manager.Queue()
-
- assert num_workers >= 2
-
- self.main_task_queue = self.manager.Queue()
- self.restricted_task_queue = self.manager.Queue()
-
- self.main_work_queue = WorkQueue(
- num_workers - 1,
- task_queue=self.main_task_queue,
- result_queue=self.result_queue,
- )
-
- self.restricted_work_queue = WorkQueue(
- 1, task_queue=self.restricted_task_queue, result_queue=self.result_queue
- )
-
- self.num_tasks = 0
-
- def add_task(self, func: Callable[..., ResultT], *args: Any, **kwargs: Any) -> None:
- self.main_task_queue.put(Task(func, args, kwargs))
- self.num_tasks += 1
-
- def add_load_restricted_task(
- self, func: Callable[..., ResultT], *args: Any, **kwargs: Any
- ) -> None:
- self.restricted_task_queue.put(Task(func, args, kwargs))
- self.num_tasks += 1
-
- def get_result(self) -> Any:
- """Gets a result from the queue, blocking until one is available."""
- result = self.result_queue.get()
- if isinstance(result, TaskError):
- raise result
- self.num_tasks -= 1
- return result
-
- def terminate(self) -> None:
- self.main_work_queue.terminate()
- self.restricted_work_queue.terminate()
-
- def join(self) -> None:
- self.main_work_queue.join()
- self.restricted_work_queue.join()
-
+class ShardingGroup(Hashable, Generic[ShardT]):
@property
- def workers(self) -> List[Worker]:
- """List of workers."""
- return list(
- itertools.chain(
- self.main_work_queue.workers, self.restricted_work_queue.workers
- )
- )
-
- @property
- def has_pending_results(self) -> bool:
- return not self.result_queue.empty()
-
- def finished(self) -> bool:
- """Returns True if all tasks have completed execution."""
- return self.num_tasks == 0
-
-
-class ShardingGroup:
- @property
- def shards(self) -> List[Any]:
+ def shards(self) -> list[ShardT]:
raise NotImplementedError
-ShardingGroupType = TypeVar("ShardingGroupType", bound=ShardingGroup)
-
-
-class ShardingWorkQueue(BaseWorkQueue[ResultT], Generic[ResultT, ShardingGroupType]):
+class ShardingWorkQueue(BaseWorkQueue[ResultT], Generic[ResultT, ShardT]):
def __init__(
- self, device_groups: Iterable[ShardingGroupType], procs_per_device: int
+ self, device_groups: Iterable[ShardingGroup[ShardT]], procs_per_device: int
) -> None:
self.manager = multiprocessing.Manager()
self.result_queue = self.manager.Queue()
- self.task_queues: Dict[ShardingGroupType, Queue[Task]] = {}
+ self.task_queues: Dict[ShardingGroup[ShardT], Queue[Task]] = {}
- self.work_queues: Dict[ShardingGroupType, Dict[Any, WorkQueue]] = {}
+ self.work_queues: Dict[ShardingGroup[ShardT], Dict[Any, WorkQueue]] = {}
self.num_tasks = 0
for group in device_groups:
self.work_queues[group] = {}
@@ -553,12 +515,12 @@
def add_task(
self,
- group: ShardingGroupType,
- func: Callable[..., ResultT],
- *args: Any,
- **kwargs: Any,
+ group: ShardingGroup[ShardT],
+ func: Callable[Concatenate[Worker, ParamT], ResultT],
+ *args: ParamT.args,
+ **kwargs: ParamT.kwargs,
) -> None:
- self.task_queues[group].put(Task(func, args, kwargs))
+ self.task_queues[group].put(Task(func, *args, **kwargs))
self.num_tasks += 1
def get_result(self) -> Any:
@@ -589,4 +551,4 @@
WorkQueue = ProcessPoolWorkQueue
-AnyWorkQueue = Union[BasicWorkQueue, LoadRestrictingWorkQueue, ProcessPoolWorkQueue]
+AnyWorkQueue = Union[BasicWorkQueue, ProcessPoolWorkQueue]
diff --git a/ndk-gdb.py b/ndkgdb.py
similarity index 87%
rename from ndk-gdb.py
rename to ndkgdb.py
index 4358759..72f7520 100755
--- a/ndk-gdb.py
+++ b/ndkgdb.py
@@ -19,31 +19,30 @@
import argparse
import contextlib
+import logging
import os
-import operator
import posixpath
import signal
import subprocess
import sys
import time
-import xml.etree.cElementTree as ElementTree
+from collections.abc import Iterator
+from typing import NoReturn
+from xml.etree import ElementTree
+from xml.etree.ElementTree import Element
-import logging
-
-# Shared functions across gdbclient.py and ndk-gdb.py.
-# ndk-gdb is installed to $NDK/prebuilt/<platform>/bin
-NDK_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../.."))
-sys.path.append(os.path.join(NDK_PATH, "python-packages"))
import adb
import gdbrunner
+NDK_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../.."))
-def log(msg):
+
+def log(msg: str) -> None:
logger = logging.getLogger(__name__)
logger.info(msg)
-def enable_verbose_logging():
+def enable_verbose_logging() -> None:
logger = logging.getLogger(__name__)
handler = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter()
@@ -55,13 +54,13 @@
logger.setLevel(logging.INFO)
-def error(msg):
+def error(msg: str) -> NoReturn:
sys.exit("ERROR: {}".format(msg))
class ArgumentParser(gdbrunner.ArgumentParser):
- def __init__(self):
- super(ArgumentParser, self).__init__()
+ def __init__(self) -> None:
+ super().__init__()
self.add_argument(
"--verbose", "-v", action="store_true", help="enable verbose mode"
)
@@ -153,7 +152,7 @@
)
-def extract_package_name(xmlroot):
+def extract_package_name(xmlroot: Element) -> str:
if "package" in xmlroot.attrib:
return xmlroot.attrib["package"]
error("Failed to find package name in AndroidManifest.xml")
@@ -162,7 +161,7 @@
ANDROID_XMLNS = "{http://schemas.android.com/apk/res/android}"
-def extract_launchable(xmlroot):
+def extract_launchable(xmlroot: Element) -> list[str]:
"""
A given application can have several activities, and each activity
can have several intent filters. We want to only list, in the final
@@ -179,6 +178,7 @@
launcher_category = "android.intent.category.LAUNCHER"
name_attrib = "{}name".format(ANDROID_XMLNS)
+ # pylint: disable=too-many-nested-blocks
for activity in application.iter("activity"):
if name_attrib not in activity.attrib:
continue
@@ -200,12 +200,12 @@
return launchable_activities
-def ndk_bin_path():
+def ndk_bin_path() -> str:
return os.path.dirname(os.path.realpath(__file__))
-def handle_args():
- def find_program(program, paths):
+def handle_args() -> argparse.Namespace:
+ def find_program(program: str, paths: list[str]) -> str | None:
"""Find a binary in paths"""
exts = [""]
if sys.platform.startswith("win"):
@@ -221,7 +221,7 @@
# FIXME: This is broken for PATH that contains quoted colons.
paths = os.environ["PATH"].replace('"', "").split(os.pathsep)
- args = ArgumentParser().parse_args()
+ args: argparse.Namespace = ArgumentParser().parse_args()
if args.tui and sys.platform.startswith("win"):
error("TUI is unsupported on Windows.")
@@ -232,7 +232,7 @@
if args.make_cmd is None:
error("Failed to find make in '{}'".format(ndk_bin))
if args.jdb_cmd is None:
- print("WARNING: Failed to find jdb on your path, defaulting to " "--nowait")
+ print("WARNING: Failed to find jdb on your path, defaulting to --nowait")
args.nowait = True
if args.verbose:
@@ -241,9 +241,10 @@
return args
-def find_project(args):
+def find_project(args: argparse.Namespace) -> str:
manifest_name = "AndroidManifest.xml"
- if args.project is not None:
+ project: str | None = args.project
+ if project is not None:
log("Using project directory: {}".format(args.project))
args.project = os.path.realpath(os.path.expanduser(args.project))
if not os.path.exists(os.path.join(args.project, manifest_name)):
@@ -264,17 +265,19 @@
current_dir = parent_dir
args.project = current_dir
log("Using project directory: {} ".format(args.project))
- args.manifest_path = os.path.join(args.project, manifest_name)
- return args.project
+ assert project is not None
+ args.manifest_path = os.path.join(project, manifest_name)
+ args.project = project
+ return project
-def canonicalize_activity(package_name, activity_name):
+def canonicalize_activity(package_name: str, activity_name: str) -> str:
if activity_name.startswith("."):
return "{}{}".format(package_name, activity_name)
return activity_name
-def parse_manifest(args):
+def parse_manifest(args: argparse.Namespace) -> None:
manifest = ElementTree.parse(args.manifest_path)
manifest_root = manifest.getroot()
package_name = extract_package_name(manifest_root)
@@ -291,12 +294,13 @@
args.package_name = package_name
-def select_target(args):
- assert args.launch != False
+def select_target(args: argparse.Namespace) -> str:
+ assert args.launch
if len(args.activities) == 0:
error("No launchable activities found.")
+ target: str
if args.launch is None:
target = args.activities[0]
@@ -316,7 +320,7 @@
@contextlib.contextmanager
-def cd(path):
+def cd(path: str) -> Iterator[None]:
curdir = os.getcwd()
os.chdir(path)
os.environ["PWD"] = path
@@ -327,7 +331,7 @@
os.chdir(curdir)
-def dump_var(args, variable, abi=None):
+def dump_var(args: argparse.Namespace, variable: str, abi: str | None = None) -> str:
make_args = [
args.make_cmd,
"--no-print-dir",
@@ -349,11 +353,14 @@
return make_output.splitlines()[-1].decode()
-def get_api_level(device):
+def get_api_level(device: adb.AndroidDevice) -> int:
# Check the device API level
try:
- api_level = int(device.get_prop("ro.build.version.sdk"))
- except (TypeError, ValueError):
+ api_str = device.get_prop("ro.build.version.sdk")
+ if api_str is None:
+ raise KeyError
+ api_level = int(api_str)
+ except (ValueError, KeyError):
error(
"Failed to find target device's supported API level.\n"
"ndk-gdb only supports devices running Android 2.2 or higher."
@@ -367,7 +374,7 @@
return api_level
-def fetch_abi(args):
+def fetch_abi(args: argparse.Namespace) -> str:
"""
Figure out the intersection of which ABIs the application is built for and
which ones the device supports, then pick the one preferred by the device,
@@ -386,7 +393,7 @@
if args.device.get_prop("ro.product.cpu.abilist") is None:
abi_props = old_abi_props
- device_abis = []
+ device_abis: list[str] = []
for key in abi_props:
value = args.device.get_prop(key)
if value is not None:
@@ -410,14 +417,15 @@
error(msg)
-def get_run_as_cmd(user, cmd):
+def get_run_as_cmd(user: str, cmd: list[str]) -> list[str]:
return ["run-as", user] + cmd
-def get_app_data_dir(args, package_name):
+def get_app_data_dir(args: argparse.Namespace, package_name: str) -> str:
cmd = ["/system/bin/sh", "-c", "pwd", "2>/dev/null"]
cmd = get_run_as_cmd(package_name, cmd)
- (rc, stdout, _) = args.device.shell_nocheck(cmd)
+ device: adb.AndroidDevice = args.device
+ (rc, stdout, _) = device.shell_nocheck(cmd)
if rc != 0:
error(
"Could not find application's data directory. Are you sure that "
@@ -440,44 +448,40 @@
return data_dir
-def abi_to_arch(abi):
+def abi_to_arch(abi: str) -> str:
if abi.startswith("armeabi"):
return "arm"
- elif abi == "arm64-v8a":
+ if abi == "arm64-v8a":
return "arm64"
- else:
- return abi
+ return abi
-def abi_to_llvm_arch(abi):
+def abi_to_llvm_arch(abi: str) -> str:
if abi.startswith("armeabi"):
return "arm"
- elif abi == "arm64-v8a":
+ if abi == "arm64-v8a":
return "aarch64"
- elif abi == "x86":
+ if abi == "x86":
return "i386"
- else:
- return "x86_64"
+ return "x86_64"
-def get_llvm_host_name():
+def get_llvm_host_name() -> str:
platform = sys.platform
if platform.startswith("win"):
return "windows-x86_64"
- elif platform.startswith("darwin"):
+ if platform.startswith("darwin"):
return "darwin-x86_64"
- else:
- return "linux-x86_64"
+ return "linux-x86_64"
-def get_python_executable(toolchain_path):
+def get_python_executable(toolchain_path: str) -> str:
if sys.platform.startswith("win"):
return os.path.join(toolchain_path, "python3", "python.exe")
- else:
- return os.path.join(toolchain_path, "python3", "bin", "python3")
+ return os.path.join(toolchain_path, "python3", "bin", "python3")
-def get_lldb_path(toolchain_path):
+def get_lldb_path(toolchain_path: str) -> str | None:
for lldb_name in ["lldb.sh", "lldb.cmd", "lldb", "lldb.exe"]:
debugger_path = os.path.join(toolchain_path, "bin", lldb_name)
if os.path.isfile(debugger_path):
@@ -485,10 +489,10 @@
return None
-def get_llvm_package_version(llvm_toolchain_dir):
+def get_llvm_package_version(llvm_toolchain_dir: str) -> str:
version_file_path = os.path.join(llvm_toolchain_dir, "AndroidVersion.txt")
try:
- version_file = open(version_file_path, "r")
+ version_file = open(version_file_path, "r", encoding="utf-8")
except IOError:
error(
"Failed to open llvm package version file: '{}'.".format(version_file_path)
@@ -499,8 +503,13 @@
def get_debugger_server_path(
- args, package_name, app_data_dir, arch, server_name, local_path
-):
+ args: argparse.Namespace,
+ package_name: str,
+ app_data_dir: str,
+ arch: str,
+ server_name: str,
+ local_path: str,
+) -> str:
app_debugger_server_path = "{}/lib/{}".format(app_data_dir, server_name)
cmd = ["ls", app_debugger_server_path, "2>/dev/null"]
cmd = get_run_as_cmd(package_name, cmd)
@@ -548,7 +557,7 @@
return remote_path
-def pull_binaries(device, out_dir, app_64bit):
+def pull_binaries(device: adb.AndroidDevice, out_dir: str, app_64bit: bool) -> None:
required_files = []
libraries = ["libc.so", "libm.so", "libdl.so"]
@@ -578,13 +587,18 @@
destination = os.path.realpath(out_dir + "/system/bin/app_process")
try:
device.pull("/system/bin/app_process32", destination)
- except:
+ except subprocess.CalledProcessError:
device.pull("/system/bin/app_process", destination)
def generate_lldb_script(
- args, sysroot, binary_path, app_64bit, jdb_pid, llvm_toolchain_dir
-):
+ args: argparse.Namespace,
+ sysroot: str,
+ binary_path: str,
+ app_64bit: bool,
+ jdb_pid: int,
+ llvm_toolchain_dir: str,
+) -> str:
lldb_commands = []
solib_search_paths = [
"{}/system/bin".format(sysroot),
@@ -635,7 +649,7 @@
if args.exec_file is not None:
try:
- exec_file = open(args.exec_file, "r")
+ exec_file = open(args.exec_file, "r", encoding="utf-8")
except IOError:
error("Failed to open lldb exec file: '{}'.".format(args.exec_file))
@@ -646,8 +660,13 @@
def generate_gdb_script(
- args, sysroot, binary_path, app_64bit, jdb_pid, connect_timeout=5
-):
+ args: argparse.Namespace,
+ sysroot: str,
+ binary_path: str,
+ app_64bit: bool,
+ jdb_pid: int,
+ connect_timeout: int = 5,
+) -> str:
if sys.platform.startswith("win"):
# GDB expects paths to use forward slashes.
sysroot = sysroot.replace("\\", "/")
@@ -656,12 +675,12 @@
gdb_commands = "set osabi GNU/Linux\n"
gdb_commands += "file '{}'\n".format(binary_path)
- solib_search_path = [sysroot, "{}/system/bin".format(sysroot)]
+ solib_search_paths = [sysroot, "{}/system/bin".format(sysroot)]
if app_64bit:
- solib_search_path.append("{}/system/lib64".format(sysroot))
+ solib_search_paths.append("{}/system/lib64".format(sysroot))
else:
- solib_search_path.append("{}/system/lib".format(sysroot))
- solib_search_path = os.pathsep.join(solib_search_path)
+ solib_search_paths.append("{}/system/lib".format(sysroot))
+ solib_search_path = os.pathsep.join(solib_search_paths)
gdb_commands += "set solib-absolute-prefix {}\n".format(sysroot)
gdb_commands += "set solib-search-path {}\n".format(solib_search_path)
@@ -719,7 +738,7 @@
if args.exec_file is not None:
try:
- exec_file = open(args.exec_file, "r")
+ exec_file = open(args.exec_file, "r", encoding="utf-8")
except IOError:
error("Failed to open GDB exec file: '{}'.".format(args.exec_file))
@@ -729,8 +748,9 @@
return gdb_commands
-def start_jdb(adb_path, serial, jdb_cmd, pid, verbose):
- pid = int(pid)
+def start_jdb(argv_subset: list[str]) -> None:
+ adb_path, serial, jdb_cmd, pid_str, verbose = argv_subset
+ pid = int(pid_str)
device = adb.get_device(serial, adb_path=adb_path)
if verbose == "True":
enable_verbose_logging()
@@ -745,26 +765,32 @@
jdb_port = 65534
device.forward("tcp:{}".format(jdb_port), "jdwp:{}".format(pid))
- jdb_cmd = [
+ jdb_args = [
jdb_cmd,
"-connect",
"com.sun.jdi.SocketAttach:hostname=localhost,port={}".format(jdb_port),
]
- flags = subprocess.CREATE_NEW_PROCESS_GROUP if windows else 0
+ if sys.platform == "win32":
+ flags = subprocess.CREATE_NEW_PROCESS_GROUP
+ else:
+ flags = 0
jdb = subprocess.Popen(
- jdb_cmd,
+ jdb_args,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
creationflags=flags,
+ text=True,
)
+ assert jdb.stdin is not None
+ assert jdb.stdout is not None
# Wait until jdb can communicate with the app. Once it can, the app will
# start polling for a Java debugger (e.g. every 200ms). We need to wait
# a while longer then so that the app notices jdb.
jdb_magic = "__verify_jdb_has_started__"
- jdb.stdin.write('print "{}"\n'.format(jdb_magic).encode("utf-8"))
+ jdb.stdin.write('print "{}"\n'.format(jdb_magic))
saw_magic_str = False
while True:
line = jdb.stdout.readline()
@@ -782,9 +808,10 @@
log("error: did not find magic string in JDB output.")
-def main():
+def main() -> None:
if sys.argv[1:2] == ["--internal-wakeup-pid-with-jdb"]:
- return start_jdb(*sys.argv[2:])
+ start_jdb(sys.argv[2:])
+ return
args = handle_args()
device = args.device
@@ -876,7 +903,7 @@
kill_pids = gdbrunner.get_pids(device, debugger_server_path)
if args.launch:
kill_pids += gdbrunner.get_pids(device, pkg_name)
- kill_pids = map(str, kill_pids)
+ kill_pids = [str(pid) for pid in kill_pids]
if kill_pids:
log("Killing processes: {}".format(", ".join(kill_pids)))
device.shell_nocheck(["run-as", pkg_name, "kill", "-9"] + kill_pids)
diff --git a/ndk-stack.py b/ndkstack.py
similarity index 84%
rename from ndk-stack.py
rename to ndkstack.py
index 2e7aa8f..0721bff 100644
--- a/ndk-stack.py
+++ b/ndkstack.py
@@ -18,7 +18,7 @@
See https://developer.android.com/ndk/guides/ndk-stack for more information.
"""
-from __future__ import print_function
+from __future__ import annotations
import argparse
import os
@@ -28,6 +28,7 @@
import sys
import tempfile
import zipfile
+from pathlib import Path
EXE_SUFFIX = ".exe" if os.name == "nt" else ""
@@ -35,20 +36,20 @@
class TmpDir:
"""Manage temporary directory creation."""
- def __init__(self):
- self._tmp_dir = None
+ def __init__(self) -> None:
+ self._tmp_dir: str | None = None
- def delete(self):
+ def delete(self) -> None:
if self._tmp_dir:
shutil.rmtree(self._tmp_dir)
- def get_directory(self):
+ def get_directory(self) -> str:
if not self._tmp_dir:
self._tmp_dir = tempfile.mkdtemp()
return self._tmp_dir
-def get_ndk_paths():
+def get_ndk_paths() -> tuple[str, str, str]:
"""Parse and find all of the paths of the ndk
Returns: Three values:
@@ -57,17 +58,25 @@
The platform name (eg linux-x86_64).
"""
+ # ndk-stack is installed as a zipped Python application (created with zipapp). The
+ # behavior of __file__ when Python runs a zip file doesn't appear to be documented,
+ # but experimentally for this case it will be:
+ #
+ # $NDK/prebuilt/darwin-x86_64/bin/ndkstack.pyz/ndkstack.py
+ #
# ndk-stack is installed to $NDK/prebuilt/<platform>/bin, so from
# `android-ndk-r18/prebuilt/linux-x86_64/bin/ndk-stack`...
# ...get `android-ndk-r18/`:
- ndk_bin = os.path.dirname(os.path.realpath(__file__))
- ndk_root = os.path.abspath(os.path.join(ndk_bin, "../../.."))
+ path_in_zipped_app = Path(__file__)
+ zip_root = path_in_zipped_app.parent
+ ndk_bin = zip_root.parent
+ ndk_root = ndk_bin.parent.parent.parent
# ...get `linux-x86_64`:
- ndk_host_tag = os.path.basename(os.path.abspath(os.path.join(ndk_bin, "../")))
- return (ndk_root, ndk_bin, ndk_host_tag)
+ ndk_host_tag = ndk_bin.parent.name
+ return (str(ndk_root), str(ndk_bin), str(ndk_host_tag))
-def find_llvm_symbolizer(ndk_root, ndk_bin, ndk_host_tag):
+def find_llvm_symbolizer(ndk_root: str, ndk_bin: str, ndk_host_tag: str) -> str:
"""Finds the NDK llvm-symbolizer(1) binary.
Returns: An absolute path to llvm-symbolizer(1).
@@ -89,7 +98,7 @@
raise OSError("Unable to find llvm-symbolizer")
-def find_readelf(ndk_root, ndk_bin, ndk_host_tag):
+def find_readelf(ndk_root: str, ndk_bin: str, ndk_host_tag: str) -> str | None:
"""Finds the NDK readelf(1) binary.
Returns: An absolute path to readelf(1).
@@ -112,7 +121,7 @@
return None
-def get_build_id(readelf_path, elf_file):
+def get_build_id(readelf_path: str, elf_file: str) -> str | None:
"""Get the GNU build id note from an elf file.
Returns: The build id found or None if there is no build id or the
@@ -129,12 +138,15 @@
return None
-def get_zip_info_from_offset(zip_file, offset):
+def get_zip_info_from_offset(
+ zip_file: zipfile.ZipFile, offset: int
+) -> zipfile.ZipInfo | None:
"""Get the ZipInfo object from a zip file.
Returns: A ZipInfo object found at the 'offset' into the zip file.
Returns None if no file can be found at the given 'offset'.
"""
+ assert zip_file.filename is not None
file_size = os.stat(zip_file.filename).st_size
if offset >= file_size:
@@ -180,7 +192,13 @@
# We're deliberately very loose because NDK users are likely to be
# looking at crashes on ancient OS releases.
# TODO: support asan stacks too?
- _line_re = re.compile(r".* +(#[0-9]+) +pc ([0-9a-f]+) +(([^ ]+).*)")
+ #
+ # The PC will begin with 0x for some traces. That's not the norm, but we've had a
+ # report of traces with that format being provided by the Play console. Presumably
+ # either Play is rewriting those (though I can't imagine why they'd be doing that),
+ # or some OEM has altered the format of the crash output.
+ # See https://github.com/android/ndk/issues/1898.
+ _line_re = re.compile(r".* +(#[0-9]+) +pc (?:0x)?([0-9a-f]+) +(([^ ]+).*)")
_sanitizer_line_re = re.compile(
r".* +(#[0-9]+) +0x[0-9a-f]* +\(([^ ]+)\+0x([0-9a-f]+)\)"
)
@@ -189,16 +207,20 @@
_build_id_re = re.compile(r"\(BuildId:\s+([0-9a-f]+)\)")
@classmethod
- def from_line(cls, line):
+ def from_line(cls, line: str) -> FrameInfo | None:
m = FrameInfo._line_re.match(line)
if m:
- return cls(*m.group(1, 2, 3, 4))
+ num, pc, tail, elf_file = m.group(1, 2, 3, 4)
+ return cls(num, pc, tail, elf_file)
m = FrameInfo._sanitizer_line_re.match(line)
if m:
- return cls(*m.group(1, 3, 2, 2), sanitizer=True)
+ num, pc, tail, elf_file = m.group(1, 3, 2, 2)
+ return cls(num, pc, tail, elf_file, sanitizer=True)
return None
- def __init__(self, num, pc, tail, elf_file, sanitizer=False):
+ def __init__(
+ self, num: str, pc: str, tail: str, elf_file: str, sanitizer: bool = False
+ ) -> None:
self.num = num
self.pc = pc
self.tail = tail
@@ -218,7 +240,7 @@
self.container_file = None
m = FrameInfo._offset_re.search(self.tail)
if m:
- self.offset = int(m.group(1), 16)
+ self.offset: int | None = int(m.group(1), 16)
else:
self.offset = None
m = FrameInfo._build_id_re.search(self.tail)
@@ -227,7 +249,9 @@
else:
self.build_id = None
- def verify_elf_file(self, readelf_path, elf_file_path, display_elf_path):
+ def verify_elf_file(
+ self, readelf_path: str | None, elf_file_path: str, display_elf_path: str
+ ) -> bool:
"""Verify if the elf file is valid.
Returns: True if the elf file exists and build id matches (if it exists).
@@ -244,7 +268,9 @@
return False
return True
- def get_elf_file(self, symbol_dir, readelf_path, tmp_dir):
+ def get_elf_file(
+ self, symbol_dir: str, readelf_path: str | None, tmp_dir: TmpDir
+ ) -> str | None:
"""Get the path to the elf file represented by this frame.
Returns: The path to the elf file if it is valid, or None if
@@ -265,6 +291,7 @@
symbol_dir, os.path.basename(self.container_file)
)
with zipfile.ZipFile(apk_file_path) as zip_file:
+ assert self.offset is not None
zip_info = get_zip_info_from_offset(zip_file, self.offset)
if not zip_info:
return None
@@ -280,6 +307,7 @@
# #08 pc 00cbed9c GoogleCamera.apk (offset 0x6e32000)
apk_file_path = os.path.join(symbol_dir, elf_file)
with zipfile.ZipFile(apk_file_path) as zip_file:
+ assert self.offset is not None
zip_info = get_zip_info_from_offset(zip_file, self.offset)
if not zip_info:
return None
@@ -315,7 +343,7 @@
return None
-def main(argv):
+def main(argv: list[str] | None = None) -> None:
""" "Program entry point."""
parser = argparse.ArgumentParser(
description="Symbolizes Android crashes.",
@@ -357,6 +385,8 @@
symbolize_proc = subprocess.Popen(
symbolize_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE
)
+ assert symbolize_proc.stdin is not None
+ assert symbolize_proc.stdout is not None
banner = "*** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***"
in_crash = False
saw_frame = False
@@ -418,6 +448,8 @@
args.input.close()
tmp_dir.delete()
if symbolize_proc:
+ assert symbolize_proc.stdin is not None
+ assert symbolize_proc.stdout is not None
symbolize_proc.stdin.close()
symbolize_proc.stdout.close()
symbolize_proc.kill()
@@ -425,4 +457,4 @@
if __name__ == "__main__":
- main(sys.argv[1:])
+ main()
diff --git a/parse_elfnote.py b/parse_elfnote.py
index 023cde3..6be45c0 100755
--- a/parse_elfnote.py
+++ b/parse_elfnote.py
@@ -26,12 +26,14 @@
#
from __future__ import division, print_function
+
import argparse
import logging
+import shutil
import struct
import subprocess
import sys
-
+from pathlib import Path
SEC_NAME = ".note.android.ident"
NDK_RESERVED_SIZE = 64
@@ -112,8 +114,8 @@
# Get the offset to a section from the output of readelf
-def get_section_pos(sec_name, file_path):
- cmd = ["readelf", "--sections", "-W", file_path]
+def get_section_pos(readelf: Path, sec_name: str, file_path: str) -> tuple[int, int]:
+ cmd = [readelf, "--sections", "-W", file_path]
output = subprocess.check_output(cmd)
lines = output.decode("utf-8").splitlines()
for line in lines:
@@ -139,6 +141,51 @@
sys.exit("error: failed to find section: {}".format(sec_name))
+def get_ndk_install_path() -> Path | None:
+ try:
+ import ndk.paths # pylint: disable=import-outside-toplevel
+
+ path = ndk.paths.get_install_path()
+ if path.exists():
+ return path
+ return None
+ except ImportError:
+ return None
+
+
+def readelf_from_ndk(ndk: Path) -> Path:
+ if not ndk.exists():
+ raise ValueError(f"--ndk is {ndk} but that path does not exist")
+ prebuilt_dir = ndk / "toolchains/llvm/prebuilt"
+ bins = list(prebuilt_dir.glob("*/bin"))
+ if not bins:
+ raise RuntimeError(f"{prebuilt_dir} contains no */bin")
+ if len(bins) != 1:
+ raise RuntimeError(f"{prebuilt_dir} contains more than one */bin")
+ bin_dir = bins[0]
+
+ readelf = (bin_dir / "llvm-readelf").with_suffix(
+ ".exe" if sys.platform == "win32" else ""
+ )
+ if not readelf.exists():
+ raise RuntimeError(f"{readelf} does not exist")
+ return readelf
+
+
+def find_readelf(ndk: Path | None) -> Path:
+ if ndk is not None:
+ return readelf_from_ndk(ndk)
+ if (install_path := get_ndk_install_path()) is not None:
+ return readelf_from_ndk(install_path)
+ if (readelf := shutil.which("llvm-readelf")) is not None:
+ return Path(readelf)
+ if (readelf := shutil.which("readelf")) is not None:
+ return Path(readelf)
+ raise RuntimeError(
+ "Could not find llvm-readelf or readelf in PATH and could not find any NDK"
+ )
+
+
def parse_args():
"""Parses command line arguments."""
parser = argparse.ArgumentParser()
@@ -151,6 +198,11 @@
default=0,
help="Increase logging verbosity.",
)
+ parser.add_argument(
+ "--ndk",
+ type=Path,
+ help="Path to the NDK. If given, the NDK's llvm-readelf will be used.",
+ )
return parser.parse_args()
@@ -165,8 +217,10 @@
file_path = args.file_path
+ readelf = find_readelf(args.ndk)
+
with open(file_path, "rb") as obj_file:
- (sec_off, sec_size) = get_section_pos(SEC_NAME, file_path)
+ (sec_off, sec_size) = get_section_pos(readelf, SEC_NAME, file_path)
obj_file.seek(sec_off)
sec_data = obj_file.read(sec_size)
@@ -176,7 +230,7 @@
print("----------ABI INFO----------")
if len(sec_data) == 0:
logger().warning("%s section is empty", SEC_NAME)
- for (name, kind, desc) in iterate_notes(sec_data):
+ for name, kind, desc in iterate_notes(sec_data):
if (name, kind) == (b"Android", 1):
dump_android_ident_note(desc)
else:
diff --git a/poetry.lock b/poetry.lock
index 5e38845..6541cd0 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,25 +1,174 @@
+# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand.
+
+[[package]]
+name = "adb"
+version = "0.0.1"
+description = "A Python interface to the Android Debug Bridge."
+category = "dev"
+optional = false
+python-versions = "*"
+files = []
+develop = false
+
+[package.source]
+type = "directory"
+url = "../development/python-packages/adb"
+
+[[package]]
+name = "aiohttp"
+version = "3.8.4"
+description = "Async http client/server framework (asyncio)"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5ce45967538fb747370308d3145aa68a074bdecb4f3a300869590f725ced69c1"},
+ {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b744c33b6f14ca26b7544e8d8aadff6b765a80ad6164fb1a430bbadd593dfb1a"},
+ {file = "aiohttp-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a45865451439eb320784918617ba54b7a377e3501fb70402ab84d38c2cd891b"},
+ {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a86d42d7cba1cec432d47ab13b6637bee393a10f664c425ea7b305d1301ca1a3"},
+ {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee3c36df21b5714d49fc4580247947aa64bcbe2939d1b77b4c8dcb8f6c9faecc"},
+ {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:176a64b24c0935869d5bbc4c96e82f89f643bcdf08ec947701b9dbb3c956b7dd"},
+ {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c844fd628851c0bc309f3c801b3a3d58ce430b2ce5b359cd918a5a76d0b20cb5"},
+ {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5393fb786a9e23e4799fec788e7e735de18052f83682ce2dfcabaf1c00c2c08e"},
+ {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e4b09863aae0dc965c3ef36500d891a3ff495a2ea9ae9171e4519963c12ceefd"},
+ {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:adfbc22e87365a6e564c804c58fc44ff7727deea782d175c33602737b7feadb6"},
+ {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:147ae376f14b55f4f3c2b118b95be50a369b89b38a971e80a17c3fd623f280c9"},
+ {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:eafb3e874816ebe2a92f5e155f17260034c8c341dad1df25672fb710627c6949"},
+ {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6cc15d58053c76eacac5fa9152d7d84b8d67b3fde92709195cb984cfb3475ea"},
+ {file = "aiohttp-3.8.4-cp310-cp310-win32.whl", hash = "sha256:59f029a5f6e2d679296db7bee982bb3d20c088e52a2977e3175faf31d6fb75d1"},
+ {file = "aiohttp-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:fe7ba4a51f33ab275515f66b0a236bcde4fb5561498fe8f898d4e549b2e4509f"},
+ {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d8ef1a630519a26d6760bc695842579cb09e373c5f227a21b67dc3eb16cfea4"},
+ {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b3f2e06a512e94722886c0827bee9807c86a9f698fac6b3aee841fab49bbfb4"},
+ {file = "aiohttp-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a80464982d41b1fbfe3154e440ba4904b71c1a53e9cd584098cd41efdb188ef"},
+ {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b631e26df63e52f7cce0cce6507b7a7f1bc9b0c501fcde69742130b32e8782f"},
+ {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f43255086fe25e36fd5ed8f2ee47477408a73ef00e804cb2b5cba4bf2ac7f5e"},
+ {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d347a172f866cd1d93126d9b239fcbe682acb39b48ee0873c73c933dd23bd0f"},
+ {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3fec6a4cb5551721cdd70473eb009d90935b4063acc5f40905d40ecfea23e05"},
+ {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80a37fe8f7c1e6ce8f2d9c411676e4bc633a8462844e38f46156d07a7d401654"},
+ {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d1e6a862b76f34395a985b3cd39a0d949ca80a70b6ebdea37d3ab39ceea6698a"},
+ {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cd468460eefef601ece4428d3cf4562459157c0f6523db89365202c31b6daebb"},
+ {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:618c901dd3aad4ace71dfa0f5e82e88b46ef57e3239fc7027773cb6d4ed53531"},
+ {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:652b1bff4f15f6287550b4670546a2947f2a4575b6c6dff7760eafb22eacbf0b"},
+ {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80575ba9377c5171407a06d0196b2310b679dc752d02a1fcaa2bc20b235dbf24"},
+ {file = "aiohttp-3.8.4-cp311-cp311-win32.whl", hash = "sha256:bbcf1a76cf6f6dacf2c7f4d2ebd411438c275faa1dc0c68e46eb84eebd05dd7d"},
+ {file = "aiohttp-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:6e74dd54f7239fcffe07913ff8b964e28b712f09846e20de78676ce2a3dc0bfc"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:880e15bb6dad90549b43f796b391cfffd7af373f4646784795e20d92606b7a51"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb96fa6b56bb536c42d6a4a87dfca570ff8e52de2d63cabebfd6fb67049c34b6"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a6cadebe132e90cefa77e45f2d2f1a4b2ce5c6b1bfc1656c1ddafcfe4ba8131"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f352b62b45dff37b55ddd7b9c0c8672c4dd2eb9c0f9c11d395075a84e2c40f75"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ab43061a0c81198d88f39aaf90dae9a7744620978f7ef3e3708339b8ed2ef01"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9cb1565a7ad52e096a6988e2ee0397f72fe056dadf75d17fa6b5aebaea05622"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:1b3ea7edd2d24538959c1c1abf97c744d879d4e541d38305f9bd7d9b10c9ec41"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7c7837fe8037e96b6dd5cfcf47263c1620a9d332a87ec06a6ca4564e56bd0f36"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:3b90467ebc3d9fa5b0f9b6489dfb2c304a1db7b9946fa92aa76a831b9d587e99"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:cab9401de3ea52b4b4c6971db5fb5c999bd4260898af972bf23de1c6b5dd9d71"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d1f9282c5f2b5e241034a009779e7b2a1aa045f667ff521e7948ea9b56e0c5ff"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-win32.whl", hash = "sha256:5e14f25765a578a0a634d5f0cd1e2c3f53964553a00347998dfdf96b8137f777"},
+ {file = "aiohttp-3.8.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4c745b109057e7e5f1848c689ee4fb3a016c8d4d92da52b312f8a509f83aa05e"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:aede4df4eeb926c8fa70de46c340a1bc2c6079e1c40ccf7b0eae1313ffd33519"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ddaae3f3d32fc2cb4c53fab020b69a05c8ab1f02e0e59665c6f7a0d3a5be54f"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4eb3b82ca349cf6fadcdc7abcc8b3a50ab74a62e9113ab7a8ebc268aad35bb9"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bcb89336efa095ea21b30f9e686763f2be4478f1b0a616969551982c4ee4c3b"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c08e8ed6fa3d477e501ec9db169bfac8140e830aa372d77e4a43084d8dd91ab"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c6cd05ea06daca6ad6a4ca3ba7fe7dc5b5de063ff4daec6170ec0f9979f6c332"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7a00a9ed8d6e725b55ef98b1b35c88013245f35f68b1b12c5cd4100dddac333"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:de04b491d0e5007ee1b63a309956eaed959a49f5bb4e84b26c8f5d49de140fa9"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:40653609b3bf50611356e6b6554e3a331f6879fa7116f3959b20e3528783e699"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dbf3a08a06b3f433013c143ebd72c15cac33d2914b8ea4bea7ac2c23578815d6"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854f422ac44af92bfe172d8e73229c270dc09b96535e8a548f99c84f82dde241"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-win32.whl", hash = "sha256:aeb29c84bb53a84b1a81c6c09d24cf33bb8432cc5c39979021cc0f98c1292a1a"},
+ {file = "aiohttp-3.8.4-cp37-cp37m-win_amd64.whl", hash = "sha256:db3fc6120bce9f446d13b1b834ea5b15341ca9ff3f335e4a951a6ead31105480"},
+ {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fabb87dd8850ef0f7fe2b366d44b77d7e6fa2ea87861ab3844da99291e81e60f"},
+ {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91f6d540163f90bbaef9387e65f18f73ffd7c79f5225ac3d3f61df7b0d01ad15"},
+ {file = "aiohttp-3.8.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d265f09a75a79a788237d7f9054f929ced2e69eb0bb79de3798c468d8a90f945"},
+ {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d89efa095ca7d442a6d0cbc755f9e08190ba40069b235c9886a8763b03785da"},
+ {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dac314662f4e2aa5009977b652d9b8db7121b46c38f2073bfeed9f4049732cd"},
+ {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe11310ae1e4cd560035598c3f29d86cef39a83d244c7466f95c27ae04850f10"},
+ {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ddb2a2026c3f6a68c3998a6c47ab6795e4127315d2e35a09997da21865757f8"},
+ {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e75b89ac3bd27d2d043b234aa7b734c38ba1b0e43f07787130a0ecac1e12228a"},
+ {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6e601588f2b502c93c30cd5a45bfc665faaf37bbe835b7cfd461753068232074"},
+ {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a5d794d1ae64e7753e405ba58e08fcfa73e3fad93ef9b7e31112ef3c9a0efb52"},
+ {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a1f4689c9a1462f3df0a1f7e797791cd6b124ddbee2b570d34e7f38ade0e2c71"},
+ {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3032dcb1c35bc330134a5b8a5d4f68c1a87252dfc6e1262c65a7e30e62298275"},
+ {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8189c56eb0ddbb95bfadb8f60ea1b22fcfa659396ea36f6adcc521213cd7b44d"},
+ {file = "aiohttp-3.8.4-cp38-cp38-win32.whl", hash = "sha256:33587f26dcee66efb2fff3c177547bd0449ab7edf1b73a7f5dea1e38609a0c54"},
+ {file = "aiohttp-3.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:e595432ac259af2d4630008bf638873d69346372d38255774c0e286951e8b79f"},
+ {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5a7bdf9e57126dc345b683c3632e8ba317c31d2a41acd5800c10640387d193ed"},
+ {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:22f6eab15b6db242499a16de87939a342f5a950ad0abaf1532038e2ce7d31567"},
+ {file = "aiohttp-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7235604476a76ef249bd64cb8274ed24ccf6995c4a8b51a237005ee7a57e8643"},
+ {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea9eb976ffdd79d0e893869cfe179a8f60f152d42cb64622fca418cd9b18dc2a"},
+ {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92c0cea74a2a81c4c76b62ea1cac163ecb20fb3ba3a75c909b9fa71b4ad493cf"},
+ {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493f5bc2f8307286b7799c6d899d388bbaa7dfa6c4caf4f97ef7521b9cb13719"},
+ {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a63f03189a6fa7c900226e3ef5ba4d3bd047e18f445e69adbd65af433add5a2"},
+ {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10c8cefcff98fd9168cdd86c4da8b84baaa90bf2da2269c6161984e6737bf23e"},
+ {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bca5f24726e2919de94f047739d0a4fc01372801a3672708260546aa2601bf57"},
+ {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:03baa76b730e4e15a45f81dfe29a8d910314143414e528737f8589ec60cf7391"},
+ {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8c29c77cc57e40f84acef9bfb904373a4e89a4e8b74e71aa8075c021ec9078c2"},
+ {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:03543dcf98a6619254b409be2d22b51f21ec66272be4ebda7b04e6412e4b2e14"},
+ {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17b79c2963db82086229012cff93ea55196ed31f6493bb1ccd2c62f1724324e4"},
+ {file = "aiohttp-3.8.4-cp39-cp39-win32.whl", hash = "sha256:34ce9f93a4a68d1272d26030655dd1b58ff727b3ed2a33d80ec433561b03d67a"},
+ {file = "aiohttp-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:41a86a69bb63bb2fc3dc9ad5ea9f10f1c9c8e282b471931be0268ddd09430b04"},
+ {file = "aiohttp-3.8.4.tar.gz", hash = "sha256:bf2e1a9162c1e441bf805a1fd166e249d574ca04e03b34f97e2928769e91ab5c"},
+]
+
+[package.dependencies]
+aiosignal = ">=1.1.2"
+async-timeout = ">=4.0.0a3,<5.0"
+attrs = ">=17.3.0"
+charset-normalizer = ">=2.0,<4.0"
+frozenlist = ">=1.1.1"
+multidict = ">=4.5,<7.0"
+yarl = ">=1.0,<2.0"
+
+[package.extras]
+speedups = ["Brotli", "aiodns", "cchardet"]
+
+[[package]]
+name = "aiosignal"
+version = "1.3.1"
+description = "aiosignal: a list of registered asynchronous callbacks"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
+ {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
+]
+
+[package.dependencies]
+frozenlist = ">=1.1.0"
+
[[package]]
name = "astroid"
-version = "2.12.5"
+version = "2.15.4"
description = "An abstract syntax tree for Python with inference support."
category = "dev"
optional = false
python-versions = ">=3.7.2"
+files = [
+ {file = "astroid-2.15.4-py3-none-any.whl", hash = "sha256:a1b8543ef9d36ea777194bc9b17f5f8678d2c56ee6a45b2c2f17eec96f242347"},
+ {file = "astroid-2.15.4.tar.gz", hash = "sha256:c81e1c7fbac615037744d067a9bb5f9aeb655edf59b63ee8b59585475d6f80d8"},
+]
[package.dependencies]
lazy-object-proxy = ">=1.4.0"
+typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""}
wrapt = [
{version = ">=1.11,<2", markers = "python_version < \"3.11\""},
{version = ">=1.14,<2", markers = "python_version >= \"3.11\""},
]
[[package]]
-name = "atomicwrites"
-version = "1.4.1"
-description = "Atomic file writes."
+name = "async-timeout"
+version = "4.0.2"
+description = "Timeout context manager for asyncio programs"
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.6"
+files = [
+ {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
+ {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"},
+]
[[package]]
name = "attrs"
@@ -28,27 +177,59 @@
category = "dev"
optional = false
python-versions = ">=3.5"
+files = [
+ {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
+ {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
+]
[package.extras]
-dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
-docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
-tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
-tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"]
+dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
+docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
+tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
+tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
[[package]]
name = "black"
-version = "22.6.0"
+version = "23.3.0"
description = "The uncompromising code formatter."
category = "dev"
optional = false
-python-versions = ">=3.6.2"
+python-versions = ">=3.7"
+files = [
+ {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"},
+ {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"},
+ {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"},
+ {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"},
+ {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"},
+ {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"},
+ {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"},
+ {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"},
+ {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"},
+ {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"},
+ {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"},
+ {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"},
+ {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"},
+ {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"},
+ {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"},
+ {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"},
+ {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"},
+ {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"},
+ {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"},
+ {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"},
+ {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"},
+ {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"},
+ {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"},
+ {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"},
+ {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"},
+]
[package.dependencies]
click = ">=8.0.0"
mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
pathspec = ">=0.9.0"
platformdirs = ">=2"
-tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
[package.extras]
colorama = ["colorama (>=0.4.3)"]
@@ -57,12 +238,101 @@
uvloop = ["uvloop (>=0.15.2)"]
[[package]]
+name = "charset-normalizer"
+version = "3.1.0"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+category = "dev"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"},
+ {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"},
+ {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"},
+ {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"},
+ {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"},
+ {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"},
+ {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"},
+]
+
+[[package]]
name = "click"
version = "8.1.3"
description = "Composable command line interface toolkit"
category = "dev"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
+ {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
+]
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
@@ -74,47 +344,308 @@
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"},
+ {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"},
+]
+
+[[package]]
+name = "coverage"
+version = "7.3.1"
+description = "Code coverage measurement for Python"
+category = "dev"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "coverage-7.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd0f7429ecfd1ff597389907045ff209c8fdb5b013d38cfa7c60728cb484b6e3"},
+ {file = "coverage-7.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:966f10df9b2b2115da87f50f6a248e313c72a668248be1b9060ce935c871f276"},
+ {file = "coverage-7.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0575c37e207bb9b98b6cf72fdaaa18ac909fb3d153083400c2d48e2e6d28bd8e"},
+ {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:245c5a99254e83875c7fed8b8b2536f040997a9b76ac4c1da5bff398c06e860f"},
+ {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c96dd7798d83b960afc6c1feb9e5af537fc4908852ef025600374ff1a017392"},
+ {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:de30c1aa80f30af0f6b2058a91505ea6e36d6535d437520067f525f7df123887"},
+ {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:50dd1e2dd13dbbd856ffef69196781edff26c800a74f070d3b3e3389cab2600d"},
+ {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9c0c19f70d30219113b18fe07e372b244fb2a773d4afde29d5a2f7930765136"},
+ {file = "coverage-7.3.1-cp310-cp310-win32.whl", hash = "sha256:770f143980cc16eb601ccfd571846e89a5fe4c03b4193f2e485268f224ab602f"},
+ {file = "coverage-7.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdd088c00c39a27cfa5329349cc763a48761fdc785879220d54eb785c8a38520"},
+ {file = "coverage-7.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74bb470399dc1989b535cb41f5ca7ab2af561e40def22d7e188e0a445e7639e3"},
+ {file = "coverage-7.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:025ded371f1ca280c035d91b43252adbb04d2aea4c7105252d3cbc227f03b375"},
+ {file = "coverage-7.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6191b3a6ad3e09b6cfd75b45c6aeeffe7e3b0ad46b268345d159b8df8d835f9"},
+ {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb0b188f30e41ddd659a529e385470aa6782f3b412f860ce22b2491c89b8593"},
+ {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c8f0df9dfd8ff745bccff75867d63ef336e57cc22b2908ee725cc552689ec8"},
+ {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7eb3cd48d54b9bd0e73026dedce44773214064be93611deab0b6a43158c3d5a0"},
+ {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ac3c5b7e75acac31e490b7851595212ed951889918d398b7afa12736c85e13ce"},
+ {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b4ee7080878077af0afa7238df1b967f00dc10763f6e1b66f5cced4abebb0a3"},
+ {file = "coverage-7.3.1-cp311-cp311-win32.whl", hash = "sha256:229c0dd2ccf956bf5aeede7e3131ca48b65beacde2029f0361b54bf93d36f45a"},
+ {file = "coverage-7.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c6f55d38818ca9596dc9019eae19a47410d5322408140d9a0076001a3dcb938c"},
+ {file = "coverage-7.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5289490dd1c3bb86de4730a92261ae66ea8d44b79ed3cc26464f4c2cde581fbc"},
+ {file = "coverage-7.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca833941ec701fda15414be400c3259479bfde7ae6d806b69e63b3dc423b1832"},
+ {file = "coverage-7.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd694e19c031733e446c8024dedd12a00cda87e1c10bd7b8539a87963685e969"},
+ {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aab8e9464c00da5cb9c536150b7fbcd8850d376d1151741dd0d16dfe1ba4fd26"},
+ {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d38444efffd5b056fcc026c1e8d862191881143c3aa80bb11fcf9dca9ae204"},
+ {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8a07b692129b8a14ad7a37941a3029c291254feb7a4237f245cfae2de78de037"},
+ {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2829c65c8faaf55b868ed7af3c7477b76b1c6ebeee99a28f59a2cb5907a45760"},
+ {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f111a7d85658ea52ffad7084088277135ec5f368457275fc57f11cebb15607f"},
+ {file = "coverage-7.3.1-cp312-cp312-win32.whl", hash = "sha256:c397c70cd20f6df7d2a52283857af622d5f23300c4ca8e5bd8c7a543825baa5a"},
+ {file = "coverage-7.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:5ae4c6da8b3d123500f9525b50bf0168023313963e0e2e814badf9000dd6ef92"},
+ {file = "coverage-7.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca70466ca3a17460e8fc9cea7123c8cbef5ada4be3140a1ef8f7b63f2f37108f"},
+ {file = "coverage-7.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f2781fd3cabc28278dc982a352f50c81c09a1a500cc2086dc4249853ea96b981"},
+ {file = "coverage-7.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6407424621f40205bbe6325686417e5e552f6b2dba3535dd1f90afc88a61d465"},
+ {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04312b036580ec505f2b77cbbdfb15137d5efdfade09156961f5277149f5e344"},
+ {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9ad38204887349853d7c313f53a7b1c210ce138c73859e925bc4e5d8fc18e7"},
+ {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:53669b79f3d599da95a0afbef039ac0fadbb236532feb042c534fbb81b1a4e40"},
+ {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:614f1f98b84eb256e4f35e726bfe5ca82349f8dfa576faabf8a49ca09e630086"},
+ {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1a317fdf5c122ad642db8a97964733ab7c3cf6009e1a8ae8821089993f175ff"},
+ {file = "coverage-7.3.1-cp38-cp38-win32.whl", hash = "sha256:defbbb51121189722420a208957e26e49809feafca6afeef325df66c39c4fdb3"},
+ {file = "coverage-7.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:f4f456590eefb6e1b3c9ea6328c1e9fa0f1006e7481179d749b3376fc793478e"},
+ {file = "coverage-7.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f12d8b11a54f32688b165fd1a788c408f927b0960984b899be7e4c190ae758f1"},
+ {file = "coverage-7.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f09195dda68d94a53123883de75bb97b0e35f5f6f9f3aa5bf6e496da718f0cb6"},
+ {file = "coverage-7.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6601a60318f9c3945be6ea0f2a80571f4299b6801716f8a6e4846892737ebe4"},
+ {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d156269718670d00a3b06db2288b48527fc5f36859425ff7cec07c6b367745"},
+ {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:636a8ac0b044cfeccae76a36f3b18264edcc810a76a49884b96dd744613ec0b7"},
+ {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5d991e13ad2ed3aced177f524e4d670f304c8233edad3210e02c465351f785a0"},
+ {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:586649ada7cf139445da386ab6f8ef00e6172f11a939fc3b2b7e7c9082052fa0"},
+ {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4aba512a15a3e1e4fdbfed2f5392ec221434a614cc68100ca99dcad7af29f3f8"},
+ {file = "coverage-7.3.1-cp39-cp39-win32.whl", hash = "sha256:6bc6f3f4692d806831c136c5acad5ccedd0262aa44c087c46b7101c77e139140"},
+ {file = "coverage-7.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:553d7094cb27db58ea91332e8b5681bac107e7242c23f7629ab1316ee73c4981"},
+ {file = "coverage-7.3.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:220eb51f5fb38dfdb7e5d54284ca4d0cd70ddac047d750111a68ab1798945194"},
+ {file = "coverage-7.3.1.tar.gz", hash = "sha256:6cb7fe1581deb67b782c153136541e20901aa312ceedaf1467dcb35255787952"},
+]
+
+[package.dependencies]
+tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
+
+[package.extras]
+toml = ["tomli"]
[[package]]
name = "dill"
-version = "0.3.5.1"
+version = "0.3.6"
description = "serialize all of python"
category = "dev"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*"
+python-versions = ">=3.7"
+files = [
+ {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"},
+ {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"},
+]
[package.extras]
graph = ["objgraph (>=1.7.2)"]
[[package]]
-name = "iniconfig"
+name = "exceptiongroup"
version = "1.1.1"
-description = "iniconfig: brain-dead simple config-ini parsing"
+description = "Backport of PEP 654 (exception groups)"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"},
+ {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"},
+]
+
+[package.extras]
+test = ["pytest (>=6)"]
+
+[[package]]
+name = "fetchartifact"
+version = "0.1.0"
+description = "Python library for http://go/fetchartifact."
+category = "dev"
+optional = false
+python-versions = "^3.9"
+files = []
+develop = true
+
+[package.source]
+type = "directory"
+url = "../development/python-packages/fetchartifact"
+
+[[package]]
+name = "frozenlist"
+version = "1.3.3"
+description = "A list-like structure which implements collections.abc.MutableSequence"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"},
+ {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"},
+ {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"},
+ {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"},
+ {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"},
+ {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"},
+ {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"},
+ {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"},
+ {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"},
+ {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"},
+ {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"},
+ {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"},
+ {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"},
+ {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"},
+ {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"},
+ {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"},
+ {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"},
+ {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"},
+ {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"},
+ {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"},
+ {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"},
+ {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"},
+ {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"},
+ {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"},
+ {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"},
+ {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"},
+ {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"},
+ {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"},
+ {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"},
+ {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"},
+]
+
+[[package]]
+name = "gdbrunner"
+version = "0.0.1"
+description = "Common helpers of ndk-gdb and gdbclient."
category = "dev"
optional = false
python-versions = "*"
+files = []
+develop = false
+
+[package.source]
+type = "directory"
+url = "../development/python-packages/gdbrunner"
+
+[[package]]
+name = "idna"
+version = "3.4"
+description = "Internationalized Domain Names in Applications (IDNA)"
+category = "dev"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
+ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
[[package]]
name = "isort"
-version = "5.10.1"
+version = "5.12.0"
description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
-python-versions = ">=3.6.1,<4.0"
+python-versions = ">=3.8.0"
+files = [
+ {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"},
+ {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"},
+]
[package.extras]
-pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
-requirements_deprecated_finder = ["pipreqs", "pip-api"]
-colors = ["colorama (>=0.4.3,<0.5.0)"]
+colors = ["colorama (>=0.4.3)"]
+pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"]
plugins = ["setuptools"]
+requirements-deprecated-finder = ["pip-api", "pipreqs"]
[[package]]
name = "lazy-object-proxy"
-version = "1.7.1"
+version = "1.9.0"
description = "A fast and thorough lazy object proxy."
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
+files = [
+ {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"},
+ {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"},
+ {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"},
+ {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"},
+ {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"},
+ {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"},
+ {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"},
+ {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"},
+ {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"},
+ {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"},
+ {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"},
+ {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"},
+ {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"},
+ {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"},
+ {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"},
+ {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"},
+ {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"},
+ {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"},
+ {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"},
+ {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"},
+ {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"},
+ {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"},
+ {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"},
+ {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"},
+ {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"},
+ {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"},
+ {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"},
+ {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"},
+ {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"},
+ {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"},
+ {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"},
+ {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"},
+ {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"},
+ {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"},
+ {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"},
+ {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"},
+]
[[package]]
name = "mccabe"
@@ -123,51 +654,177 @@
category = "dev"
optional = false
python-versions = ">=3.6"
+files = [
+ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
+ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
+]
+
+[[package]]
+name = "multidict"
+version = "6.0.4"
+description = "multidict implementation"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"},
+ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"},
+ {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"},
+ {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"},
+ {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"},
+ {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"},
+ {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"},
+ {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"},
+ {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"},
+ {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"},
+ {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"},
+ {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"},
+ {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"},
+ {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"},
+ {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"},
+ {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"},
+ {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"},
+ {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"},
+ {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"},
+ {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"},
+ {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"},
+ {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"},
+ {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"},
+ {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"},
+ {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"},
+ {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"},
+ {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"},
+ {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"},
+]
[[package]]
name = "mypy"
-version = "0.971"
+version = "1.2.0"
description = "Optional static typing for Python"
category = "dev"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
+files = [
+ {file = "mypy-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:701189408b460a2ff42b984e6bd45c3f41f0ac9f5f58b8873bbedc511900086d"},
+ {file = "mypy-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fe91be1c51c90e2afe6827601ca14353bbf3953f343c2129fa1e247d55fd95ba"},
+ {file = "mypy-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d26b513225ffd3eacece727f4387bdce6469192ef029ca9dd469940158bc89e"},
+ {file = "mypy-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a2d219775a120581a0ae8ca392b31f238d452729adbcb6892fa89688cb8306a"},
+ {file = "mypy-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:2e93a8a553e0394b26c4ca683923b85a69f7ccdc0139e6acd1354cc884fe0128"},
+ {file = "mypy-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3efde4af6f2d3ccf58ae825495dbb8d74abd6d176ee686ce2ab19bd025273f41"},
+ {file = "mypy-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:695c45cea7e8abb6f088a34a6034b1d273122e5530aeebb9c09626cea6dca4cb"},
+ {file = "mypy-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0e9464a0af6715852267bf29c9553e4555b61f5904a4fc538547a4d67617937"},
+ {file = "mypy-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8293a216e902ac12779eb7a08f2bc39ec6c878d7c6025aa59464e0c4c16f7eb9"},
+ {file = "mypy-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:f46af8d162f3d470d8ffc997aaf7a269996d205f9d746124a179d3abe05ac602"},
+ {file = "mypy-1.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:031fc69c9a7e12bcc5660b74122ed84b3f1c505e762cc4296884096c6d8ee140"},
+ {file = "mypy-1.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:390bc685ec209ada4e9d35068ac6988c60160b2b703072d2850457b62499e336"},
+ {file = "mypy-1.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4b41412df69ec06ab141808d12e0bf2823717b1c363bd77b4c0820feaa37249e"},
+ {file = "mypy-1.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4e4a682b3f2489d218751981639cffc4e281d548f9d517addfd5a2917ac78119"},
+ {file = "mypy-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a197ad3a774f8e74f21e428f0de7f60ad26a8d23437b69638aac2764d1e06a6a"},
+ {file = "mypy-1.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c9a084bce1061e55cdc0493a2ad890375af359c766b8ac311ac8120d3a472950"},
+ {file = "mypy-1.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaeaa0888b7f3ccb7bcd40b50497ca30923dba14f385bde4af78fac713d6d6f6"},
+ {file = "mypy-1.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bea55fc25b96c53affab852ad94bf111a3083bc1d8b0c76a61dd101d8a388cf5"},
+ {file = "mypy-1.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:4c8d8c6b80aa4a1689f2a179d31d86ae1367ea4a12855cc13aa3ba24bb36b2d8"},
+ {file = "mypy-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70894c5345bea98321a2fe84df35f43ee7bb0feec117a71420c60459fc3e1eed"},
+ {file = "mypy-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4a99fe1768925e4a139aace8f3fb66db3576ee1c30b9c0f70f744ead7e329c9f"},
+ {file = "mypy-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023fe9e618182ca6317ae89833ba422c411469156b690fde6a315ad10695a521"},
+ {file = "mypy-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d19f1a239d59f10fdc31263d48b7937c585810288376671eaf75380b074f238"},
+ {file = "mypy-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:2de7babe398cb7a85ac7f1fd5c42f396c215ab3eff731b4d761d68d0f6a80f48"},
+ {file = "mypy-1.2.0-py3-none-any.whl", hash = "sha256:d8e9187bfcd5ffedbe87403195e1fc340189a68463903c39e2b63307c9fa0394"},
+ {file = "mypy-1.2.0.tar.gz", hash = "sha256:f70a40410d774ae23fcb4afbbeca652905a04de7948eaf0b1789c8d1426b72d1"},
+]
[package.dependencies]
-mypy-extensions = ">=0.4.3"
+mypy-extensions = ">=1.0.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = ">=3.10"
[package.extras]
dmypy = ["psutil (>=4.0)"]
+install-types = ["pip"]
python2 = ["typed-ast (>=1.4.0,<2)"]
reports = ["lxml"]
[[package]]
name = "mypy-extensions"
-version = "0.4.3"
-description = "Experimental type system extensions for programs checked with the mypy typechecker."
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
category = "dev"
optional = false
-python-versions = "*"
+python-versions = ">=3.5"
+files = [
+ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
+]
[[package]]
name = "packaging"
-version = "21.3"
+version = "23.1"
description = "Core utilities for Python packages"
category = "dev"
optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
+python-versions = ">=3.7"
+files = [
+ {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"},
+ {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"},
+]
[[package]]
name = "pathspec"
-version = "0.10.0"
+version = "0.11.1"
description = "Utility library for gitignore style pattern matching of file paths."
category = "dev"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"},
+ {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"},
+]
[[package]]
name = "platformdirs"
@@ -176,10 +833,14 @@
category = "dev"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
+ {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
+]
[package.extras]
-docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
-test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]
+docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"]
+test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"]
[[package]]
name = "pluggy"
@@ -188,31 +849,34 @@
category = "dev"
optional = false
python-versions = ">=3.6"
+files = [
+ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
+ {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
+]
[package.extras]
-testing = ["pytest-benchmark", "pytest"]
-dev = ["tox", "pre-commit"]
-
-[[package]]
-name = "py"
-version = "1.11.0"
-description = "library with cross-python path, ini-parsing, io, code, log facilities"
-category = "dev"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+dev = ["pre-commit", "tox"]
+testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "pylint"
-version = "2.15.0"
+version = "2.17.3"
description = "python code static checker"
category = "dev"
optional = false
python-versions = ">=3.7.2"
+files = [
+ {file = "pylint-2.17.3-py3-none-any.whl", hash = "sha256:a6cbb4c6e96eab4a3c7de7c6383c512478f58f88d95764507d84c899d656a89a"},
+ {file = "pylint-2.17.3.tar.gz", hash = "sha256:761907349e699f8afdcd56c4fe02f3021ab5b3a0fc26d19a9bfdc66c7d0d5cd5"},
+]
[package.dependencies]
-astroid = ">=2.12.4,<=2.14.0-dev0"
+astroid = ">=2.15.4,<=2.17.0-dev0"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
-dill = ">=0.2"
+dill = [
+ {version = ">=0.2", markers = "python_version < \"3.11\""},
+ {version = ">=0.3.6", markers = "python_version >= \"3.11\""},
+]
isort = ">=4.2.5,<6"
mccabe = ">=0.6,<0.8"
platformdirs = ">=2.2.0"
@@ -224,36 +888,46 @@
testutils = ["gitpython (>3)"]
[[package]]
-name = "pyparsing"
-version = "3.0.9"
-description = "pyparsing module - Classes and methods to define and execute parsing grammars"
-category = "dev"
-optional = false
-python-versions = ">=3.6.8"
-
-[package.extras]
-diagrams = ["railroad-diagrams", "jinja2"]
-
-[[package]]
name = "pytest"
-version = "7.1.2"
+version = "7.3.1"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"},
+ {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"},
+]
[package.dependencies]
-atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
-attrs = ">=19.2.0"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
+exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
-py = ">=1.8.2"
-tomli = ">=1.0.0"
+tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
[package.extras]
-testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
+testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
+
+[[package]]
+name = "pytest-cov"
+version = "4.1.0"
+description = "Pytest plugin for measuring coverage."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
+ {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
+]
+
+[package.dependencies]
+coverage = {version = ">=5.2.1", extras = ["toml"]}
+pytest = ">=4.6"
+
+[package.extras]
+testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
[[package]]
name = "tomli"
@@ -262,59 +936,209 @@
category = "dev"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+]
[[package]]
name = "tomlkit"
-version = "0.11.4"
+version = "0.11.8"
description = "Style preserving TOML library"
category = "dev"
optional = false
-python-versions = ">=3.6,<4.0"
+python-versions = ">=3.7"
+files = [
+ {file = "tomlkit-0.11.8-py3-none-any.whl", hash = "sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171"},
+ {file = "tomlkit-0.11.8.tar.gz", hash = "sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3"},
+]
[[package]]
name = "typing-extensions"
-version = "4.3.0"
+version = "4.5.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "dev"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"},
+ {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"},
+]
[[package]]
name = "wrapt"
-version = "1.14.1"
+version = "1.15.0"
description = "Module for decorators, wrappers and monkey patching."
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
+files = [
+ {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"},
+ {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"},
+ {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"},
+ {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"},
+ {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"},
+ {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"},
+ {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"},
+ {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"},
+ {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"},
+ {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"},
+ {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"},
+ {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"},
+ {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"},
+ {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"},
+ {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"},
+ {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"},
+ {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"},
+ {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"},
+ {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"},
+ {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"},
+ {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"},
+ {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"},
+ {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"},
+ {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"},
+ {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"},
+ {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"},
+ {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"},
+ {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"},
+ {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"},
+ {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"},
+ {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"},
+ {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"},
+ {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"},
+ {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"},
+ {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"},
+ {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"},
+ {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"},
+ {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"},
+ {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"},
+ {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"},
+ {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"},
+ {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"},
+ {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"},
+ {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"},
+ {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"},
+ {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"},
+ {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"},
+ {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"},
+ {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"},
+ {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"},
+ {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"},
+ {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"},
+ {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"},
+ {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"},
+ {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"},
+ {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"},
+ {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"},
+ {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"},
+ {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"},
+ {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"},
+ {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"},
+ {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"},
+ {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"},
+ {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"},
+ {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"},
+ {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"},
+ {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"},
+ {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"},
+ {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"},
+ {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"},
+ {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"},
+ {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"},
+ {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"},
+ {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"},
+ {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"},
+]
+
+[[package]]
+name = "yarl"
+version = "1.8.2"
+description = "Yet another URL library"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5"},
+ {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863"},
+ {file = "yarl-1.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe"},
+ {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e"},
+ {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996"},
+ {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef"},
+ {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8"},
+ {file = "yarl-1.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6"},
+ {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146"},
+ {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f"},
+ {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826"},
+ {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3"},
+ {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80"},
+ {file = "yarl-1.8.2-cp310-cp310-win32.whl", hash = "sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42"},
+ {file = "yarl-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574"},
+ {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634"},
+ {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd"},
+ {file = "yarl-1.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581"},
+ {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a"},
+ {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf"},
+ {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee"},
+ {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76"},
+ {file = "yarl-1.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b"},
+ {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a"},
+ {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6"},
+ {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4"},
+ {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c"},
+ {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2"},
+ {file = "yarl-1.8.2-cp311-cp311-win32.whl", hash = "sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b"},
+ {file = "yarl-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c"},
+ {file = "yarl-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef"},
+ {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1"},
+ {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077"},
+ {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2"},
+ {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89"},
+ {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c"},
+ {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0"},
+ {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739"},
+ {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b"},
+ {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7"},
+ {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37"},
+ {file = "yarl-1.8.2-cp37-cp37m-win32.whl", hash = "sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89"},
+ {file = "yarl-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5"},
+ {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1"},
+ {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918"},
+ {file = "yarl-1.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229"},
+ {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e"},
+ {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e"},
+ {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1"},
+ {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3"},
+ {file = "yarl-1.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901"},
+ {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0"},
+ {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05"},
+ {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d"},
+ {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c"},
+ {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946"},
+ {file = "yarl-1.8.2-cp38-cp38-win32.whl", hash = "sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165"},
+ {file = "yarl-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f"},
+ {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8"},
+ {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf"},
+ {file = "yarl-1.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77"},
+ {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87"},
+ {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0"},
+ {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2"},
+ {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08"},
+ {file = "yarl-1.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d"},
+ {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951"},
+ {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220"},
+ {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06"},
+ {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516"},
+ {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588"},
+ {file = "yarl-1.8.2-cp39-cp39-win32.whl", hash = "sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83"},
+ {file = "yarl-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778"},
+ {file = "yarl-1.8.2.tar.gz", hash = "sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562"},
+]
+
+[package.dependencies]
+idna = ">=2.0"
+multidict = ">=4.0"
[metadata]
-lock-version = "1.1"
+lock-version = "2.0"
python-versions = "^3.10"
-content-hash = "cf914ec389dfc14439554f69f91be1ebba2de3b93a65f63431e148812b372d2f"
-
-[metadata.files]
-astroid = []
-atomicwrites = []
-attrs = []
-black = []
-click = []
-colorama = []
-dill = []
-iniconfig = []
-isort = []
-lazy-object-proxy = []
-mccabe = []
-mypy = []
-mypy-extensions = []
-packaging = []
-pathspec = []
-platformdirs = []
-pluggy = []
-py = []
-pylint = []
-pyparsing = []
-pytest = []
-tomli = []
-tomlkit = []
-typing-extensions = []
-wrapt = []
+content-hash = "3bf487b7609dd8d19bd2bd7a20b29841c5b95e53438749362de15cadce2c3f7a"
diff --git a/pylintrc b/pylintrc
deleted file mode 100644
index e6aa6c4..0000000
--- a/pylintrc
+++ /dev/null
@@ -1,35 +0,0 @@
-[MASTER]
-jobs = 0
-
-[MESSAGES CONTROL]
-# Some of these should be cleaned up, but disable them for now so I can check
-# this in. The too-many-* refactoring warnings will probably remain on for all
-# time, but naming and docstrings can and should be fixed.
-disable=
- assignment-from-none,
- consider-using-f-string,
- consider-using-with,
- duplicate-code,
- fixme,design,
- invalid-name,
- line-too-long,
- locally-disabled,
- missing-docstring,
- too-many-lines,
- unspecified-encoding,
-
-ignore=ndk-gdb.py
-
-[BASIC]
-good-names=i,j,k,ex,Run,_
-
-[VARIABLES]
-dummy-variables-rgx=_
-
-[SIMILARITIES]
-ignore-imports=yes
-
-[FORMAT]
-ignore-long-lines=(?x)(
- ^\s*(\#\ )?<?https?://\S+>?$|
- ^\s*\#\ \S+$)
diff --git a/pyproject.toml b/pyproject.toml
index 4c4900f..d7ff123 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,17 +5,101 @@
authors = ["The Android Open Source Project"]
license = "Apache-2.0"
+[tool.poetry.scripts]
+update-prebuilt-ndk = "ndk.tools.ndkgitprebuilts:App.main"
+
[tool.poetry.dependencies]
python = "^3.10"
-[tool.poetry.dev-dependencies]
-# Poetry handles 0.* differently than other versions. ^0.1 may not upgrade to
-# 0.2, but ^1.1 may upgrade to 1.2.
-# https://python-poetry.org/docs/dependency-specification/
-mypy = "^0.971"
-pylint = "^2.15.0"
-pytest = "^7.1.2"
-black = "^22.6.0"
+[tool.poetry.group.dev.dependencies]
+click = "^8.1.3"
+fetchartifact = {path = "../development/python-packages/fetchartifact", develop = true}
+aiohttp = "^3.8.4"
+mypy = "^1.2.0"
+pylint = "^2.17.3"
+pytest = "^7.3.1"
+black = "^23.3.0"
+isort = "^5.12.0"
+gdbrunner = {path = "../development/python-packages/gdbrunner"}
+adb = {path = "../development/python-packages/adb"}
+pytest-cov = "^4.1.0"
+
+[tool.black]
+# The default includes "build", which excludes both build/ and tests/build.
+exclude = """
+/(
+ |.git
+ |.venv
+ |venv
+ |.mypy_cache
+)/
+"""
+
+[tool.isort]
+profile = "black"
+
+[tool.mypy]
+check_untyped_defs = true
+# TODO: Investigate fixing type signatures for mocks.
+# disallow_any_decorated = True
+# This flags a *lot* of things since things like json.load return Any.
+# disallow_any_expr = True
+disallow_any_generics = true
+disallow_untyped_decorators = true
+disallow_untyped_defs = true
+follow_imports = "silent"
+implicit_reexport = false
+namespace_packages = true
+no_implicit_optional = true
+show_error_codes = true
+strict_equality = true
+warn_redundant_casts = true
+warn_return_any = true
+warn_unreachable = true
+warn_unused_configs = true
+warn_unused_ignores = true
+disallow_any_unimported = true
+disallow_subclassing_any = true
+exclude = [
+ "^tests/",
+ "^build/tools/make_standalone_toolchain.py$",
+ "^parse_elfnote.py$",
+ "^scripts/update_dac.py$",
+ "^scripts/gen_release_table.py$",
+ "^scripts/create_windows_instance.py$",
+]
+
+[tool.pylint]
+jobs = 0
+disable = [
+ "assignment-from-none",
+ "consider-using-f-string",
+ "consider-using-with",
+ "duplicate-code",
+ "fixme,design",
+ "invalid-name",
+ "line-too-long", # black handles this
+ "locally-disabled",
+ "missing-docstring",
+ "too-many-lines",
+]
+good-names = [
+ "i",
+ "j",
+ "k",
+ "ex",
+ "Run",
+ "_",
+]
+dummy-variables-rgx = "_"
+ignore-imports = true
+
+[tool.pytest.ini_options]
+# Required for testing ndkstack.py and ndkgdb.py, since the default import mode
+# prepends the directory of the test (which in those cases would be named
+# ndkstack or ndkgdb...) to sys.path, so `import ndkstack` finds the test
+# package rather than the module under test.
+addopts = "--import-mode=importlib"
[build-system]
requires = ["poetry-core>=1.0.0"]
diff --git a/qa_config.json b/qa_config.json
index d5773ad..c10a8c5 100644
--- a/qa_config.json
+++ b/qa_config.json
@@ -1,8 +1,5 @@
{
"devices": {
- "19": [
- "armeabi-v7a"
- ],
"21": [
"armeabi-v7a",
"arm64-v8a"
@@ -11,9 +8,11 @@
"x86"
],
"32": [
- "armeabi-v7a",
- "arm64-v8a",
"x86_64"
+ ],
+ "34": [
+ "armeabi-v7a",
+ "arm64-v8a"
]
}
}
diff --git a/scripts/create_windows_instance.py b/scripts/create_windows_instance.py
index cc3b1f4..3b5cb90 100755
--- a/scripts/create_windows_instance.py
+++ b/scripts/create_windows_instance.py
@@ -21,10 +21,10 @@
import subprocess
import textwrap
import time
+
import winrm # pylint:disable=import-error
import yaml
-
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
GCE_IMAGE = "windows-server-2016-dc-v20171010"
GCE_IMAGE_PROJECT = "windows-cloud"
diff --git a/scripts/gen_release_table.py b/scripts/gen_release_table.py
index 892861c..9f483a6 100755
--- a/scripts/gen_release_table.py
+++ b/scripts/gen_release_table.py
@@ -18,14 +18,13 @@
from __future__ import annotations
import argparse
-from dataclasses import dataclass, field
import logging
-from pathlib import Path
import re
import sys
+from dataclasses import dataclass, field
+from pathlib import Path
from typing import Optional
-
# pylint: disable=design
diff --git a/scripts/update_dac.py b/scripts/update_dac.py
index f59178a..7f243db 100755
--- a/scripts/update_dac.py
+++ b/scripts/update_dac.py
@@ -31,7 +31,6 @@
import shutil
import subprocess
-
THIS_DIR = os.path.realpath(os.path.dirname(__file__))
NDK_DIR = os.path.dirname(THIS_DIR)
diff --git a/scripts/update_kokoro_prebuilts.py b/scripts/update_kokoro_prebuilts.py
index 3d24825..c8cf8ac 100755
--- a/scripts/update_kokoro_prebuilts.py
+++ b/scripts/update_kokoro_prebuilts.py
@@ -33,22 +33,21 @@
"""
import argparse
-from dataclasses import dataclass
import glob
import logging
import os
-from pathlib import Path
import re
import shlex
import shutil
import subprocess
import sys
import textwrap
+from dataclasses import dataclass
+from pathlib import Path
from typing import Sequence
from uuid import UUID
from xml.etree import ElementTree
-
THIS_DIR = Path(__file__).resolve().parent
REPO_ROOT = THIS_DIR.parent.parent
diff --git a/setup.py b/setup.py
index 6a9ece0..e5b2d77 100644
--- a/setup.py
+++ b/setup.py
@@ -17,8 +17,8 @@
from __future__ import absolute_import
import os
-import setuptools # type: ignore
+import setuptools # type: ignore
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
diff --git a/sources/android/native_app_glue/android_native_app_glue.h b/sources/android/native_app_glue/android_native_app_glue.h
index 89eafe3..35a786e 100644
--- a/sources/android/native_app_glue/android_native_app_glue.h
+++ b/sources/android/native_app_glue/android_native_app_glue.h
@@ -47,7 +47,7 @@
*
* 2/ android_main() receives a pointer to a valid "android_app" structure
* that contains references to other important objects, e.g. the
- * ANativeActivity obejct instance the application is running in.
+ * ANativeActivity object instance the application is running in.
*
* 3/ the "android_app" object holds an ALooper instance that already
* listens to two important things:
diff --git a/tests/build/NDK_ANALYZE/test.py b/tests/build/NDK_ANALYZE/test.py
index 1113fbe..9242b12 100644
--- a/tests/build/NDK_ANALYZE/test.py
+++ b/tests/build/NDK_ANALYZE/test.py
@@ -22,18 +22,21 @@
def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
"""Checks ndk-build output for clang-tidy warnings."""
- ndk_build = os.path.join(ndk_path, 'ndk-build')
- if sys.platform == 'win32':
- ndk_build += '.cmd'
- project_path = 'project'
+ ndk_build = os.path.join(ndk_path, "ndk-build")
+ if sys.platform == "win32":
+ ndk_build += ".cmd"
+ project_path = "project"
ndk_args = [
- f'APP_ABI={config.abi}',
- f'APP_PLATFORM=android-{config.api}',
- 'NDK_ANALYZE=1',
+ f"APP_ABI={config.abi}",
+ f"APP_PLATFORM=android-{config.api}",
+ "NDK_ANALYZE=1",
]
- proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
- encoding='utf-8')
+ proc = subprocess.Popen(
+ [ndk_build, "-C", project_path] + ndk_args,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ encoding="utf-8",
+ )
out, _ = proc.communicate()
if proc.returncode != 0:
return proc.returncode == 0, out
diff --git a/tests/build/NDK_ANALYZE/test_config.py b/tests/build/NDK_ANALYZE/test_config.py
index 250efaf..73f1652 100644
--- a/tests/build/NDK_ANALYZE/test_config.py
+++ b/tests/build/NDK_ANALYZE/test_config.py
@@ -3,6 +3,6 @@
def build_unsupported(_test):
- if sys.platform == 'win32':
+ if sys.platform == "win32":
return sys.platform
return None
diff --git a/tests/build/allow_missing_prebuilt/test.py b/tests/build/allow_missing_prebuilt/test.py
index 53830f1..e4a75f6 100644
--- a/tests/build/allow_missing_prebuilt/test.py
+++ b/tests/build/allow_missing_prebuilt/test.py
@@ -23,57 +23,63 @@
from ndk.test.spec import BuildConfiguration
-PROJECT_PATH = Path('project')
+PROJECT_PATH = Path("project")
-def ndk_build(ndk_path: str, config: BuildConfiguration,
- sync_only: bool = False) -> tuple[bool, str]:
- ndk_build_path = os.path.join(ndk_path, 'ndk-build')
- if sys.platform == 'win32':
- ndk_build_path += '.cmd'
+def ndk_build(
+ ndk_path: str, config: BuildConfiguration, sync_only: bool = False
+) -> tuple[bool, str]:
+ ndk_build_path = os.path.join(ndk_path, "ndk-build")
+ if sys.platform == "win32":
+ ndk_build_path += ".cmd"
ndk_args = [
- f'APP_ABI={config.abi}',
- f'APP_PLATFORM=android-{config.api}',
+ f"APP_ABI={config.abi}",
+ f"APP_PLATFORM=android-{config.api}",
]
if sync_only:
- ndk_args.append('-n')
- proc = subprocess.run([ndk_build_path, '-C', str(PROJECT_PATH)] + ndk_args,
- check=False,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- encoding='utf-8')
+ ndk_args.append("-n")
+ proc = subprocess.run(
+ [ndk_build_path, "-C", str(PROJECT_PATH)] + ndk_args,
+ check=False,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ encoding="utf-8",
+ )
return proc.returncode == 0, proc.stdout
-def check_build_fail_if_missing(ndk_path: str,
- config: BuildConfiguration) -> Optional[str]:
+def check_build_fail_if_missing(
+ ndk_path: str, config: BuildConfiguration
+) -> Optional[str]:
"""Checks that the build fails if the libraries are missing."""
success, output = ndk_build(ndk_path, config)
if not success:
return None
- return f'Build should have failed because prebuilts are missing:\n{output}'
+ return f"Build should have failed because prebuilts are missing:\n{output}"
-def check_sync_pass_if_missing(ndk_path: str,
- config: BuildConfiguration) -> Optional[str]:
+def check_sync_pass_if_missing(
+ ndk_path: str, config: BuildConfiguration
+) -> Optional[str]:
"""Checks that the build fails if the libraries are missing."""
success, output = ndk_build(ndk_path, config, sync_only=True)
if success:
return None
- return f'Build should have passed because ran with -n:\n{output}'
+ return f"Build should have passed because ran with -n:\n{output}"
-def check_build_pass_if_present(ndk_path: str,
- config: BuildConfiguration) -> Optional[str]:
+def check_build_pass_if_present(
+ ndk_path: str, config: BuildConfiguration
+) -> Optional[str]:
"""Checks that the build fails if the libraries are missing."""
- prebuilt_dir = PROJECT_PATH / 'jni' / config.abi
+ prebuilt_dir = PROJECT_PATH / "jni" / config.abi
prebuilt_dir.mkdir(parents=True)
- (prebuilt_dir / 'libfoo.a').touch()
- (prebuilt_dir / 'libfoo.so').touch()
+ (prebuilt_dir / "libfoo.a").touch()
+ (prebuilt_dir / "libfoo.so").touch()
success, output = ndk_build(ndk_path, config)
if success:
return None
- return f'Build should have passed because prebuilts are present:\n{output}'
+ return f"Build should have passed because prebuilts are present:\n{output}"
def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
@@ -93,4 +99,4 @@
return False, error
if (error := check_build_pass_if_present(ndk_path, config)) is not None:
return False, error
- return True, ''
+ return True, ""
diff --git a/tests/build/b14825026-aarch64-FP_LO_REGS/test_config.py b/tests/build/b14825026-aarch64-FP_LO_REGS/test_config.py
index 5bb23a5..44fdd67 100644
--- a/tests/build/b14825026-aarch64-FP_LO_REGS/test_config.py
+++ b/tests/build/b14825026-aarch64-FP_LO_REGS/test_config.py
@@ -1,4 +1,4 @@
def build_unsupported(test):
- if test.config.abi != 'arm64-v8a':
+ if test.config.abi != "arm64-v8a":
return test.config.abi
return None
diff --git a/tests/build/b9193874-neon/test_config.py b/tests/build/b9193874-neon/test_config.py
index 2c0130e..f765dff 100644
--- a/tests/build/b9193874-neon/test_config.py
+++ b/tests/build/b9193874-neon/test_config.py
@@ -1,4 +1,4 @@
def build_unsupported(test):
- if test.config.abi != 'armeabi-v7a':
+ if test.config.abi != "armeabi-v7a":
return test.config.abi
- return 'clang'
+ return "clang"
diff --git a/tests/build/branch-protection/test.py b/tests/build/branch-protection/test.py
index 2d5956e..82c7196 100644
--- a/tests/build/branch-protection/test.py
+++ b/tests/build/branch-protection/test.py
@@ -21,12 +21,11 @@
from ndk.testing.flag_verifier import FlagVerifier
-def run_test(ndk_path: str,
- config: BuildConfiguration) -> tuple[bool, Optional[str]]:
+def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
"""Checks LOCAL_BRANCH_PROTECTION is propagated for arm64-v8a."""
- verifier = FlagVerifier(Path('project'), Path(ndk_path), config)
- if config.abi == 'arm64-v8a':
- verifier.expect_flag('-mbranch-protection=standard')
+ verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
+ if config.abi == "arm64-v8a":
+ verifier.expect_flag("-mbranch-protection=standard")
else:
- verifier.expect_not_flag('-mbranch-protection=standard')
+ verifier.expect_not_flag("-mbranch-protection=standard")
return verifier.verify_ndk_build().make_test_result_tuple()
diff --git a/tests/build/build_id/test.py b/tests/build/build_id/test.py
index bb6b43a..21cd157 100644
--- a/tests/build/build_id/test.py
+++ b/tests/build/build_id/test.py
@@ -27,10 +27,9 @@
from ndk.testing.flag_verifier import FlagVerifier
-def run_test(ndk_path: str,
- config: BuildConfiguration) -> tuple[bool, Optional[str]]:
+def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
"""Checks correct --build-id use."""
- verifier = FlagVerifier(Path('project'), Path(ndk_path), config)
- verifier.expect_flag('-Wl,--build-id=sha1')
- verifier.expect_not_flag('-Wl,--build-id')
+ verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
+ verifier.expect_flag("-Wl,--build-id=sha1")
+ verifier.expect_not_flag("-Wl,--build-id")
return verifier.verify().make_test_result_tuple()
diff --git a/tests/build/c++-stl-source-extensions/test_config.py b/tests/build/c++-stl-source-extensions/test_config.py
index 8da334a..60124e4 100644
--- a/tests/build/c++-stl-source-extensions/test_config.py
+++ b/tests/build/c++-stl-source-extensions/test_config.py
@@ -4,7 +4,7 @@
def build_unsupported(_test):
- if platform.system() == 'Windows':
+ if platform.system() == "Windows":
# This test is specifically checking that we can handle all the
# different C++ source extensions, including those that differ only by
# case. Windows is case insensitive, so this test fails hard.
diff --git a/tests/build/clang_tidy/test.py b/tests/build/clang_tidy/test.py
index 888d0a1..04e4376 100644
--- a/tests/build/clang_tidy/test.py
+++ b/tests/build/clang_tidy/test.py
@@ -23,18 +23,21 @@
def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
"""Checks ndk-build V=1 output for clang-tidy warnings."""
- ndk_build = os.path.join(ndk_path, 'ndk-build')
- if sys.platform == 'win32':
- ndk_build += '.cmd'
- project_path = 'project'
+ ndk_build = os.path.join(ndk_path, "ndk-build")
+ if sys.platform == "win32":
+ ndk_build += ".cmd"
+ project_path = "project"
ndk_args = [
- f'APP_ABI={config.abi}',
- f'APP_PLATFORM=android-{config.api}',
- 'V=1',
+ f"APP_ABI={config.abi}",
+ f"APP_PLATFORM=android-{config.api}",
+ "V=1",
]
- proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
- encoding='utf-8')
+ proc = subprocess.Popen(
+ [ndk_build, "-C", project_path] + ndk_args,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ encoding="utf-8",
+ )
out, _ = proc.communicate()
if proc.returncode != 0:
return proc.returncode == 0, out
diff --git a/tests/build/cmake-ANDROID_EXCEPTIONS/test_config.py b/tests/build/cmake-ANDROID_EXCEPTIONS/test_config.py
index 1da7ea0..4158f7d 100644
--- a/tests/build/cmake-ANDROID_EXCEPTIONS/test_config.py
+++ b/tests/build/cmake-ANDROID_EXCEPTIONS/test_config.py
@@ -1,5 +1,6 @@
def extra_cmake_flags():
- return ['-DANDROID_CPP_FEATURES=no-exceptions']
+ return ["-DANDROID_CPP_FEATURES=no-exceptions"]
+
def is_negative_test():
return True
diff --git a/tests/build/cmake-libc++-shared/test_config.py b/tests/build/cmake-libc++-shared/test_config.py
index 6458cd2..3fa7cd6 100644
--- a/tests/build/cmake-libc++-shared/test_config.py
+++ b/tests/build/cmake-libc++-shared/test_config.py
@@ -6,4 +6,4 @@
def extra_cmake_flags(): # pylint: disable=missing-docstring
- return ['-DANDROID_STL=c++_shared']
+ return ["-DANDROID_STL=c++_shared"]
diff --git a/tests/build/cmake-neon/test_config.py b/tests/build/cmake-neon/test_config.py
index 0cedec6..559ff78 100644
--- a/tests/build/cmake-neon/test_config.py
+++ b/tests/build/cmake-neon/test_config.py
@@ -1,8 +1,8 @@
def build_unsupported(test):
- if test.config.abi != 'armeabi-v7a':
+ if test.config.abi != "armeabi-v7a":
return test.config.abi
return None
def extra_cmake_flags():
- return ['-DANDROID_ARM_NEON=TRUE']
+ return ["-DANDROID_ARM_NEON=TRUE"]
diff --git a/tests/build/cmake-response-file/test_config.py b/tests/build/cmake-response-file/test_config.py
index e8bf50c..d2c6e11 100644
--- a/tests/build/cmake-response-file/test_config.py
+++ b/tests/build/cmake-response-file/test_config.py
@@ -1,2 +1,2 @@
def extra_cmake_flags():
- return ['-DCMAKE_NINJA_FORCE_RESPONSE_FILE=TRUE']
+ return ["-DCMAKE_NINJA_FORCE_RESPONSE_FILE=TRUE"]
diff --git a/tests/build/cmake_exports/CMakeLists.txt b/tests/build/cmake_exports/CMakeLists.txt
index 8ac37df..f23a5e5 100644
--- a/tests/build/cmake_exports/CMakeLists.txt
+++ b/tests/build/cmake_exports/CMakeLists.txt
@@ -1,22 +1,12 @@
cmake_minimum_required(VERSION 3.22)
project(CMakeExportsTest C CXX)
-if(NOT DEFINED CMAKE_C_COMPILER)
- message(FATAL_ERROR "CMAKE_C_COMPILER not set")
-endif()
-
-if(NOT DEFINED CMAKE_CXX_COMPILER)
- message(FATAL_ERROR "CMAKE_CXX_COMPILER not set")
-endif()
-
-if(NOT DEFINED CMAKE_AR)
- message(FATAL_ERROR "CMAKE_AR not set")
-endif()
-
-if(NOT DEFINED CMAKE_STRIP)
- message(FATAL_ERROR "CMAKE_STRIP not set")
-endif()
-
-if(NOT DEFINED CMAKE_RANLIB)
- message(FATAL_ERROR "CMAKE_RANLIB not set")
-endif()
\ No newline at end of file
+foreach(TEST_VAR CMAKE_C_COMPILER CMAKE_CXX_COMPILER CMAKE_AR CMAKE_STRIP CMAKE_RANLIB)
+ if(NOT DEFINED "${TEST_VAR}")
+ message(FATAL_ERROR "${TEST_VAR} not set")
+ elseif(NOT ${TEST_VAR} MATCHES "${CMAKE_ANDROID_NDK}")
+ message(FATAL_ERROR "${TEST_VAR} (${${TEST_VAR}}) is outside the NDK (${CMAKE_ANDROID_NDK})")
+ else()
+ message(WARNING "${TEST_VAR} is ${${TEST_VAR}}")
+ endif()
+endforeach()
diff --git a/tests/build/cmake_find_root_path/test_config.py b/tests/build/cmake_find_root_path/test_config.py
index cb951ef..47a215c 100644
--- a/tests/build/cmake_find_root_path/test_config.py
+++ b/tests/build/cmake_find_root_path/test_config.py
@@ -2,4 +2,4 @@
def extra_cmake_flags() -> List[str]:
- return ['-DCMAKE_FIND_ROOT_PATH=foobar']
+ return ["-DCMAKE_FIND_ROOT_PATH=foobar"]
diff --git a/tests/build/cmake_system_stl/test_config.py b/tests/build/cmake_system_stl/test_config.py
index 544146c..c37c8e7 100644
--- a/tests/build/cmake_system_stl/test_config.py
+++ b/tests/build/cmake_system_stl/test_config.py
@@ -1,4 +1,2 @@
def extra_cmake_flags():
- return [
- '-DANDROID_STL=system', '-DANDROID_CPP_FEATURES=no-rtti no-exceptions'
- ]
+ return ["-DANDROID_STL=system", "-DANDROID_CPP_FEATURES=no-rtti no-exceptions"]
diff --git a/tests/build/cmake_toolchain_defaults/test.py b/tests/build/cmake_toolchain_defaults/test.py
index f48a2a7..8224988 100644
--- a/tests/build/cmake_toolchain_defaults/test.py
+++ b/tests/build/cmake_toolchain_defaults/test.py
@@ -30,22 +30,20 @@
"""
cmake = find_cmake()
ninja = find_ninja()
- toolchain_path = Path(ndk_path) / 'build/cmake/android.toolchain.cmake'
- project_path = 'project'
+ toolchain_path = Path(ndk_path) / "build/cmake/android.toolchain.cmake"
+ project_path = "project"
if config.toolchain_file is CMakeToolchainFile.Legacy:
- toolchain_mode = 'ON'
+ toolchain_mode = "ON"
else:
- toolchain_mode = 'OFF'
+ toolchain_mode = "OFF"
cmake_cmd = [
str(cmake),
- f'-DCMAKE_TOOLCHAIN_FILE={toolchain_path}',
- f'-DCMAKE_MAKE_PROGRAM={ninja}',
- f'-DANDROID_USE_LEGACY_TOOLCHAIN_FILE={toolchain_mode}',
- '-GNinja',
+ f"-DCMAKE_TOOLCHAIN_FILE={toolchain_path}",
+ f"-DCMAKE_MAKE_PROGRAM={ninja}",
+ f"-DANDROID_USE_LEGACY_TOOLCHAIN_FILE={toolchain_mode}",
+ "-GNinja",
]
- result = subprocess.run(cmake_cmd,
- check=False,
- cwd=project_path,
- capture_output=True,
- text=True)
+ result = subprocess.run(
+ cmake_cmd, check=False, cwd=project_path, capture_output=True, text=True
+ )
return result.returncode == 0, result.stdout
diff --git a/tests/build/cortex-a53-835769/test_config.py b/tests/build/cortex-a53-835769/test_config.py
index 5bb23a5..44fdd67 100644
--- a/tests/build/cortex-a53-835769/test_config.py
+++ b/tests/build/cortex-a53-835769/test_config.py
@@ -1,4 +1,4 @@
def build_unsupported(test):
- if test.config.abi != 'arm64-v8a':
+ if test.config.abi != "arm64-v8a":
return test.config.abi
return None
diff --git a/tests/build/disabling_neon_is_error/test_config.py b/tests/build/disabling_neon_is_error/test_config.py
index 1d5b132..1683232 100644
--- a/tests/build/disabling_neon_is_error/test_config.py
+++ b/tests/build/disabling_neon_is_error/test_config.py
@@ -3,8 +3,9 @@
from ndk.abis import Abi
from ndk.test.buildtest.case import Test
+
def build_unsupported(test: Test) -> Optional[str]:
- if test.config.abi != Abi('armeabi-v7a'):
+ if test.config.abi != Abi("armeabi-v7a"):
return test.config.abi
return None
@@ -14,4 +15,4 @@
def extra_cmake_flags() -> list[str]:
- return ['-DANDROID_ARM_NEON=OFF']
+ return ["-DANDROID_ARM_NEON=OFF"]
diff --git a/tests/build/enabling_neon_is_okay/test_config.py b/tests/build/enabling_neon_is_okay/test_config.py
index b1f57e8..ba38941 100644
--- a/tests/build/enabling_neon_is_okay/test_config.py
+++ b/tests/build/enabling_neon_is_okay/test_config.py
@@ -5,10 +5,10 @@
def build_unsupported(test: Test) -> Optional[str]:
- if test.config.abi != Abi('armeabi-v7a'):
+ if test.config.abi != Abi("armeabi-v7a"):
return test.config.abi
return None
def extra_cmake_flags() -> list[str]:
- return ['-DANDROID_ARM_NEON=ON']
+ return ["-DANDROID_ARM_NEON=ON"]
diff --git a/tests/build/fob64_19/CMakeLists.txt b/tests/build/fob64_19/CMakeLists.txt
deleted file mode 100644
index 3f501e0..0000000
--- a/tests/build/fob64_19/CMakeLists.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-cmake_minimum_required(VERSION 3.6.0)
-
-add_library(libfoo
- STATIC
- jni/foo.cpp
-)
diff --git a/tests/build/fob64_19/jni/Android.mk b/tests/build/fob64_19/jni/Android.mk
deleted file mode 100644
index 7ce18d5..0000000
--- a/tests/build/fob64_19/jni/Android.mk
+++ /dev/null
@@ -1,6 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := foo.cpp
-include $(BUILD_STATIC_LIBRARY)
diff --git a/tests/build/fob64_19/jni/Application.mk b/tests/build/fob64_19/jni/Application.mk
deleted file mode 100644
index 05e396c..0000000
--- a/tests/build/fob64_19/jni/Application.mk
+++ /dev/null
@@ -1,2 +0,0 @@
-APP_STL := c++_static
-APP_PLATFORM := android-19
diff --git a/tests/build/fob64_19/jni/foo.cpp b/tests/build/fob64_19/jni/foo.cpp
deleted file mode 100644
index 584c576..0000000
--- a/tests/build/fob64_19/jni/foo.cpp
+++ /dev/null
@@ -1,10 +0,0 @@
-#define _FILE_OFFSET_BITS 64
-#include <cstdio>
-
-namespace {
-
-// These should be unavailable before android-24, and available afterward.
-using ::fgetpos;
-using ::fsetpos;
-
-}
diff --git a/tests/build/fob64_19/test_config.py b/tests/build/fob64_19/test_config.py
deleted file mode 100644
index 9822f4f..0000000
--- a/tests/build/fob64_19/test_config.py
+++ /dev/null
@@ -1,12 +0,0 @@
-def is_negative_test():
- return True
-
-
-def extra_cmake_flags():
- return ['-DANDROID_PLATFORM=android-19']
-
-
-def build_unsupported(test):
- if '64' in test.config.abi:
- return test.config.abi
- return None
diff --git a/tests/build/fob64_21/test_config.py b/tests/build/fob64_21/test_config.py
index d082c4f..9ea224a 100644
--- a/tests/build/fob64_21/test_config.py
+++ b/tests/build/fob64_21/test_config.py
@@ -3,10 +3,10 @@
def extra_cmake_flags():
- return ['-DANDROID_PLATFORM=android-21']
+ return ["-DANDROID_PLATFORM=android-21"]
def build_unsupported(test):
- if '64' in test.config.abi:
+ if "64" in test.config.abi:
return test.config.abi
return None
diff --git a/tests/build/fob64_24/test_config.py b/tests/build/fob64_24/test_config.py
index f1c51cd..fbbd2ee 100644
--- a/tests/build/fob64_24/test_config.py
+++ b/tests/build/fob64_24/test_config.py
@@ -1,8 +1,8 @@
def extra_cmake_flags():
- return ['-DANDROID_PLATFORM=android-24']
+ return ["-DANDROID_PLATFORM=android-24"]
def build_unsupported(test):
- if '64' in test.config.abi:
+ if "64" in test.config.abi:
return test.config.abi
return None
diff --git a/tests/build/fortify_strcpy_available/test_config.py b/tests/build/fortify_strcpy_available/test_config.py
index 14f97c6..9d9b554 100644
--- a/tests/build/fortify_strcpy_available/test_config.py
+++ b/tests/build/fortify_strcpy_available/test_config.py
@@ -1,4 +1,2 @@
-
-
def is_negative_test() -> bool:
return True
diff --git a/tests/build/gc_sections/test.py b/tests/build/gc_sections/test.py
index 419cae1..8bb436a 100644
--- a/tests/build/gc_sections/test.py
+++ b/tests/build/gc_sections/test.py
@@ -13,9 +13,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-"""Check that -Wl,--gc-sections is used.
+"""Check that -Wl,--gc-sections is used, but only on release builds.
+
+This flag should not be present for debug builds because that can strip functions that
+the user may want to evaluate while debugging.
https://github.com/android/ndk/issues/1717
+https://github.com/android/ndk/issues/1813
"""
from pathlib import Path
from typing import Optional
@@ -27,5 +31,37 @@
def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
"""Checks correct --gc-sections use."""
verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
+ verifier.with_cmake_flag("-DCMAKE_BUILD_TYPE=Release")
+ verifier.with_ndk_build_flag("APP_DEBUG=false")
verifier.expect_flag("-Wl,--gc-sections")
- return verifier.verify().make_test_result_tuple()
+ passed, message = verifier.verify().make_test_result_tuple(
+ "With -DCMAKE_BUILD_TYPE=Release and APP_DEBUG=false"
+ )
+ if not passed:
+ return passed, message
+
+ verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
+ verifier.with_cmake_flag("-DCMAKE_BUILD_TYPE=RelWithDebInfo")
+ verifier.expect_flag("-Wl,--gc-sections")
+ passed, message = verifier.verify_cmake().make_test_result_tuple(
+ "With -DCMAKE_BUILD_TYPE=RelWithDebInfo"
+ )
+ if not passed:
+ return passed, message
+
+ verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
+ verifier.with_cmake_flag("-DCMAKE_BUILD_TYPE=MinSizeRel")
+ verifier.expect_flag("-Wl,--gc-sections")
+ passed, message = verifier.verify_cmake().make_test_result_tuple(
+ "With -DCMAKE_BUILD_TYPE=MinSizeRel"
+ )
+ if not passed:
+ return passed, message
+
+ verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
+ verifier.with_cmake_flag("-DCMAKE_BUILD_TYPE=Debug")
+ verifier.with_ndk_build_flag("APP_DEBUG=true")
+ verifier.expect_not_flag("-Wl,--gc-sections")
+ return verifier.verify().make_test_result_tuple(
+ "With -DCMAKE_BUILD_TYPE=Debug and APP_DEBUG=true"
+ )
diff --git a/tests/build/gc_sections/test_config.py b/tests/build/gc_sections/test_config.py
new file mode 100644
index 0000000..869627d
--- /dev/null
+++ b/tests/build/gc_sections/test_config.py
@@ -0,0 +1,8 @@
+from ndk.test.buildtest.case import Test
+from ndk.test.spec import CMakeToolchainFile
+
+
+def build_broken(test: Test) -> tuple[str | None, str | None]:
+ if test.config.toolchain_file is CMakeToolchainFile.Default:
+ return "new CMake toolchain", "https://github.com/android/ndk/issues/1813"
+ return None, None
diff --git a/tests/build/gradle_injected_import_path/test_config.py b/tests/build/gradle_injected_import_path/test_config.py
index d2af0b4..0cdc24d 100644
--- a/tests/build/gradle_injected_import_path/test_config.py
+++ b/tests/build/gradle_injected_import_path/test_config.py
@@ -6,4 +6,4 @@
def extra_ndk_build_flags() -> List[str]:
- return [f'NDK_GRADLE_INJECTED_IMPORT_PATH={THIS_DIR}']
+ return [f"NDK_GRADLE_INJECTED_IMPORT_PATH={THIS_DIR}"]
diff --git a/tests/build/issue21132-__ARM_ARCH__/jni/Android.mk b/tests/build/issue21132-__ARM_ARCH__/jni/Android.mk
deleted file mode 100644
index 66cdd20..0000000
--- a/tests/build/issue21132-__ARM_ARCH__/jni/Android.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := issue21132-__ARM_ARCH__
-LOCAL_SRC_FILES := issue21132-__ARM_ARCH__.c
-include $(BUILD_EXECUTABLE)
-
diff --git a/tests/build/issue21132-__ARM_ARCH__/jni/Application.mk b/tests/build/issue21132-__ARM_ARCH__/jni/Application.mk
deleted file mode 100644
index 79b5a35..0000000
--- a/tests/build/issue21132-__ARM_ARCH__/jni/Application.mk
+++ /dev/null
@@ -1,2 +0,0 @@
-# Only armeabi-v7a* and x86 instruction for fast __swap32md
-APP_ABI := armeabi-v7a x86
diff --git a/tests/build/issue21132-__ARM_ARCH__/jni/issue21132-__ARM_ARCH__.c b/tests/build/issue21132-__ARM_ARCH__/jni/issue21132-__ARM_ARCH__.c
deleted file mode 100644
index c3341e8..0000000
--- a/tests/build/issue21132-__ARM_ARCH__/jni/issue21132-__ARM_ARCH__.c
+++ /dev/null
@@ -1,14 +0,0 @@
-#if defined(__ARM_ARCH_5__)
-#error
-#elif defined(__ARM_ARCH_5T__)
-#error
-#elif defined(__ARM_ARCH_5E__)
-#error
-#elif defined(__ARM_ARCH_5TE__)
-#error
-#endif
-
-int main(int argc, char *argv[])
-{
- return 0;
-}
diff --git a/tests/build/issue21132-__ARM_ARCH__/test_config.py b/tests/build/issue21132-__ARM_ARCH__/test_config.py
deleted file mode 100644
index b144a90..0000000
--- a/tests/build/issue21132-__ARM_ARCH__/test_config.py
+++ /dev/null
@@ -1,4 +0,0 @@
-def build_unsupported(test):
- if test.config.abi not in ('armeabi-v7a', 'x86'):
- return test.config.abi
- return None
diff --git a/tests/build/issue22336-ICE-emit-rtl/test_config.py b/tests/build/issue22336-ICE-emit-rtl/test_config.py
index 84e726b..1db330b 100644
--- a/tests/build/issue22336-ICE-emit-rtl/test_config.py
+++ b/tests/build/issue22336-ICE-emit-rtl/test_config.py
@@ -1,4 +1,4 @@
def build_unsupported(test):
- if test.config.abi != 'armeabi-v7a':
+ if test.config.abi != "armeabi-v7a":
return test.config.abi
return None
diff --git a/tests/build/issue22345-ICE-postreload/test_config.py b/tests/build/issue22345-ICE-postreload/test_config.py
index 84e726b..1db330b 100644
--- a/tests/build/issue22345-ICE-postreload/test_config.py
+++ b/tests/build/issue22345-ICE-postreload/test_config.py
@@ -1,4 +1,4 @@
def build_unsupported(test):
- if test.config.abi != 'armeabi-v7a':
+ if test.config.abi != "armeabi-v7a":
return test.config.abi
return None
diff --git a/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/Android.mk b/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/Android.mk
deleted file mode 100644
index 60ed9e2..0000000
--- a/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/Android.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE
-LOCAL_SRC_FILES := issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE.c
-LOCAL_LDLIBS += -lOpenSLES
-include $(BUILD_SHARED_LIBRARY)
diff --git a/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/Application.mk b/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/Application.mk
deleted file mode 100644
index e3af5f4..0000000
--- a/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/Application.mk
+++ /dev/null
@@ -1,2 +0,0 @@
-APP_ABI := all
-APP_PLATFORM := android-14
\ No newline at end of file
diff --git a/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE.c b/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE.c
deleted file mode 100644
index f20eb46..0000000
--- a/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE.c
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <SLES/OpenSLES.h>
-#include <SLES/OpenSLES_Android.h>
-
-SLInterfaceID foo()
-{
- return SL_IID_ANDROIDBUFFERQUEUESOURCE;
-}
diff --git a/tests/build/issue65705-asm-pc/test_config.py b/tests/build/issue65705-asm-pc/test_config.py
index 84e726b..1db330b 100644
--- a/tests/build/issue65705-asm-pc/test_config.py
+++ b/tests/build/issue65705-asm-pc/test_config.py
@@ -1,4 +1,4 @@
def build_unsupported(test):
- if test.config.abi != 'armeabi-v7a':
+ if test.config.abi != "armeabi-v7a":
return test.config.abi
return None
diff --git a/tests/build/libcxx_headers_no_android_support/test_config.py b/tests/build/libcxx_headers_no_android_support/test_config.py
index b84c8fe..42cf24e 100644
--- a/tests/build/libcxx_headers_no_android_support/test_config.py
+++ b/tests/build/libcxx_headers_no_android_support/test_config.py
@@ -1,2 +1,2 @@
def extra_cmake_flags():
- return ['-DANDROID_PLATFORM=android-21']
+ return ["-DANDROID_PLATFORM=android-21"]
diff --git a/tests/build/link_order/project/jni/Android.mk b/tests/build/link_order/project/jni/Android.mk
deleted file mode 100644
index 22c955e..0000000
--- a/tests/build/link_order/project/jni/Android.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := foo.cpp
-LOCAL_LDFLAGS := -v
-include $(BUILD_SHARED_LIBRARY)
diff --git a/tests/build/link_order/project/jni/Application.mk b/tests/build/link_order/project/jni/Application.mk
deleted file mode 100644
index 3b7baf1..0000000
--- a/tests/build/link_order/project/jni/Application.mk
+++ /dev/null
@@ -1 +0,0 @@
-APP_STL := c++_shared
diff --git a/tests/build/link_order/project/jni/foo.cpp b/tests/build/link_order/project/jni/foo.cpp
deleted file mode 100644
index 85e6cd8..0000000
--- a/tests/build/link_order/project/jni/foo.cpp
+++ /dev/null
@@ -1 +0,0 @@
-void foo() {}
diff --git a/tests/build/link_order/test.py b/tests/build/link_order/test.py
deleted file mode 100644
index 73548a7..0000000
--- a/tests/build/link_order/test.py
+++ /dev/null
@@ -1,155 +0,0 @@
-#
-# Copyright (C) 2018 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Check for correct link order from ndk-build.
-"""
-import difflib
-import os
-import re
-import shlex
-import subprocess
-import sys
-from typing import Iterator, Optional
-
-from ndk.abis import Abi
-from ndk.test.spec import BuildConfiguration
-
-
-def is_linked_item(arg: str) -> bool:
- """Returns True if the argument is an object or library to be linked."""
- if arg.endswith('.a'):
- return True
- if arg.endswith('.o'):
- return True
- if arg.endswith('.so'):
- return True
- if arg.startswith('-l'):
- return True
- return False
-
-
-def find_link_args(link_line: str) -> list[str]:
- """Returns a list of objects and libraries in the link command."""
- args = []
-
- # A trivial attempt at parsing here is fine since we can assume that all
- # our objects and libraries will not include spaces and we don't care about
- # the rest of the arguments.
- #
- # Arguments could be quoted on Windows. shlex.split should be good enough:
- # "C:/src/android-ndk-r17-beta1/build//../platforms/android-21/arch-x86_64/usr/lib/../lib64\\crtbegin_so.o"
- skip_next = False
- for word in shlex.split(link_line):
- if skip_next:
- skip_next = False
- continue
- if word in ('-o', '-soname', '--exclude-libs'):
- skip_next = True
- continue
-
- if is_linked_item(word):
- # Use just the base name so we can compare to an exact expected
- # link order regardless of ABI.
- if os.sep in word or (os.altsep and os.altsep in word):
- word = os.path.basename(word)
- args.append(word)
- return args
-
-
-def builtins_basename(abi: Abi) -> str:
- runtimes_arch = {
- 'armeabi-v7a': 'arm',
- 'arm64-v8a': 'aarch64',
- 'x86': 'i686',
- 'x86_64': 'x86_64',
- }[abi]
- return 'libclang_rt.builtins-' + runtimes_arch + '-android.a'
-
-
-def check_link_order(
- link_line: str,
- config: BuildConfiguration) -> tuple[bool, Optional[Iterator[str]]]:
- """Determines if a given link command has the correct ordering.
-
- Args:
- link_line: The full ld command.
- config: The test's build configuration.
-
- Returns:
- Tuple of (success, diff). The diff will be None on success or a
- difflib.unified_diff result with no line terminations, i.e. a generator
- suitable for use with `' '.join()`. The diff represents the changes
- between the expected link order and the actual link order.
- """
- assert config.api is not None
- expected = [
- 'crtbegin_so.o',
- 'foo.o',
- # The most important part of this test is checking that libunwind.a
- # comes *before* the shared libraries so we can be sure we're actually
- # getting libunwind.a symbols rather than getting them from some shared
- # library dependency that's re-exporting them.
- 'libunwind.a',
- '-latomic',
- 'libc++_shared.so',
- '-lc',
- '-lm',
- '-lm',
- builtins_basename(config.abi),
- '-l:libunwind.a',
- '-ldl',
- '-lc',
- builtins_basename(config.abi),
- '-l:libunwind.a',
- '-ldl',
- 'crtend_so.o',
- ]
- link_args = find_link_args(link_line)
- if link_args == expected:
- return True, None
- return False, difflib.unified_diff(expected, link_args, lineterm='')
-
-
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
- """Checks clang's -v output for proper link ordering."""
- ndk_build = os.path.join(ndk_path, 'ndk-build')
- if sys.platform == 'win32':
- ndk_build += '.cmd'
- project_path = 'project'
- ndk_args = [
- f'APP_ABI={config.abi}',
- f'APP_PLATFORM=android-{config.api}',
- ]
- proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
- encoding='utf-8')
- out, _ = proc.communicate()
- if proc.returncode != 0:
- return proc.returncode == 0, out
-
- link_line: Optional[str] = None
- for line in out.splitlines():
- if 'bin/ld' in re.sub(r'[/\\]+', '/', line):
- if link_line is not None:
- err_msg = 'Found duplicate link lines:\n{}\n{}'.format(
- link_line, line)
- return False, err_msg
- link_line = line
-
- if link_line is None:
- return False, 'Did not find link line in out:\n{}'.format(out)
-
- result, diff = check_link_order(link_line, config)
- return result, '' if diff is None else os.linesep.join(diff)
diff --git a/tests/build/lld_rosegment/test.py b/tests/build/lld_rosegment/test.py
index 7ce6867..b6173c0 100644
--- a/tests/build/lld_rosegment/test.py
+++ b/tests/build/lld_rosegment/test.py
@@ -24,18 +24,24 @@
from ndk.testing.flag_verifier import FlagVerifier
-def run_test(ndk_path: str,
- config: BuildConfiguration) -> tuple[bool, Optional[str]]:
+def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
"""Checks correct --no-rosegment use."""
- verifier = FlagVerifier(Path('project'), Path(ndk_path),
- config).with_api(29)
- verifier.expect_flag('-Wl,--no-rosegment')
- verifier.expect_not_flag('-Wl,--rosegment')
+ # The 'riscv64' ABI has a minimum supported version of 35, so the below
+ # tests of API level 29 and 30 are ignored (the CMake files will simply
+ # reset the value to the minimum supported version). Verify that the
+ # behavior after API level 30 is retained (--no-rosegment does not appear).
+ if config.abi == "riscv64":
+ verifier = FlagVerifier(Path("project"), Path(ndk_path), config).with_api(35)
+ verifier.expect_not_flag("-Wl,--no-rosegment")
+ return verifier.verify().make_test_result_tuple()
+
+ verifier = FlagVerifier(Path("project"), Path(ndk_path), config).with_api(29)
+ verifier.expect_flag("-Wl,--no-rosegment")
+ verifier.expect_not_flag("-Wl,--rosegment")
result = verifier.verify()
if result.failed():
return result.make_test_result_tuple()
- verifier = FlagVerifier(Path('project'), Path(ndk_path),
- config).with_api(30)
- verifier.expect_not_flag('-Wl,--no-rosegment')
+ verifier = FlagVerifier(Path("project"), Path(ndk_path), config).with_api(30)
+ verifier.expect_not_flag("-Wl,--no-rosegment")
return verifier.verify().make_test_result_tuple()
diff --git a/tests/build/mstackrealign/test.py b/tests/build/mstackrealign/test.py
index 5e581bb..67f5ed1 100644
--- a/tests/build/mstackrealign/test.py
+++ b/tests/build/mstackrealign/test.py
@@ -26,13 +26,12 @@
from ndk.testing.flag_verifier import FlagVerifier
-def run_test(ndk_path: str,
- config: BuildConfiguration) -> tuple[bool, Optional[str]]:
+def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
"""Checks ndk-build V=1 output for mstackrealign flag."""
- verifier = FlagVerifier(Path('project'), Path(ndk_path), config)
+ verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
assert config.api is not None
- if config.abi == Abi('x86') and config.api < 24:
- verifier.expect_flag('-mstackrealign')
+ if config.abi == Abi("x86") and config.api < 24:
+ verifier.expect_flag("-mstackrealign")
else:
- verifier.expect_not_flag('-mstackrealign')
+ verifier.expect_not_flag("-mstackrealign")
return verifier.verify_ndk_build().make_test_result_tuple()
diff --git a/tests/build/neon-asm/test_config.py b/tests/build/neon-asm/test_config.py
index 3f9f9cc..2f139ae 100644
--- a/tests/build/neon-asm/test_config.py
+++ b/tests/build/neon-asm/test_config.py
@@ -1,8 +1,8 @@
def build_unsupported(test):
- if test.config.abi != 'armeabi-v7a':
+ if test.config.abi != "armeabi-v7a":
return test.config.abi
return None
def extra_cmake_flags():
- return ['-DANDROID_ARM_NEON=ON']
+ return ["-DANDROID_ARM_NEON=ON"]
diff --git a/tests/build/neon_tags_have_no_effect/test_config.py b/tests/build/neon_tags_have_no_effect/test_config.py
index f410637..51c5c7e 100644
--- a/tests/build/neon_tags_have_no_effect/test_config.py
+++ b/tests/build/neon_tags_have_no_effect/test_config.py
@@ -5,6 +5,6 @@
def build_unsupported(test: Test) -> Optional[str]:
- if test.config.abi != Abi('armeabi-v7a'):
+ if test.config.abi != Abi("armeabi-v7a"):
return test.config.abi
return None
diff --git a/tests/build/no_platform_gaps/test.py b/tests/build/no_platform_gaps/test.py
index d6c0861..e636cac 100644
--- a/tests/build/no_platform_gaps/test.py
+++ b/tests/build/no_platform_gaps/test.py
@@ -37,19 +37,22 @@
def build(ndk_dir: str, config: BuildConfiguration) -> tuple[bool, str]:
- ndk_build = os.path.join(ndk_dir, 'ndk-build')
- if sys.platform == 'win32':
- ndk_build += '.cmd'
- project_path = 'project'
+ ndk_build = os.path.join(ndk_dir, "ndk-build")
+ if sys.platform == "win32":
+ ndk_build += ".cmd"
+ project_path = "project"
ndk_args = [
- f'APP_ABI={config.abi}',
- f'APP_PLATFORM=android-{config.api}',
- 'V=1',
+ f"APP_ABI={config.abi}",
+ f"APP_PLATFORM=android-{config.api}",
+ "V=1",
]
- proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ proc = subprocess.Popen(
+ [ndk_build, "-C", project_path] + ndk_args,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ )
out, _ = proc.communicate()
- return proc.returncode == 0, out.decode('utf-8')
+ return proc.returncode == 0, out.decode("utf-8")
def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
@@ -59,8 +62,8 @@
apis = []
host = Host.current().tag
triple = ndk.abis.arch_to_triple(ndk.abis.abi_to_arch(config.abi))
- toolchain_dir = Path(ndk_path) / f'toolchains/llvm/prebuilt/{host}'
- lib_dir = toolchain_dir / f'sysroot/usr/lib/{triple}'
+ toolchain_dir = Path(ndk_path) / f"toolchains/llvm/prebuilt/{host}"
+ lib_dir = toolchain_dir / f"sysroot/usr/lib/{triple}"
for path in lib_dir.iterdir():
if not path.is_dir():
continue
@@ -86,4 +89,4 @@
if not result:
return result, out
- return True, ''
+ return True, ""
diff --git a/tests/build/non_android_mk_build_script/test_config.py b/tests/build/non_android_mk_build_script/test_config.py
index 5a48501..ff31650 100644
--- a/tests/build/non_android_mk_build_script/test_config.py
+++ b/tests/build/non_android_mk_build_script/test_config.py
@@ -3,8 +3,8 @@
def extra_ndk_build_flags() -> List[str]:
return [
- 'APP_BUILD_SCRIPT=jni/main.mk',
- 'APP_PROJECT_PATH=null',
- 'NDK_OUT=obj',
- 'NDK_LIBS_OUT=libs',
+ "APP_BUILD_SCRIPT=jni/main.mk",
+ "APP_PROJECT_PATH=null",
+ "NDK_OUT=obj",
+ "NDK_LIBS_OUT=libs",
]
diff --git a/tests/build/shell_quotation/test.py b/tests/build/shell_quotation/test.py
index 1b180ff..f3e6ed2 100644
--- a/tests/build/shell_quotation/test.py
+++ b/tests/build/shell_quotation/test.py
@@ -54,7 +54,7 @@
if not os.path.exists(cc_json):
return False, "{} does not exist".format(cc_json)
- with open(cc_json) as cc_json_file:
+ with open(cc_json, encoding="utf-8") as cc_json_file:
contents = json.load(cc_json_file)
command_default = contents[0]["command"]
command_short_local = contents[1]["command"]
diff --git a/tests/build/short-commands-escape/test_config.py b/tests/build/short-commands-escape/test_config.py
index 5caeff2..227d415 100644
--- a/tests/build/short-commands-escape/test_config.py
+++ b/tests/build/short-commands-escape/test_config.py
@@ -3,10 +3,10 @@
def build_unsupported(_test):
- if sys.platform != 'win32':
+ if sys.platform != "win32":
return sys.platform
return None
def extra_ndk_build_flags():
- return ['NDK_OUT=foo\\bar']
+ return ["NDK_OUT=foo\\bar"]
diff --git a/tests/build/signal/test_config.py b/tests/build/signal/test_config.py
index e9d5f66..fa599e6 100644
--- a/tests/build/signal/test_config.py
+++ b/tests/build/signal/test_config.py
@@ -1,4 +1,4 @@
def build_unsupported(test):
- if test.config.abi in ('arm64-v8a', 'x86_64'):
+ if test.config.abi in ("arm64-v8a", "riscv64", "x86_64"):
return test.config.abi
return None
diff --git a/tests/build/ssax-instructions/test_config.py b/tests/build/ssax-instructions/test_config.py
index 84e726b..1db330b 100644
--- a/tests/build/ssax-instructions/test_config.py
+++ b/tests/build/ssax-instructions/test_config.py
@@ -1,4 +1,4 @@
def build_unsupported(test):
- if test.config.abi != 'armeabi-v7a':
+ if test.config.abi != "armeabi-v7a":
return test.config.abi
return None
diff --git a/tests/build/standalone_toolchain/test.py b/tests/build/standalone_toolchain/test.py
index 4b803f1..3a911f8 100644
--- a/tests/build/standalone_toolchain/test.py
+++ b/tests/build/standalone_toolchain/test.py
@@ -18,6 +18,6 @@
def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
- return ndk.testing.standalone_toolchain.run_test(ndk_path, config,
- 'foo.cpp',
- ['--stl=libc++'], [])
+ return ndk.testing.standalone_toolchain.run_test(
+ ndk_path, config, "foo.cpp", ["--stl=libc++"], []
+ )
diff --git a/tests/build/standalone_toolchain_thumb/test.py b/tests/build/standalone_toolchain_thumb/test.py
index 3fc2cbb..3060df7 100644
--- a/tests/build/standalone_toolchain_thumb/test.py
+++ b/tests/build/standalone_toolchain_thumb/test.py
@@ -19,4 +19,5 @@
def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
return ndk.testing.standalone_toolchain.run_test(
- ndk_path, config, 'foo.cpp', ['--stl=libc++'], ['-mthumb'])
+ ndk_path, config, "foo.cpp", ["--stl=libc++"], ["-mthumb"]
+ )
diff --git a/tests/build/standalone_toolchain_thumb/test_config.py b/tests/build/standalone_toolchain_thumb/test_config.py
index 12c5785..096d881 100644
--- a/tests/build/standalone_toolchain_thumb/test_config.py
+++ b/tests/build/standalone_toolchain_thumb/test_config.py
@@ -4,6 +4,6 @@
def build_unsupported(test):
# -mthumb is only relevant for 32-bit ARM.
- if test.config.abi != 'armeabi-v7a':
+ if test.config.abi != "armeabi-v7a":
return test.config.abi
return None
diff --git a/tests/build/link_order/__init__.py b/tests/build/standalone_toolchainapi_too_low/__init__.py
similarity index 100%
rename from tests/build/link_order/__init__.py
rename to tests/build/standalone_toolchainapi_too_low/__init__.py
diff --git a/tests/build/standalone_toolchainapi_too_low/foo.cpp b/tests/build/standalone_toolchainapi_too_low/foo.cpp
new file mode 100644
index 0000000..fb7b6c7
--- /dev/null
+++ b/tests/build/standalone_toolchainapi_too_low/foo.cpp
@@ -0,0 +1,23 @@
+// Make sure we're not clobbering libc++ headers with libandroid_support.
+#include <cmath>
+
+// Use iostream instead of stdio.h to make sure we can actually get symbols from
+// libc++.so. Most of libc++ is defined in the headers, but std::cout is in the
+// library.
+#include <iostream>
+
+// If std::strings don't work then there's really no point :)
+#include <string>
+
+void foo(const std::string& s) {
+ // Using new makes sure we get libc++abi/libsupc++. Using std::string makes
+ // sure the STL works at all. Using std::cout makes sure we can access the
+ // library itself and not just the headers.
+ std::string* copy = new std::string(s);
+  std::cout << *copy << std::endl;
+ delete copy;
+}
+
+int main(int, char**) {
+ foo("Hello, world!");
+}
diff --git a/tests/build/standalone_toolchainapi_too_low/test.py b/tests/build/standalone_toolchainapi_too_low/test.py
new file mode 100644
index 0000000..dd0a26c
--- /dev/null
+++ b/tests/build/standalone_toolchainapi_too_low/test.py
@@ -0,0 +1,42 @@
+#
+# Copyright (C) 2024 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from ndk.test.spec import BuildConfiguration
+import ndk.testing.standalone_toolchain
+import ndk.abis
+
+
+def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
+ min_api_for_abi = ndk.abis.min_api_for_abi(config.abi)
+ arch = ndk.abis.abi_to_arch(config.abi)
+ success, out = ndk.testing.standalone_toolchain.run_test(
+ ndk_path, config, "foo.cpp", ["--api", str(min_api_for_abi - 1)], []
+ )
+ if success:
+ return (
+ False,
+            f"{min_api_for_abi - 1} is below minimum supported OS version for "
+ f"{config.abi}, but was not rejected",
+ )
+ expected_error = (
+ f"{min_api_for_abi - 1} is less than minimum platform for {arch} "
+ f"({min_api_for_abi})"
+ )
+ if expected_error not in out:
+ return (
+ False,
+ f'expected error message ("{expected_error}") not seen in output: {out}',
+ )
+ return True, out
diff --git a/tests/build/strip/test.py b/tests/build/strip/test.py
index a0fcf36..6633618 100644
--- a/tests/build/strip/test.py
+++ b/tests/build/strip/test.py
@@ -21,9 +21,8 @@
from ndk.testing.flag_verifier import FlagVerifier
-def run_test(ndk_path: str,
- config: BuildConfiguration) -> tuple[bool, Optional[str]]:
+def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
"""Checks ndk-build V=1 output for --strip-unneeded flag."""
- verifier = FlagVerifier(Path('project'), Path(ndk_path), config)
- verifier.expect_flag('--strip-unneeded')
+ verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
+ verifier.expect_flag("--strip-unneeded")
return verifier.verify_ndk_build().make_test_result_tuple()
diff --git a/tests/build/strip_keep_symbols/test.py b/tests/build/strip_keep_symbols/test.py
index fe8f94f..9ff354e 100644
--- a/tests/build/strip_keep_symbols/test.py
+++ b/tests/build/strip_keep_symbols/test.py
@@ -21,10 +21,9 @@
from ndk.testing.flag_verifier import FlagVerifier
-def run_test(ndk_path: str,
- config: BuildConfiguration) -> tuple[bool, Optional[str]]:
+def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
"""Checks ndk-build V=1 output for --strip-debug flag."""
- verifier = FlagVerifier(Path('project'), Path(ndk_path), config)
- verifier.expect_flag('--strip-debug')
- verifier.expect_not_flag('--strip-unneeded')
+ verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
+ verifier.expect_flag("--strip-debug")
+ verifier.expect_not_flag("--strip-unneeded")
return verifier.verify_ndk_build().make_test_result_tuple()
diff --git a/tests/build/strip_keep_symbols_app/test.py b/tests/build/strip_keep_symbols_app/test.py
index fe8f94f..9ff354e 100644
--- a/tests/build/strip_keep_symbols_app/test.py
+++ b/tests/build/strip_keep_symbols_app/test.py
@@ -21,10 +21,9 @@
from ndk.testing.flag_verifier import FlagVerifier
-def run_test(ndk_path: str,
- config: BuildConfiguration) -> tuple[bool, Optional[str]]:
+def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
"""Checks ndk-build V=1 output for --strip-debug flag."""
- verifier = FlagVerifier(Path('project'), Path(ndk_path), config)
- verifier.expect_flag('--strip-debug')
- verifier.expect_not_flag('--strip-unneeded')
+ verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
+ verifier.expect_flag("--strip-debug")
+ verifier.expect_not_flag("--strip-unneeded")
return verifier.verify_ndk_build().make_test_result_tuple()
diff --git a/tests/build/strip_local_overrides_app/test.py b/tests/build/strip_local_overrides_app/test.py
index c237130..4c2df34 100644
--- a/tests/build/strip_local_overrides_app/test.py
+++ b/tests/build/strip_local_overrides_app/test.py
@@ -21,10 +21,9 @@
from ndk.testing.flag_verifier import FlagVerifier
-def run_test(ndk_path: str,
- config: BuildConfiguration) -> tuple[bool, Optional[str]]:
+def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
"""Checks ndk-build V=1 output for --strip-unneeded flag."""
- verifier = FlagVerifier(Path('project'), Path(ndk_path), config)
- verifier.expect_not_flag('--strip-debug')
- verifier.expect_flag('--strip-unneeded')
+ verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
+ verifier.expect_not_flag("--strip-debug")
+ verifier.expect_flag("--strip-unneeded")
return verifier.verify_ndk_build().make_test_result_tuple()
diff --git a/tests/build/strip_none/test.py b/tests/build/strip_none/test.py
index ed70424..49d9068 100644
--- a/tests/build/strip_none/test.py
+++ b/tests/build/strip_none/test.py
@@ -21,12 +21,11 @@
from ndk.testing.flag_verifier import FlagVerifier
-def run_test(ndk_path: str,
- config: BuildConfiguration) -> tuple[bool, Optional[str]]:
+def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
"""Checks ndk-build V=1 output for lack of strip."""
- verifier = FlagVerifier(Path('project'), Path(ndk_path), config)
+ verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
# TODO: Fix this test.
# This test has always been wrong, since it was only doing whole word
# search for 'strip' and we call strip with its full path.
- verifier.expect_not_flag('strip')
+ verifier.expect_not_flag("strip")
return verifier.verify_ndk_build().make_test_result_tuple()
diff --git a/tests/build/system-no-supc/test_config.py b/tests/build/system-no-supc/test_config.py
index e8b1a9b..1543ff0 100644
--- a/tests/build/system-no-supc/test_config.py
+++ b/tests/build/system-no-supc/test_config.py
@@ -3,4 +3,4 @@
def extra_cmake_flags():
- return ['-DANDROID_STL=system']
+ return ["-DANDROID_STL=system"]
diff --git a/tests/build/unwinder_hidden/test.py b/tests/build/unwinder_hidden/test.py
index 00b82ff..1451792 100644
--- a/tests/build/unwinder_hidden/test.py
+++ b/tests/build/unwinder_hidden/test.py
@@ -29,51 +29,56 @@
# 274: 00000000000223d8 8 FUNC GLOBAL DEFAULT 11 _Unwind_GetIP
# Group 1: Visibility
# Group 2: Name
- readelf_regex = re.compile(r'^.*?(\S+)\s+\d+\s+(\S+)$')
+ readelf_regex = re.compile(r"^.*?(\S+)\s+\d+\s+(\S+)$")
for line in output.splitlines():
match = readelf_regex.match(line)
if match is None:
continue
visibility, name = match.groups()
- if name.startswith('_Unwind') and visibility == 'DEFAULT':
+ if name.startswith("_Unwind") and visibility == "DEFAULT":
yield name
-def readelf(ndk_path: Path, host: ndk.hosts.Host, library: Path,
- *args: str) -> str:
+def readelf(ndk_path: Path, host: ndk.hosts.Host, library: Path, *args: str) -> str:
"""Runs readelf, returning the output."""
- readelf_path = (ndk_path / 'toolchains/llvm/prebuilt' /
- ndk.hosts.get_host_tag() / 'bin/llvm-readelf')
+ readelf_path = (
+ ndk_path
+ / "toolchains/llvm/prebuilt"
+ / ndk.hosts.get_host_tag()
+ / "bin/llvm-readelf"
+ )
if host.is_windows:
- readelf_path = readelf_path.with_suffix('.exe')
+ readelf_path = readelf_path.with_suffix(".exe")
return subprocess.run(
[str(readelf_path), *args, str(library)],
check=True,
- encoding='utf-8',
+ encoding="utf-8",
stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT).stdout
+ stderr=subprocess.STDOUT,
+ ).stdout
def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
"""Check that unwinder symbols are hidden in outputs."""
- ndk_build = Path(ndk_path) / 'ndk-build'
+ ndk_build = Path(ndk_path) / "ndk-build"
host = ndk.hosts.get_default_host()
if host.is_windows:
- ndk_build = ndk_build.with_suffix('.cmd')
- project_path = Path('project')
+ ndk_build = ndk_build.with_suffix(".cmd")
+ project_path = Path("project")
ndk_args = [
- f'APP_ABI={config.abi}',
- f'APP_PLATFORM=android-{config.api}',
+ f"APP_ABI={config.abi}",
+ f"APP_PLATFORM=android-{config.api}",
]
subprocess.run(
- [str(ndk_build), '-C', str(project_path)] + ndk_args,
+ [str(ndk_build), "-C", str(project_path)] + ndk_args,
check=True,
stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
+ stderr=subprocess.STDOUT,
+ )
- library = project_path / 'libs' / str(config.abi) / 'libfoo.so'
- readelf_output = readelf(Path(ndk_path), host, library, '-sW')
+ library = project_path / "libs" / str(config.abi) / "libfoo.so"
+ readelf_output = readelf(Path(ndk_path), host, library, "-sW")
for symbol in find_public_unwind_symbols(readelf_output):
- return False, f'Found public unwind symbol: {symbol}'
- return True, ''
+ return False, f"Found public unwind symbol: {symbol}"
+ return True, ""
diff --git a/tests/device/weak_symbols_off_by_default/CMakeLists.txt b/tests/build/weak_symbols_off_by_default/CMakeLists.txt
similarity index 100%
rename from tests/device/weak_symbols_off_by_default/CMakeLists.txt
rename to tests/build/weak_symbols_off_by_default/CMakeLists.txt
diff --git a/tests/device/weak_symbols_off_by_default/jni/Android.mk b/tests/build/weak_symbols_off_by_default/jni/Android.mk
similarity index 100%
rename from tests/device/weak_symbols_off_by_default/jni/Android.mk
rename to tests/build/weak_symbols_off_by_default/jni/Android.mk
diff --git a/tests/device/weak_symbols_off_by_default/jni/Application.mk b/tests/build/weak_symbols_off_by_default/jni/Application.mk
similarity index 100%
rename from tests/device/weak_symbols_off_by_default/jni/Application.mk
rename to tests/build/weak_symbols_off_by_default/jni/Application.mk
diff --git a/tests/build/weak_symbols_off_by_default/jni/weak_symbols.cpp b/tests/build/weak_symbols_off_by_default/jni/weak_symbols.cpp
new file mode 100644
index 0000000..80815ad
--- /dev/null
+++ b/tests/build/weak_symbols_off_by_default/jni/weak_symbols.cpp
@@ -0,0 +1,10 @@
+#include <android/versioning.h>
+
+// Create an unavailable symbol that's set to an availability version
+// higher than any ABI's minimum SDK version.
+extern "C" void AFoo() __INTRODUCED_IN(100);
+
+int main(int, char**) {
+ AFoo();
+ return 0;
+}
diff --git a/tests/device/weak_symbols_off_by_default/test_config.py b/tests/build/weak_symbols_off_by_default/test_config.py
similarity index 100%
rename from tests/device/weak_symbols_off_by_default/test_config.py
rename to tests/build/weak_symbols_off_by_default/test_config.py
diff --git a/tests/device/weak_symbols_unguarded_availability/CMakeLists.txt b/tests/build/weak_symbols_unguarded_availability/CMakeLists.txt
similarity index 100%
rename from tests/device/weak_symbols_unguarded_availability/CMakeLists.txt
rename to tests/build/weak_symbols_unguarded_availability/CMakeLists.txt
diff --git a/tests/device/weak_symbols_unguarded_availability/jni/Android.mk b/tests/build/weak_symbols_unguarded_availability/jni/Android.mk
similarity index 100%
rename from tests/device/weak_symbols_unguarded_availability/jni/Android.mk
rename to tests/build/weak_symbols_unguarded_availability/jni/Android.mk
diff --git a/tests/device/weak_symbols_unguarded_availability/jni/Application.mk b/tests/build/weak_symbols_unguarded_availability/jni/Application.mk
similarity index 100%
rename from tests/device/weak_symbols_unguarded_availability/jni/Application.mk
rename to tests/build/weak_symbols_unguarded_availability/jni/Application.mk
diff --git a/tests/build/weak_symbols_unguarded_availability/jni/weak_symbols.cpp b/tests/build/weak_symbols_unguarded_availability/jni/weak_symbols.cpp
new file mode 100644
index 0000000..80815ad
--- /dev/null
+++ b/tests/build/weak_symbols_unguarded_availability/jni/weak_symbols.cpp
@@ -0,0 +1,10 @@
+#include <android/versioning.h>
+
+// Create an unavailable symbol that's set to an availability version
+// higher than any ABI's minimum SDK version.
+extern "C" void AFoo() __INTRODUCED_IN(100);
+
+int main(int, char**) {
+ AFoo();
+ return 0;
+}
diff --git a/tests/device/weak_symbols_unguarded_availability/test_config.py b/tests/build/weak_symbols_unguarded_availability/test_config.py
similarity index 100%
rename from tests/device/weak_symbols_unguarded_availability/test_config.py
rename to tests/build/weak_symbols_unguarded_availability/test_config.py
diff --git a/tests/build/wrap_sh/project/jni/Application.mk b/tests/build/wrap_sh/project/jni/Application.mk
index 5622b10..470969c 100644
--- a/tests/build/wrap_sh/project/jni/Application.mk
+++ b/tests/build/wrap_sh/project/jni/Application.mk
@@ -1,4 +1,5 @@
APP_WRAP_SH_armeabi-v7a := armeabi-v7a.sh
APP_WRAP_SH_arm64-v8a := arm64-v8a.sh
+APP_WRAP_SH_riscv64 := riscv64.sh
APP_WRAP_SH_x86:= x86.sh
APP_WRAP_SH_x86_64:= x86_64.sh
diff --git a/tests/build/wrap_sh/project/jni/riscv64.sh b/tests/build/wrap_sh/project/jni/riscv64.sh
new file mode 100644
index 0000000..52f1642
--- /dev/null
+++ b/tests/build/wrap_sh/project/jni/riscv64.sh
@@ -0,0 +1 @@
+riscv64
diff --git a/tests/build/wrap_sh/test.py b/tests/build/wrap_sh/test.py
index 6cf9744..5ea1057 100644
--- a/tests/build/wrap_sh/test.py
+++ b/tests/build/wrap_sh/test.py
@@ -19,37 +19,42 @@
import subprocess
import sys
import textwrap
+from pathlib import Path
from ndk.test.spec import BuildConfiguration
def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
"""Checks that the proper wrap.sh scripts were installed."""
- ndk_build = os.path.join(ndk_path, 'ndk-build')
- if sys.platform == 'win32':
- ndk_build += '.cmd'
- project_path = 'project'
+ ndk_build = os.path.join(ndk_path, "ndk-build")
+ if sys.platform == "win32":
+ ndk_build += ".cmd"
+ project_path = Path("project")
ndk_args = [
- f'APP_ABI={config.abi}',
- f'APP_PLATFORM=android-{config.api}',
+ f"APP_ABI={config.abi}",
+ f"APP_PLATFORM=android-{config.api}",
]
- proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
- encoding='utf-8')
+ proc = subprocess.Popen(
+ [ndk_build, "-C", str(project_path)] + ndk_args,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ encoding="utf-8",
+ )
out, _ = proc.communicate()
if proc.returncode != 0:
return proc.returncode == 0, out
- wrap_sh = os.path.join(project_path, 'libs', config.abi, 'wrap.sh')
- if not os.path.exists(wrap_sh):
- return False, f'{wrap_sh} does not exist'
+ wrap_sh = project_path / "libs" / config.abi / "wrap.sh"
+ if not wrap_sh.exists():
+ return False, f"{wrap_sh} does not exist"
- with open(wrap_sh) as wrap_sh_file:
- contents = wrap_sh_file.read().strip()
+ contents = wrap_sh.read_text(encoding="utf-8").strip()
if contents != config.abi:
- return False, textwrap.dedent(f"""\
+ return False, textwrap.dedent(
+ f"""\
wrap.sh file had wrong contents:
Expected: {config.abi}
- Actual: {contents}""")
+ Actual: {contents}"""
+ )
- return True, ''
+ return True, ""
diff --git a/tests/build/wrap_sh_generic/test.py b/tests/build/wrap_sh_generic/test.py
index b9d8f9a..27058ae 100644
--- a/tests/build/wrap_sh_generic/test.py
+++ b/tests/build/wrap_sh_generic/test.py
@@ -19,37 +19,42 @@
import subprocess
import sys
import textwrap
+from pathlib import Path
from ndk.test.spec import BuildConfiguration
def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
"""Checks that the proper wrap.sh scripts were installed."""
- ndk_build = os.path.join(ndk_path, 'ndk-build')
- if sys.platform == 'win32':
- ndk_build += '.cmd'
- project_path = 'project'
+ ndk_build = os.path.join(ndk_path, "ndk-build")
+ if sys.platform == "win32":
+ ndk_build += ".cmd"
+ project_path = Path("project")
ndk_args = [
- f'APP_ABI={config.abi}',
- f'APP_PLATFORM=android-{config.api}',
+ f"APP_ABI={config.abi}",
+ f"APP_PLATFORM=android-{config.api}",
]
- proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
- encoding='utf-8')
+ proc = subprocess.Popen(
+ [ndk_build, "-C", str(project_path)] + ndk_args,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ encoding="utf-8",
+ )
out, _ = proc.communicate()
if proc.returncode != 0:
return proc.returncode == 0, out
- wrap_sh = os.path.join(project_path, 'libs', config.abi, 'wrap.sh')
- if not os.path.exists(wrap_sh):
- return False, '{} does not exist'.format(wrap_sh)
+ wrap_sh = project_path / "libs" / config.abi / "wrap.sh"
+ if not wrap_sh.exists():
+ return False, "{} does not exist".format(wrap_sh)
- with open(wrap_sh) as wrap_sh_file:
- contents = wrap_sh_file.read().strip()
- if contents != 'generic':
- return False, textwrap.dedent(f"""\
+ contents = wrap_sh.read_text(encoding="utf-8").strip()
+ if contents != "generic":
+ return False, textwrap.dedent(
+ f"""\
{config.abi} wrap.sh file had wrong contents:
Expected: generic
- Actual: {contents}""")
+ Actual: {contents}"""
+ )
- return True, ''
+ return True, ""
diff --git a/tests/build/wrap_sh_none/test.py b/tests/build/wrap_sh_none/test.py
index 8611614..e688333 100644
--- a/tests/build/wrap_sh_none/test.py
+++ b/tests/build/wrap_sh_none/test.py
@@ -23,22 +23,25 @@
def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
"""Checks that the proper wrap.sh scripts were installed."""
- ndk_build = os.path.join(ndk_path, 'ndk-build')
- if sys.platform == 'win32':
- ndk_build += '.cmd'
- project_path = 'project'
+ ndk_build = os.path.join(ndk_path, "ndk-build")
+ if sys.platform == "win32":
+ ndk_build += ".cmd"
+ project_path = "project"
ndk_args = [
- f'APP_ABI={config.abi}',
- f'APP_PLATFORM=android-{config.api}',
+ f"APP_ABI={config.abi}",
+ f"APP_PLATFORM=android-{config.api}",
]
- proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
- encoding='utf-8')
+ proc = subprocess.Popen(
+ [ndk_build, "-C", project_path] + ndk_args,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ encoding="utf-8",
+ )
out, _ = proc.communicate()
if proc.returncode != 0:
return proc.returncode == 0, out
- wrap_sh = os.path.join(project_path, 'libs', config.abi, 'wrap.sh')
+ wrap_sh = os.path.join(project_path, "libs", config.abi, "wrap.sh")
if os.path.exists(wrap_sh):
- return False, '{} should not exist'.format(wrap_sh)
- return True, ''
+ return False, "{} should not exist".format(wrap_sh)
+ return True, ""
diff --git a/tests/device/asan-smoke/jni/asan_oob_test.cc b/tests/device/asan-smoke/jni/asan_oob_test.cc
index 5228f36..18db3e7 100644
--- a/tests/device/asan-smoke/jni/asan_oob_test.cc
+++ b/tests/device/asan-smoke/jni/asan_oob_test.cc
@@ -6,10 +6,20 @@
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
+#include <stdint.h>
#include <stdio.h>
+#include <stdlib.h>
+
+#include <string>
#include <gtest/gtest.h>
+#if __LP64__ || defined(_WIN64)
+# define SANITIZER_WORDSIZE 64
+#else
+# define SANITIZER_WORDSIZE 32
+#endif
+
#define NOINLINE __attribute__((noinline))
typedef uint8_t U1;
@@ -26,6 +36,16 @@
#endif
}
+// This function returns its parameter but in such a way that compiler
+// can not prove it.
+template<class T>
+NOINLINE
+static T Ident(T t) {
+ T ret = t;
+ break_optimization(&ret);
+ return ret;
+}
+
NOINLINE void *malloc_fff(size_t size) {
void *res = malloc/**/(size); break_optimization(0); return res;}
NOINLINE void *malloc_eee(size_t size) {
@@ -48,6 +68,14 @@
*a = 0;
}
+NOINLINE void asan_write_sized_aligned(uint8_t *p, size_t size) {
+ EXPECT_EQ(0U, ((uintptr_t)p % size));
+ if (size == 1) asan_write((uint8_t*)p);
+ else if (size == 2) asan_write((uint16_t*)p);
+ else if (size == 4) asan_write((uint32_t*)p);
+ else if (size == 8) asan_write((uint64_t*)p);
+}
+
template<typename T>
NOINLINE void oob_test(int size, int off) {
char *p = (char*)malloc_aaa(size);
@@ -57,37 +85,53 @@
free_aaa(p);
}
+static std::string GetLeftOOBMessage(int off) {
+ char str[100];
+ sprintf(str, "is located.*%d byte.*before", off);
+ return str;
+}
+
+static std::string GetRightOOBMessage(int off) {
+ char str[100];
+#if !defined(_WIN32)
+ // FIXME: Fix PR42868 and remove SEGV match.
+ sprintf(str, "is located.*%d byte.*after|SEGV", off);
+#else
+ // `|` doesn't work in googletest's regexes on Windows,
+ // see googletest/docs/advanced.md#regular-expression-syntax
+ // But it's not needed on Windows anyways.
+ sprintf(str, "is located.*%d byte.*after", off);
+#endif
+ return str;
+}
+
template<typename T>
void OOBTest() {
- char expected_str[100];
for (int size = sizeof(T); size < 20; size += 5) {
- for (int i = -5; i < 0; i++) {
- const char *str =
- "is located.*%d byte.*to the left";
- sprintf(expected_str, str, abs(i));
- EXPECT_DEATH(oob_test<T>(size, i), expected_str);
- }
+ for (int i = -5; i < 0; i++)
+ EXPECT_DEATH(oob_test<T>(size, i), GetLeftOOBMessage(-i));
for (int i = 0; i < (int)(size - sizeof(T) + 1); i++)
oob_test<T>(size, i);
for (int i = size - sizeof(T) + 1; i <= (int)(size + 2 * sizeof(T)); i++) {
- const char *str =
- "is located.*%d byte.*to the right";
- int off = i >= size ? (i - size) : 0;
// we don't catch unaligned partially OOB accesses.
if (i % sizeof(T)) continue;
- sprintf(expected_str, str, off);
- EXPECT_DEATH(oob_test<T>(size, i), expected_str);
+ int off = i >= size ? (i - size) : 0;
+ EXPECT_DEATH(oob_test<T>(size, i), GetRightOOBMessage(off));
}
}
- EXPECT_DEATH(oob_test<T>(kLargeMalloc, -1),
- "is located.*1 byte.*to the left");
- EXPECT_DEATH(oob_test<T>(kLargeMalloc, kLargeMalloc),
- "is located.*0 byte.*to the right");
+ EXPECT_DEATH(oob_test<T>(kLargeMalloc, -1), GetLeftOOBMessage(1));
+ EXPECT_DEATH(oob_test<T>(kLargeMalloc, kLargeMalloc), GetRightOOBMessage(0));
}
+// TODO(glider): the following tests are EXTREMELY slow on Darwin:
+// AddressSanitizer.OOB_char (125503 ms)
+// AddressSanitizer.OOB_int (126890 ms)
+// AddressSanitizer.OOBRightTest (315605 ms)
+// AddressSanitizer.SimpleStackTest (366559 ms)
+
TEST(AddressSanitizer, OOB_char) {
OOBTest<U1>();
}
@@ -95,3 +139,52 @@
TEST(AddressSanitizer, OOB_int) {
OOBTest<U4>();
}
+
+TEST(AddressSanitizer, OOBRightTest) {
+ size_t max_access_size = SANITIZER_WORDSIZE == 64 ? 8 : 4;
+ for (size_t access_size = 1; access_size <= max_access_size;
+ access_size *= 2) {
+ for (size_t alloc_size = 1; alloc_size <= 8; alloc_size++) {
+ for (size_t offset = 0; offset <= 8; offset += access_size) {
+ void *p = malloc(alloc_size);
+ // allocated: [p, p + alloc_size)
+ // accessed: [p + offset, p + offset + access_size)
+ uint8_t *addr = (uint8_t*)p + offset;
+ if (offset + access_size <= alloc_size) {
+ asan_write_sized_aligned(addr, access_size);
+ } else {
+ int outside_bytes = offset > alloc_size ? (offset - alloc_size) : 0;
+ EXPECT_DEATH(asan_write_sized_aligned(addr, access_size),
+ GetRightOOBMessage(outside_bytes));
+ }
+ free(p);
+ }
+ }
+ }
+}
+
+TEST(AddressSanitizer, LargeOOBRightTest) {
+ size_t large_power_of_two = 1 << 19;
+ for (size_t i = 16; i <= 256; i *= 2) {
+ size_t size = large_power_of_two - i;
+ char *p = Ident(new char[size]);
+ EXPECT_DEATH(p[size] = 0, GetRightOOBMessage(0));
+ delete [] p;
+ }
+}
+
+TEST(AddressSanitizer, DISABLED_DemoOOBLeftLow) {
+ oob_test<U1>(10, -1);
+}
+
+TEST(AddressSanitizer, DISABLED_DemoOOBLeftHigh) {
+ oob_test<U1>(kLargeMalloc, -1);
+}
+
+TEST(AddressSanitizer, DISABLED_DemoOOBRightLow) {
+ oob_test<U1>(10, 10);
+}
+
+TEST(AddressSanitizer, DISABLED_DemoOOBRightHigh) {
+ oob_test<U1>(kLargeMalloc, kLargeMalloc);
+}
diff --git a/tests/device/asan-smoke/test_config.py b/tests/device/asan-smoke/test_config.py
index 8c739f4..c03cc5e 100644
--- a/tests/device/asan-smoke/test_config.py
+++ b/tests/device/asan-smoke/test_config.py
@@ -1,5 +1,6 @@
from optparse import Option
from typing import Optional
+
from ndk.test.devices import Device
from ndk.test.devicetest.case import TestCase
diff --git a/tests/device/b8708181-Vector4/test_config.py b/tests/device/b8708181-Vector4/test_config.py
new file mode 100644
index 0000000..1f99a58
--- /dev/null
+++ b/tests/device/b8708181-Vector4/test_config.py
@@ -0,0 +1,20 @@
+from typing import Optional
+
+from ndk.test.devicetest.case import TestCase
+
+
+def build_unsupported(test: TestCase) -> Optional[str]:
+ # Validate if vector types allocate the proper amount of alignment on
+ # architectures that support such instructions, when returning large
+ # composite types.
+ #
+ # Some architectures, like 'riscv64' may be excluded if they employ
+ # sizeless types. In this case, the vector types are incomplete and
+ # cannot be members of unions, classes or structures and must have
+ # automatic storage duration. As this particular test requires returning
+ # a large composite type and we cannot compose types with other sizeless
+ # types, this test can be skipped for the architecture.
+ if test.config.abi not in ("armeabi-v7a", "x86", "arm64-v8a", "x86_64"):
+ return test.config.abi
+
+ return None
diff --git a/tests/device/clone/jni/Android.mk b/tests/device/clone/jni/Android.mk
deleted file mode 100644
index 3fc68eb..0000000
--- a/tests/device/clone/jni/Android.mk
+++ /dev/null
@@ -1,12 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := clone
-LOCAL_SRC_FILES := clone.c
-include $(BUILD_EXECUTABLE)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := clone-static
-LOCAL_SRC_FILES := clone.c
-LOCAL_LDFLAGS += -static
-include $(BUILD_EXECUTABLE)
diff --git a/tests/device/clone/jni/Application.mk b/tests/device/clone/jni/Application.mk
deleted file mode 100644
index b448d58..0000000
--- a/tests/device/clone/jni/Application.mk
+++ /dev/null
@@ -1,2 +0,0 @@
-APP_ABI := all
-APP_PLATFORM := android-9
diff --git a/tests/device/clone/jni/clone.c b/tests/device/clone/jni/clone.c
deleted file mode 100644
index 266be58..0000000
--- a/tests/device/clone/jni/clone.c
+++ /dev/null
@@ -1,44 +0,0 @@
-#include <stdio.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <linux/sched.h>
-#include <stdlib.h>
-
-int v, fd;
-
-int child_proc()
-{
- v = 42;
- close(fd);
- exit(0);
-}
-
-#define STACK_SIZE 1024
-
-int main(int argc, char *argv[])
-{
- void **child_stack;
- char ch;
-
- v = 9;
- fd = open(argv[0], O_RDONLY);
- if (read(fd, &ch, 1) < 1) {
- printf("Can't read file");
- exit(1);
- }
- child_stack = (void **) malloc(STACK_SIZE * sizeof(void *));
- printf("v = %d\n", v);
-
- clone(child_proc, child_stack + STACK_SIZE, CLONE_VM|CLONE_FILES, NULL);
- sleep(1);
-
- printf("v = %d\n", v);
- if (read(fd, &ch, 1) < 1) {
- printf("Can't read file because it's closed by child.\n");
- return 0;
- } else {
- printf("We shouldn't be able to read from file which is closed by child.\n");
- return 0;
- }
-}
-
diff --git a/tests/device/clone/test_config.py b/tests/device/clone/test_config.py
deleted file mode 100644
index ab601fa..0000000
--- a/tests/device/clone/test_config.py
+++ /dev/null
@@ -1,4 +0,0 @@
-def build_unsupported(test):
- if test.config.abi == "x86" and test.config.api < 17:
- return test.config.abi
- return None
diff --git a/tests/device/emutls-dealloc/test_config.py b/tests/device/emutls-dealloc/test_config.py
index cd33f8e..1059c13 100644
--- a/tests/device/emutls-dealloc/test_config.py
+++ b/tests/device/emutls-dealloc/test_config.py
@@ -1,4 +1,5 @@
from typing import Optional
+
from ndk.abis import Abi
from ndk.test.devices import Device
from ndk.test.devicetest.case import TestCase
diff --git a/tests/device/fuzzer/test_config.py b/tests/device/fuzzer/test_config.py
index 5061311..d675642 100644
--- a/tests/device/fuzzer/test_config.py
+++ b/tests/device/fuzzer/test_config.py
@@ -1,7 +1,7 @@
from typing import Optional
-from ndk.test.devices import DeviceConfig
from ndk.test.buildtest.case import Test
+from ndk.test.devices import DeviceConfig
def run_unsupported(test: Test, _device: DeviceConfig) -> Optional[str]:
diff --git a/tests/device/hwasan-smoke/CMakeLists.txt b/tests/device/hwasan-smoke/CMakeLists.txt
new file mode 100644
index 0000000..35e35ee
--- /dev/null
+++ b/tests/device/hwasan-smoke/CMakeLists.txt
@@ -0,0 +1,17 @@
+cmake_minimum_required(VERSION 3.6)
+project(CMakeDefaultFlagsTest CXX)
+
+set(GTEST_PATH "${ANDROID_NDK}/sources/third_party/googletest")
+
+add_library(hwasan-smoke-cmake SHARED
+ ${GTEST_PATH}/src/gtest-all.cc
+ jni/hwasan_oob_test.cc)
+
+add_executable(hwasan-smoke-cmake_exe
+ jni/hwasan_oob_test.cc
+ ${GTEST_PATH}/src/gtest-all.cc
+ ${GTEST_PATH}/src/gtest_main.cc
+)
+
+target_include_directories(hwasan-smoke-cmake PRIVATE ${GTEST_PATH}/include ${GTEST_PATH})
+target_include_directories(hwasan-smoke-cmake_exe PRIVATE ${GTEST_PATH}/include ${GTEST_PATH})
diff --git a/tests/device/hwasan-smoke/jni/Android.mk b/tests/device/hwasan-smoke/jni/Android.mk
new file mode 100644
index 0000000..106b78f
--- /dev/null
+++ b/tests/device/hwasan-smoke/jni/Android.mk
@@ -0,0 +1,13 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := hwasan_smoke
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := hwasan_oob_test.cc
+LOCAL_CFLAGS := -fsanitize=hwaddress -fno-omit-frame-pointer
+# Remove -Wl,-dynamic-linker once https://reviews.llvm.org/D151388 makes it into NDK
+LOCAL_LDFLAGS := -fsanitize=hwaddress -Wl,-dynamic-linker,/system/bin/linker_hwasan64
+LOCAL_STATIC_LIBRARIES := googletest_main
+include $(BUILD_EXECUTABLE)
+
+$(call import-module,third_party/googletest)
diff --git a/tests/device/hwasan-smoke/jni/Application.mk b/tests/device/hwasan-smoke/jni/Application.mk
new file mode 100644
index 0000000..a506e41
--- /dev/null
+++ b/tests/device/hwasan-smoke/jni/Application.mk
@@ -0,0 +1,3 @@
+APP_ABI := arm64-v8a
+APP_STL := c++_shared
+APP_PLATFORM := android-34
diff --git a/tests/device/hwasan-smoke/jni/hwasan_oob_test.cc b/tests/device/hwasan-smoke/jni/hwasan_oob_test.cc
new file mode 100644
index 0000000..69bbf5e
--- /dev/null
+++ b/tests/device/hwasan-smoke/jni/hwasan_oob_test.cc
@@ -0,0 +1,23 @@
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#include <string>
+
+#include <gtest/gtest.h>
+
+#if !defined(__aarch64__)
+#error "HWASan is only supported on AArch64."
+#endif
+
+#if !__has_feature(hwaddress_sanitizer)
+#error "Want HWASan build"
+#endif
+
+
+TEST(HWAddressSanitizer, OOB) {
+ EXPECT_DEATH({
+ volatile char* x = const_cast<volatile char*>(reinterpret_cast<char*>(malloc(1)));
+ x[1] = '2';
+ }, ".*HWAddressSanitizer.*");
+}
diff --git a/tests/device/hwasan-smoke/test_config.py b/tests/device/hwasan-smoke/test_config.py
new file mode 100644
index 0000000..2908595
--- /dev/null
+++ b/tests/device/hwasan-smoke/test_config.py
@@ -0,0 +1,27 @@
+from ndk.test.devices import DeviceConfig
+from ndk.test.devicetest.case import TestCase
+
+
+def build_unsupported(test: TestCase) -> str | None:
+ if test.config.abi != "arm64-v8a":
+ return f"{test.config.abi}"
+ return None
+
+
+def run_unsupported(test: TestCase, device: DeviceConfig) -> str | None:
+ if device.version < 34:
+ return f"{device.version}"
+ return None
+
+
+def run_broken(test: TestCase, device: DeviceConfig) -> tuple[str | None, str | None]:
+ # FIXME: support c++_shared tests for cmake and re-enable
+ # currently the c++ library is not properly pushed so the
+ # test fails to link
+ if test.build_system == "cmake":
+ return f"{test.build_system}", "https://github.com/android/ndk/issues/1942"
+ return None, None
+
+
+def extra_cmake_flags() -> list[str]:
+ return ["-DANDROID_SANITIZE=hwaddress", "-DANDROID_STL=c++_shared"]
diff --git a/tests/device/issue19851-sigsetjmp/jni/issue19851-sigsetjmp.c b/tests/device/issue19851-sigsetjmp/jni/issue19851-sigsetjmp.c
index 49d35ee..4ffeefb 100644
--- a/tests/device/issue19851-sigsetjmp/jni/issue19851-sigsetjmp.c
+++ b/tests/device/issue19851-sigsetjmp/jni/issue19851-sigsetjmp.c
@@ -17,6 +17,7 @@
#include <stdio.h>
#include <signal.h>
#include <setjmp.h>
+#include <unistd.h>
static sigjmp_buf sbuf;
diff --git a/tests/device/memtag-smoke/CMakeLists.txt b/tests/device/memtag-smoke/CMakeLists.txt
new file mode 100644
index 0000000..d8b1b90
--- /dev/null
+++ b/tests/device/memtag-smoke/CMakeLists.txt
@@ -0,0 +1,17 @@
+cmake_minimum_required(VERSION 3.6)
+project(CMakeDefaultFlagsTest CXX)
+
+set(GTEST_PATH "${ANDROID_NDK}/sources/third_party/googletest")
+
+add_library(mte-smoke-cmake SHARED
+ ${GTEST_PATH}/src/gtest-all.cc
+ jni/mte_oob_test.cc)
+
+add_executable(mte-smoke-cmake_exe
+ jni/mte_oob_test.cc
+ ${GTEST_PATH}/src/gtest-all.cc
+ ${GTEST_PATH}/src/gtest_main.cc
+)
+
+target_include_directories(mte-smoke-cmake PRIVATE ${GTEST_PATH}/include ${GTEST_PATH})
+target_include_directories(mte-smoke-cmake_exe PRIVATE ${GTEST_PATH}/include ${GTEST_PATH})
diff --git a/tests/device/memtag-smoke/jni/Android.mk b/tests/device/memtag-smoke/jni/Android.mk
new file mode 100644
index 0000000..89d8fae
--- /dev/null
+++ b/tests/device/memtag-smoke/jni/Android.mk
@@ -0,0 +1,12 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := mte_smoke
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := mte_oob_test.cc
+LOCAL_CFLAGS := -fsanitize=memtag-stack -march=armv8-a+memtag -fno-omit-frame-pointer
+LOCAL_LDFLAGS := -fsanitize=memtag-stack,memtag-heap -fsanitize-memtag-mode=sync -march=armv8-a+memtag
+LOCAL_STATIC_LIBRARIES := googletest_main
+include $(BUILD_EXECUTABLE)
+
+$(call import-module,third_party/googletest)
diff --git a/tests/device/memtag-smoke/jni/Application.mk b/tests/device/memtag-smoke/jni/Application.mk
new file mode 100644
index 0000000..a506e41
--- /dev/null
+++ b/tests/device/memtag-smoke/jni/Application.mk
@@ -0,0 +1,3 @@
+APP_ABI := arm64-v8a
+APP_STL := c++_shared
+APP_PLATFORM := android-34
diff --git a/tests/device/memtag-smoke/jni/mte_oob_test.cc b/tests/device/memtag-smoke/jni/mte_oob_test.cc
new file mode 100644
index 0000000..a2b46dd
--- /dev/null
+++ b/tests/device/memtag-smoke/jni/mte_oob_test.cc
@@ -0,0 +1,20 @@
+#include <stdlib.h>
+
+#include <gtest/gtest.h>
+
+#if !defined(__aarch64__)
+#error "MTE is only supported on AArch64."
+#endif
+
+#if !__has_feature(memtag_stack)
+#error "Want MTE build"
+#endif
+
+
+TEST(Memtag, OOB) {
+ // Cannot assert the death message, because it doesn't get printed to stderr.
+ EXPECT_DEATH({
+ volatile char* x = const_cast<volatile char*>(reinterpret_cast<char*>(malloc(16)));
+ x[17] = '2';
+ }, "");
+}
diff --git a/tests/device/memtag-smoke/test_config.py b/tests/device/memtag-smoke/test_config.py
new file mode 100644
index 0000000..daccef7
--- /dev/null
+++ b/tests/device/memtag-smoke/test_config.py
@@ -0,0 +1,24 @@
+from ndk.test.devices import DeviceConfig
+from ndk.test.devicetest.case import TestCase
+
+
+def build_unsupported(test: TestCase) -> str | None:
+ if test.config.abi != "arm64-v8a":
+ return f"{test.config.abi}"
+ return None
+
+
+def run_unsupported(test: TestCase, device: DeviceConfig) -> str | None:
+ if device.version < 34:
+ return f"{device.version}"
+ if not device.supports_mte:
+ return "MTE not enabled"
+ return None
+
+
+def run_broken(test: TestCase, device: DeviceConfig) -> tuple[str | None, str | None]:
+ return None, None
+
+
+def extra_cmake_flags() -> list[str]:
+ return ["-DANDROID_SANITIZE=memtag"]
diff --git a/tests/device/openmp/jni/fib.c b/tests/device/openmp/jni/fib.c
index fb3e3e1..ffe316b 100644
--- a/tests/device/openmp/jni/fib.c
+++ b/tests/device/openmp/jni/fib.c
@@ -1,8 +1,11 @@
+#include <math.h>
#include <stdio.h>
#include <stdlib.h>
-#include <math.h>
-#include <omp.h>
+#include <sys/time.h>
#include <unistd.h>
+
+#include <omp.h>
+
#define MAX 33 //41
int Fibonacci(int n)
{ int x, y;
@@ -31,7 +34,7 @@
struct timeval time_start, time_end;
int i = 0;
// openmp related print message
- printf("CPU_ONLN= %d\n", sysconf(_SC_NPROCESSORS_ONLN));
+ printf("CPU_ONLN= %ld\n", sysconf(_SC_NPROCESSORS_ONLN));
printf("Number of CPUs=%d\n", omp_get_num_procs());
printf("Number of max threads=%d\n", omp_get_max_threads());
printf("Number of executing thread=%d\n", omp_get_thread_num());
diff --git a/tests/device/openmp/jni/openmp2.c b/tests/device/openmp/jni/openmp2.c
index 5ec15c9..1e05817 100644
--- a/tests/device/openmp/jni/openmp2.c
+++ b/tests/device/openmp/jni/openmp2.c
@@ -6,8 +6,8 @@
int main (int argc, char *argv[])
{
int nthreads, tid;
- printf("SC_NPROCESSORS_ONLN: %d\n", sysconf (_SC_NPROCESSORS_ONLN));
- printf("SC_NPROCESSORS_CONF: %d\n", sysconf (_SC_NPROCESSORS_CONF));
+ printf("SC_NPROCESSORS_ONLN: %ld\n", sysconf (_SC_NPROCESSORS_ONLN));
+ printf("SC_NPROCESSORS_CONF: %ld\n", sysconf (_SC_NPROCESSORS_CONF));
#pragma omp parallel default(shared) private(nthreads, tid)
/* Fork a team of threads giving them their own copies of variables */
{
diff --git a/tests/device/static-executable-exceptions/jni/Application.mk b/tests/device/static-executable-exceptions/jni/Application.mk
index 2133d20..9ec531a 100644
--- a/tests/device/static-executable-exceptions/jni/Application.mk
+++ b/tests/device/static-executable-exceptions/jni/Application.mk
@@ -1 +1 @@
-APP_PLATFORM := android-21
+APP_PLATFORM := latest
diff --git a/tests/device/static-executable-exceptions/test_config.py b/tests/device/static-executable-exceptions/test_config.py
index 9eea1ae..59c5cab 100644
--- a/tests/device/static-executable-exceptions/test_config.py
+++ b/tests/device/static-executable-exceptions/test_config.py
@@ -1,7 +1,13 @@
-def build_unsupported(test):
- # Static executables with libc++ require targeting a new enough API level
- # to not need libandroid_support.
- if test.config.api < 21:
- return f"android-{test.config.api}"
+import ndk.abis
+from ndk.test.buildtest.case import Test
- return None
+
+def extra_cmake_flags() -> list[str]:
+ # Required for static executables.
+ return ["-DANDROID_PLATFORM=latest"]
+
+
+def override_runtime_minsdkversion(test: Test) -> int | None:
+ # We build as latest because static executables require that, but static executables
+ # are compatible with old OS versions.
+ return ndk.abis.min_api_for_abi(test.config.abi)
diff --git a/tests/device/static-executable/jni/Application.mk b/tests/device/static-executable/jni/Application.mk
new file mode 100644
index 0000000..9ec531a
--- /dev/null
+++ b/tests/device/static-executable/jni/Application.mk
@@ -0,0 +1 @@
+APP_PLATFORM := latest
diff --git a/tests/device/static-executable/test_config.py b/tests/device/static-executable/test_config.py
index 481aeca..59c5cab 100644
--- a/tests/device/static-executable/test_config.py
+++ b/tests/device/static-executable/test_config.py
@@ -1,5 +1,13 @@
-def extra_cmake_flags():
- # Match the ndk-build test. Using libc++ here would require us to target a
- # newer API level since static executables and libandroid_support don't
- # mix.
- return ["-DANDROID_STL=system"]
+import ndk.abis
+from ndk.test.buildtest.case import Test
+
+
+def extra_cmake_flags() -> list[str]:
+ # Required for static executables.
+ return ["-DANDROID_PLATFORM=latest"]
+
+
+def override_runtime_minsdkversion(test: Test) -> int | None:
+ # We build as latest because static executables require that, but static executables
+ # are compatible with old OS versions.
+ return ndk.abis.min_api_for_abi(test.config.abi)
diff --git a/tests/device/static_exe_lto/jni/Android.mk b/tests/device/static_exe_lto/jni/Android.mk
new file mode 100644
index 0000000..1e3e0d3
--- /dev/null
+++ b/tests/device/static_exe_lto/jni/Android.mk
@@ -0,0 +1,8 @@
+# Regression test for https://github.com/android/ndk/issues/1461.
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := foo
+LOCAL_SRC_FILES := foo.cpp
+LOCAL_LDFLAGS := -static -flto
+include $(BUILD_EXECUTABLE)
diff --git a/tests/device/static_exe_lto/jni/foo.cpp b/tests/device/static_exe_lto/jni/foo.cpp
new file mode 100644
index 0000000..5b7bef5
--- /dev/null
+++ b/tests/device/static_exe_lto/jni/foo.cpp
@@ -0,0 +1,21 @@
+#include <stdio.h>
+
+static bool global_ctor_called = false;
+
+struct SideEffectClass {
+ SideEffectClass() {
+ global_ctor_called = true;
+ }
+};
+
+static SideEffectClass global{};
+
+int main(int, char**) {
+ // Regression test for https://github.com/android/ndk/issues/1461. Without the
+ // fix, the global constructor will not have been called.
+ if (!global_ctor_called) {
+ fprintf(stderr, "Global constructor was not called before main\n");
+ return 1;
+ }
+ return 0;
+}
\ No newline at end of file
diff --git a/tests/device/test-cpufeatures/jni/test_arm_idiv.c b/tests/device/test-cpufeatures/jni/test_arm_idiv.c
index bc115f2..7f64e5a 100644
--- a/tests/device/test-cpufeatures/jni/test_arm_idiv.c
+++ b/tests/device/test-cpufeatures/jni/test_arm_idiv.c
@@ -13,11 +13,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-#include <cpu-features.h>
+#include <memory.h>
#include <setjmp.h>
#include <stdio.h>
#include <signal.h>
+#include <cpu-features.h>
+
#ifndef __arm__
#error "Only compile this file for an ARM target"
#endif
diff --git a/tests/device/tradefed-template.xml b/tests/device/tradefed-template.xml
deleted file mode 100644
index 1f00fb5..0000000
--- a/tests/device/tradefed-template.xml
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version='1.0' encoding='utf-8'?>
-<configuration> <!-- Description will be filled in automatically. -->
- <option name="test-suite-tag" value="ndk-tests" />
- <object type="module_controller" class="com.android.tradefed.testtype.suite.module.ArchModuleController">
- <!-- <option name="arch" ...> will be filled in automatically. -->
- </object>
- <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer">
- <!-- <option name="push-file" ...> will be filled in automatically. -->
- <!-- <option name="post-push" ...> will be filled in automatically. -->
- <option name="cleanup" value="true" />
- <option name="skip-abi-filtering" value="true" />
- </target_preparer>
- <test class="com.android.tradefed.testtype.binary.ExecutableTargetTest">
- <option name="skip-binary-check" value="true" />
- <!-- <option name="test-command-line" ...> will be filled in automatically. -->
- </test>
-</configuration>
\ No newline at end of file
diff --git a/tests/device/tsan_smoke/test_config.py b/tests/device/tsan_smoke/test_config.py
index 4071f83..d31cf01 100644
--- a/tests/device/tsan_smoke/test_config.py
+++ b/tests/device/tsan_smoke/test_config.py
@@ -1,8 +1,18 @@
+from ndk.test.devices import Device
+from ndk.test.devicetest.case import TestCase
+
+
def build_unsupported(test):
- if test.config.is_lp32:
+ # TODO(https://github.com/google/android-riscv64/issues/104): Add TSAN when it
+ # builds for RISCV64.
+ if test.config.is_lp32 or test.config.abi == "riscv64":
return test.config.abi
return None
+def run_unsupported(test: TestCase, device: Device) -> str | None:
+ return "runs indefinitely with latest clang"
+
+
def run_broken(test, device):
return "all", "https://github.com/android/ndk/issues/1171"
diff --git a/tests/device/weak_symbols/test_config.py b/tests/device/weak_symbols/test_config.py
new file mode 100644
index 0000000..d3387f6
--- /dev/null
+++ b/tests/device/weak_symbols/test_config.py
@@ -0,0 +1,8 @@
+from ndk.test.spec import WeakSymbolsConfig
+
+
+def build_unsupported(test) -> WeakSymbolsConfig | None:
+ # skip this test to avoid redefining __ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__
+ if test.config.weak_symbol == WeakSymbolsConfig.WeakAPI:
+ return test.config.weak_symbol
+ return None
diff --git a/tests/device/weak_symbols_build_support/test_config.py b/tests/device/weak_symbols_build_support/test_config.py
index 23e9863..719e372 100644
--- a/tests/device/weak_symbols_build_support/test_config.py
+++ b/tests/device/weak_symbols_build_support/test_config.py
@@ -1,2 +1,12 @@
+from ndk.test.spec import WeakSymbolsConfig
+
+
def extra_cmake_flags() -> list[str]:
return ["-DANDROID_WEAK_API_DEFS=ON"]
+
+
+def build_unsupported(test) -> WeakSymbolsConfig | None:
+ # skip this test to avoid redefining __ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__
+ if test.config.weak_symbol == WeakSymbolsConfig.WeakAPI:
+ return test.config.weak_symbol
+ return None
diff --git a/tests/device/weak_symbols_off_by_default/jni/weak_symbols.cpp b/tests/device/weak_symbols_off_by_default/jni/weak_symbols.cpp
deleted file mode 100644
index bc596e9..0000000
--- a/tests/device/weak_symbols_off_by_default/jni/weak_symbols.cpp
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <android/trace.h>
-
-int main(int, char**) {
- ATrace_beginAsyncSection("ndk::asyncBeginEndSection", 0);
- return 0;
-}
\ No newline at end of file
diff --git a/tests/device/weak_symbols_unguarded_availability/jni/weak_symbols.cpp b/tests/device/weak_symbols_unguarded_availability/jni/weak_symbols.cpp
deleted file mode 100644
index bc596e9..0000000
--- a/tests/device/weak_symbols_unguarded_availability/jni/weak_symbols.cpp
+++ /dev/null
@@ -1,6 +0,0 @@
-#include <android/trace.h>
-
-int main(int, char**) {
- ATrace_beginAsyncSection("ndk::asyncBeginEndSection", 0);
- return 0;
-}
\ No newline at end of file
diff --git a/tests/libc++/test/README.md b/tests/libc++/test/README.md
deleted file mode 100644
index 39eacb2..0000000
--- a/tests/libc++/test/README.md
+++ /dev/null
@@ -1,2 +0,0 @@
-This is a fake test directory so we can add the libc++ test runner to the NDK
-test harness. The real tests are contained in the libc++ packaged in the NDK.
diff --git a/tests/libc++/test/libcxx/debug/test_config.py b/tests/libc++/test/libcxx/debug/test_config.py
deleted file mode 100644
index a0493f0..0000000
--- a/tests/libc++/test/libcxx/debug/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_unsupported(test, device):
- # Can't replace SIGABRT on old releases.
- if device.version < 21 and test.case_name == "debug_abort.pass":
- return device.version
- return None
diff --git a/tests/libc++/test/libcxx/strings/c.strings/test_config.py b/tests/libc++/test/libcxx/strings/c.strings/test_config.py
deleted file mode 100644
index ca9ca45..0000000
--- a/tests/libc++/test/libcxx/strings/c.strings/test_config.py
+++ /dev/null
@@ -1,4 +0,0 @@
-def build_broken(test):
- if test.case_name == "version_cuchar.pass":
- return "all", "http://b/63679176"
- return None, None
diff --git a/tests/libc++/test/std/depr/depr.c.headers/test_config.py b/tests/libc++/test/std/depr/depr.c.headers/test_config.py
deleted file mode 100644
index e3d7798..0000000
--- a/tests/libc++/test/std/depr/depr.c.headers/test_config.py
+++ /dev/null
@@ -1,6 +0,0 @@
-def build_broken(test):
- if test.case_name == "math_h_isnan.pass":
- return "all", "http://b/34724220"
- if test.case_name == "math_h_isinf.pass" and test.config.api >= 21:
- return f"android-{test.config.api}", "http://b/34724220"
- return None, None
diff --git a/tests/libc++/test/std/input.output/file.streams/fstreams/filebuf.members/test_config.py b/tests/libc++/test/std/input.output/file.streams/fstreams/filebuf.members/test_config.py
deleted file mode 100644
index e30ba15..0000000
--- a/tests/libc++/test/std/input.output/file.streams/fstreams/filebuf.members/test_config.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from typing import Optional
-
-from ndk.test.devices import DeviceConfig
-from ndk.test.buildtest.case import Test
-
-
-def run_broken(test: Test, device: DeviceConfig) -> tuple[Optional[str], Optional[str]]:
- if test.case_name == "close.pass" and device.version >= 31:
- return (
- f"device API level {device.version}",
- "https://github.com/android/ndk/issues/1626",
- )
- return None, None
diff --git a/tests/libc++/test/std/input.output/stream.buffers/streambuf/streambuf.protected/streambuf.put.area/test_config.py b/tests/libc++/test/std/input.output/stream.buffers/streambuf/streambuf.protected/streambuf.put.area/test_config.py
deleted file mode 100644
index b0d0a07..0000000
--- a/tests/libc++/test/std/input.output/stream.buffers/streambuf/streambuf.protected/streambuf.put.area/test_config.py
+++ /dev/null
@@ -1,6 +0,0 @@
-def run_unsupported(test, _device):
- if test.case_name == "pbump2gig.pass":
- # This test attempts to allocate 2GiB of 'a', which doesn't work on a
- # mobile device.
- return "all"
- return None
diff --git a/tests/libc++/test/std/iterators/iterator.primitives/iterator.traits/test_config.py b/tests/libc++/test/std/iterators/iterator.primitives/iterator.traits/test_config.py
deleted file mode 100644
index 3eb6286..0000000
--- a/tests/libc++/test/std/iterators/iterator.primitives/iterator.traits/test_config.py
+++ /dev/null
@@ -1,6 +0,0 @@
-def build_broken(test):
- if test.case_name == "empty.fail":
- # Format of the diagnostic changed in clang and we don't have the
- # libc++ update to match (https://reviews.llvm.org/D92239).
- return "all", "https://github.com/android/ndk/issues/1454"
- return None, None
diff --git a/tests/libc++/test/std/language.support/support.dynamic/new.delete/new.delete.array/test_config.py b/tests/libc++/test/std/language.support/support.dynamic/new.delete/new.delete.array/test_config.py
deleted file mode 100644
index c22b8dd..0000000
--- a/tests/libc++/test/std/language.support/support.dynamic/new.delete/new.delete.array/test_config.py
+++ /dev/null
@@ -1,10 +0,0 @@
-def run_broken(test, device):
- failing_tests = [
- "delete_align_val_t_replace.pass",
- "new_align_val_t_nothrow_replace.pass",
- "new_array_nothrow_replace.pass",
- "new_array_replace.pass",
- ]
- if test.case_name in failing_tests and device.version < 18:
- return f"android-{device.version}", "http://b/2643900"
- return None, None
diff --git a/tests/libc++/test/std/language.support/support.dynamic/new.delete/new.delete.single/test_config.py b/tests/libc++/test/std/language.support/support.dynamic/new.delete/new.delete.single/test_config.py
deleted file mode 100644
index 8467985..0000000
--- a/tests/libc++/test/std/language.support/support.dynamic/new.delete/new.delete.single/test_config.py
+++ /dev/null
@@ -1,8 +0,0 @@
-def run_broken(test, device):
- failing_tests = [
- "new_align_val_t_nothrow_replace.pass",
- "new_nothrow_replace.pass",
- ]
- if test.case_name in failing_tests and device.version < 18:
- return f"android-{device.version}", "http://b/2643900"
- return None, None
diff --git a/tests/libc++/test/std/localization/locale.categories/category.ctype/facet.ctype.special/facet.ctype.char.dtor/test_config.py b/tests/libc++/test/std/localization/locale.categories/category.ctype/facet.ctype.special/facet.ctype.char.dtor/test_config.py
deleted file mode 100644
index 5e0051e..0000000
--- a/tests/libc++/test/std/localization/locale.categories/category.ctype/facet.ctype.special/facet.ctype.char.dtor/test_config.py
+++ /dev/null
@@ -1,4 +0,0 @@
-def run_broken(test, device):
- if test.case_name == "dtor.pass" and device.version < 18:
- return f"android-{device.version}", "http://b/2643900"
- return None, None
diff --git a/tests/libc++/test/std/localization/locale.categories/category.numeric/locale.nm.put/facet.num.put.members/test_config.py b/tests/libc++/test/std/localization/locale.categories/category.numeric/locale.nm.put/facet.num.put.members/test_config.py
deleted file mode 100644
index 18d0ed1..0000000
--- a/tests/libc++/test/std/localization/locale.categories/category.numeric/locale.nm.put/facet.num.put.members/test_config.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from typing import Optional
-
-from ndk.test.devices import Device
-from ndk.test.devicetest.case import LibcxxTestCase
-
-
-def run_broken(
- test: LibcxxTestCase, device: Device
-) -> tuple[Optional[str], Optional[str]]:
- if test.case_name == "put_long_double.pass" and device.version > 21:
- # libc++ expects only one format of positive nan. At some point this changed.
- # The API level above will likely need to be changed as we test on other old API
- # levels.
- return f"{test.config.abi} OS {device.version}", "http://b/34950416"
- percent_f_tests = ("put_double.pass", "put_long_double.pass")
- if test.case_name in percent_f_tests and device.version < 21:
- return f"android-{device.version}", "http://b/35764716"
- return None, None
diff --git a/tests/libc++/test/std/localization/locale.categories/category.numeric/locale.num.get/facet.num.get.members/test_config.py b/tests/libc++/test/std/localization/locale.categories/category.numeric/locale.num.get/facet.num.get.members/test_config.py
deleted file mode 100644
index 7c6f7f2..0000000
--- a/tests/libc++/test/std/localization/locale.categories/category.numeric/locale.num.get/facet.num.get.members/test_config.py
+++ /dev/null
@@ -1,18 +0,0 @@
-def run_broken(test, device):
- is_lp64 = test.config.abi in ("arm64-v8a", "x86_64")
- failing_tests = ("get_long_double.pass",)
- if is_lp64 and device.version < 26 and test.case_name in failing_tests:
- return f"android-{device.version}", "http://b/31101647"
-
- if test.case_name == "get_float.pass" and device.version < 21:
- return test.config.abi, "https://github.com/android-ndk/ndk/issues/415"
-
- percent_a_tests = (
- "get_double.pass",
- "get_long_double.pass",
- )
- if test.case_name in percent_a_tests and device.version < 21:
- bug = "https://github.com/android-ndk/ndk/issues/437"
- return f"android-{device.version}", bug
-
- return None, None
diff --git a/tests/libc++/test/std/numerics/c.math/test_config.py b/tests/libc++/test/std/numerics/c.math/test_config.py
deleted file mode 100644
index 5a95f85..0000000
--- a/tests/libc++/test/std/numerics/c.math/test_config.py
+++ /dev/null
@@ -1,9 +0,0 @@
-def build_broken(test):
- if test.case_name == "cmath_isnan.pass":
- return "all", "http://b/34724220"
- if test.case_name == "cmath_isinf.pass" and test.config.api >= 21:
- return f"android-{test.config.api}", "http://b/34724220"
- if test.case_name == "abs.fail" and test.config.api < 19:
- bug = "https://github.com/android/ndk/issues/1237"
- return f"android-{test.config.api}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.bernoulli/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.bernoulli/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.bernoulli/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.bin/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.bin/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.bin/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.geo/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.geo/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.geo/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.negbin/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.negbin/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.negbin/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.cauchy/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.cauchy/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.cauchy/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.chisq/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.chisq/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.chisq/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.f/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.f/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.f/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.lognormal/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.lognormal/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.lognormal/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.normal/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.normal/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.normal/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.t/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.t/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.t/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.exp/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.exp/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.exp/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.extreme/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.extreme/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.extreme/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.gamma/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.gamma/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.gamma/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.poisson/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.poisson/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.poisson/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.weibull/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.weibull/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.weibull/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.discrete/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.discrete/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.discrete/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.pconst/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.pconst/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.pconst/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.plinear/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.plinear/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.plinear/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.uni/rand.dist.uni.real/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.uni/rand.dist.uni.real/test_config.py
deleted file mode 100644
index 01159e0..0000000
--- a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.uni/rand.dist.uni.real/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- if device.version < 21 and test.case_name == "io.pass":
- bug = "https://issuetracker.google.com/36988114"
- return f"android-{device.version}", bug
- return None, None
diff --git a/tests/libc++/test/std/strings/c.strings/test_config.py b/tests/libc++/test/std/strings/c.strings/test_config.py
deleted file mode 100644
index ca245c2..0000000
--- a/tests/libc++/test/std/strings/c.strings/test_config.py
+++ /dev/null
@@ -1,4 +0,0 @@
-def build_broken(test):
- if test.case_name == "cuchar.pass":
- return "all", "http://b/63679176"
- return None, None
diff --git a/tests/libc++/test/std/strings/string.conversions/test_config.py b/tests/libc++/test/std/strings/string.conversions/test_config.py
deleted file mode 100644
index ffdf201..0000000
--- a/tests/libc++/test/std/strings/string.conversions/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def run_broken(test, device):
- is_lp64 = test.config.abi in ("arm64-v8a", "x86_64")
- if device.version < 26 and is_lp64 and test.case_name == "stold.pass":
- return f"android-{device.version}", "http://b/31101647"
- return None, None
diff --git a/tests/libc++/test/std/thread/thread.threads/thread.thread.class/thread.thread.constr/test_config.py b/tests/libc++/test/std/thread/thread.threads/thread.thread.class/thread.thread.constr/test_config.py
deleted file mode 100644
index 0410e09..0000000
--- a/tests/libc++/test/std/thread/thread.threads/thread.thread.class/thread.thread.constr/test_config.py
+++ /dev/null
@@ -1,4 +0,0 @@
-def run_broken(test, device):
- if test.case_name == "F.pass" and device.version < 18:
- return f"android-{device.version}", "http://b/2643900"
- return None, None
diff --git a/tests/libc++/test/std/utilities/any/any.nonmembers/any.cast/test_config.py b/tests/libc++/test/std/utilities/any/any.nonmembers/any.cast/test_config.py
deleted file mode 100644
index ab28faa..0000000
--- a/tests/libc++/test/std/utilities/any/any.nonmembers/any.cast/test_config.py
+++ /dev/null
@@ -1,8 +0,0 @@
-def build_broken(test):
- if (
- test.case_name == "const_correctness.fail"
- or test.case_name == "not_copy_constructible.fail"
- ):
- # This is XFAIL: clang and libc++ are out of sync.
- return "all", "https://github.com/android/ndk/issues/1530"
- return None, None
diff --git a/tests/libc++/test/std/utilities/smartptr/unique.ptr/unique.ptr.class/unique.ptr.observers/test_config.py b/tests/libc++/test/std/utilities/smartptr/unique.ptr/unique.ptr.class/unique.ptr.observers/test_config.py
deleted file mode 100644
index 47cd3a9..0000000
--- a/tests/libc++/test/std/utilities/smartptr/unique.ptr/unique.ptr.class/unique.ptr.observers/test_config.py
+++ /dev/null
@@ -1,8 +0,0 @@
-def build_broken(test):
- if (
- test.case_name == "dereference.runtime.fail"
- or test.case_name == "op_arrow.runtime.fail"
- ):
- # This is XFAIL: clang and libc++ are out of sync.
- return "all", "https://github.com/android/ndk/issues/1530"
- return None, None
diff --git a/tests/libc++/test/std/utilities/time/time.cal/time.cal.day/time.cal.day.nonmembers/test_config.py b/tests/libc++/test/std/utilities/time/time.cal/time.cal.day/time.cal.day.nonmembers/test_config.py
deleted file mode 100644
index 2f1afb3..0000000
--- a/tests/libc++/test/std/utilities/time/time.cal/time.cal.day/time.cal.day.nonmembers/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def build_broken(test):
- if test.case_name == "streaming.pass":
- # This is XFAIL: * upstream. No bug is filed.
- return "all", "upstream"
- return None, None
diff --git a/tests/libc++/test/std/utilities/time/time.cal/time.cal.md/time.cal.md.nonmembers/test_config.py b/tests/libc++/test/std/utilities/time/time.cal/time.cal.md/time.cal.md.nonmembers/test_config.py
deleted file mode 100644
index 2f1afb3..0000000
--- a/tests/libc++/test/std/utilities/time/time.cal/time.cal.md/time.cal.md.nonmembers/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def build_broken(test):
- if test.case_name == "streaming.pass":
- # This is XFAIL: * upstream. No bug is filed.
- return "all", "upstream"
- return None, None
diff --git a/tests/libc++/test/std/utilities/time/time.cal/time.cal.mdlast/test_config.py b/tests/libc++/test/std/utilities/time/time.cal/time.cal.mdlast/test_config.py
deleted file mode 100644
index 2f1afb3..0000000
--- a/tests/libc++/test/std/utilities/time/time.cal/time.cal.mdlast/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def build_broken(test):
- if test.case_name == "streaming.pass":
- # This is XFAIL: * upstream. No bug is filed.
- return "all", "upstream"
- return None, None
diff --git a/tests/libc++/test/std/utilities/time/time.cal/time.cal.month/time.cal.month.nonmembers/test_config.py b/tests/libc++/test/std/utilities/time/time.cal/time.cal.month/time.cal.month.nonmembers/test_config.py
deleted file mode 100644
index 2f1afb3..0000000
--- a/tests/libc++/test/std/utilities/time/time.cal/time.cal.month/time.cal.month.nonmembers/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def build_broken(test):
- if test.case_name == "streaming.pass":
- # This is XFAIL: * upstream. No bug is filed.
- return "all", "upstream"
- return None, None
diff --git a/tests/libc++/test/std/utilities/time/time.cal/time.cal.mwd/time.cal.mwd.nonmembers/test_config.py b/tests/libc++/test/std/utilities/time/time.cal/time.cal.mwd/time.cal.mwd.nonmembers/test_config.py
deleted file mode 100644
index 2f1afb3..0000000
--- a/tests/libc++/test/std/utilities/time/time.cal/time.cal.mwd/time.cal.mwd.nonmembers/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def build_broken(test):
- if test.case_name == "streaming.pass":
- # This is XFAIL: * upstream. No bug is filed.
- return "all", "upstream"
- return None, None
diff --git a/tests/libc++/test/std/utilities/time/time.cal/time.cal.mwdlast/time.cal.mwdlast.nonmembers/test_config.py b/tests/libc++/test/std/utilities/time/time.cal/time.cal.mwdlast/time.cal.mwdlast.nonmembers/test_config.py
deleted file mode 100644
index 2f1afb3..0000000
--- a/tests/libc++/test/std/utilities/time/time.cal/time.cal.mwdlast/time.cal.mwdlast.nonmembers/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def build_broken(test):
- if test.case_name == "streaming.pass":
- # This is XFAIL: * upstream. No bug is filed.
- return "all", "upstream"
- return None, None
diff --git a/tests/libc++/test/std/utilities/time/time.cal/time.cal.wdidx/time.cal.wdidx.nonmembers/test_config.py b/tests/libc++/test/std/utilities/time/time.cal/time.cal.wdidx/time.cal.wdidx.nonmembers/test_config.py
deleted file mode 100644
index 2f1afb3..0000000
--- a/tests/libc++/test/std/utilities/time/time.cal/time.cal.wdidx/time.cal.wdidx.nonmembers/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def build_broken(test):
- if test.case_name == "streaming.pass":
- # This is XFAIL: * upstream. No bug is filed.
- return "all", "upstream"
- return None, None
diff --git a/tests/libc++/test/std/utilities/time/time.cal/time.cal.wdlast/time.cal.wdlast.nonmembers/test_config.py b/tests/libc++/test/std/utilities/time/time.cal/time.cal.wdlast/time.cal.wdlast.nonmembers/test_config.py
deleted file mode 100644
index 2f1afb3..0000000
--- a/tests/libc++/test/std/utilities/time/time.cal/time.cal.wdlast/time.cal.wdlast.nonmembers/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def build_broken(test):
- if test.case_name == "streaming.pass":
- # This is XFAIL: * upstream. No bug is filed.
- return "all", "upstream"
- return None, None
diff --git a/tests/libc++/test/std/utilities/time/time.cal/time.cal.weekday/time.cal.weekday.nonmembers/test_config.py b/tests/libc++/test/std/utilities/time/time.cal/time.cal.weekday/time.cal.weekday.nonmembers/test_config.py
deleted file mode 100644
index 2f1afb3..0000000
--- a/tests/libc++/test/std/utilities/time/time.cal/time.cal.weekday/time.cal.weekday.nonmembers/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def build_broken(test):
- if test.case_name == "streaming.pass":
- # This is XFAIL: * upstream. No bug is filed.
- return "all", "upstream"
- return None, None
diff --git a/tests/libc++/test/std/utilities/time/time.cal/time.cal.year/time.cal.year.nonmembers/test_config.py b/tests/libc++/test/std/utilities/time/time.cal/time.cal.year/time.cal.year.nonmembers/test_config.py
deleted file mode 100644
index 2f1afb3..0000000
--- a/tests/libc++/test/std/utilities/time/time.cal/time.cal.year/time.cal.year.nonmembers/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def build_broken(test):
- if test.case_name == "streaming.pass":
- # This is XFAIL: * upstream. No bug is filed.
- return "all", "upstream"
- return None, None
diff --git a/tests/libc++/test/std/utilities/time/time.cal/time.cal.ym/time.cal.ym.nonmembers/test_config.py b/tests/libc++/test/std/utilities/time/time.cal/time.cal.ym/time.cal.ym.nonmembers/test_config.py
deleted file mode 100644
index 2f1afb3..0000000
--- a/tests/libc++/test/std/utilities/time/time.cal/time.cal.ym/time.cal.ym.nonmembers/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def build_broken(test):
- if test.case_name == "streaming.pass":
- # This is XFAIL: * upstream. No bug is filed.
- return "all", "upstream"
- return None, None
diff --git a/tests/libc++/test/std/utilities/time/time.cal/time.cal.ymd/time.cal.ymd.nonmembers/test_config.py b/tests/libc++/test/std/utilities/time/time.cal/time.cal.ymd/time.cal.ymd.nonmembers/test_config.py
deleted file mode 100644
index 2f1afb3..0000000
--- a/tests/libc++/test/std/utilities/time/time.cal/time.cal.ymd/time.cal.ymd.nonmembers/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def build_broken(test):
- if test.case_name == "streaming.pass":
- # This is XFAIL: * upstream. No bug is filed.
- return "all", "upstream"
- return None, None
diff --git a/tests/libc++/test/std/utilities/time/time.cal/time.cal.ymdlast/time.cal.ymdlast.nonmembers/test_config.py b/tests/libc++/test/std/utilities/time/time.cal/time.cal.ymdlast/time.cal.ymdlast.nonmembers/test_config.py
deleted file mode 100644
index 2f1afb3..0000000
--- a/tests/libc++/test/std/utilities/time/time.cal/time.cal.ymdlast/time.cal.ymdlast.nonmembers/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def build_broken(test):
- if test.case_name == "streaming.pass":
- # This is XFAIL: * upstream. No bug is filed.
- return "all", "upstream"
- return None, None
diff --git a/tests/libc++/test/std/utilities/time/time.cal/time.cal.ymwd/time.cal.ymwd.nonmembers/test_config.py b/tests/libc++/test/std/utilities/time/time.cal/time.cal.ymwd/time.cal.ymwd.nonmembers/test_config.py
deleted file mode 100644
index 2f1afb3..0000000
--- a/tests/libc++/test/std/utilities/time/time.cal/time.cal.ymwd/time.cal.ymwd.nonmembers/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def build_broken(test):
- if test.case_name == "streaming.pass":
- # This is XFAIL: * upstream. No bug is filed.
- return "all", "upstream"
- return None, None
diff --git a/tests/libc++/test/std/utilities/time/time.cal/time.cal.ymwdlast/time.cal.ymwdlast.nonmembers/test_config.py b/tests/libc++/test/std/utilities/time/time.cal/time.cal.ymwdlast/time.cal.ymwdlast.nonmembers/test_config.py
deleted file mode 100644
index 2f1afb3..0000000
--- a/tests/libc++/test/std/utilities/time/time.cal/time.cal.ymwdlast/time.cal.ymwdlast.nonmembers/test_config.py
+++ /dev/null
@@ -1,5 +0,0 @@
-def build_broken(test):
- if test.case_name == "streaming.pass":
- # This is XFAIL: * upstream. No bug is filed.
- return "all", "upstream"
- return None, None
diff --git a/tests/ndk-stack/ndk_stack_systemtest.py b/tests/ndk-stack/ndk_stack_systemtest.py
deleted file mode 100755
index 8b0f39e..0000000
--- a/tests/ndk-stack/ndk_stack_systemtest.py
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""System tests for ndk-stack.py"""
-
-from __future__ import print_function
-
-from io import StringIO
-import os.path
-import sys
-import unittest
-from unittest.mock import patch
-
-sys.path.insert(0, "../..")
-ndk_stack = __import__("ndk-stack")
-
-import ndk.hosts # pylint:disable=wrong-import-position
-import ndk.toolchains # pylint:disable=wrong-import-position
-
-
-class SystemTests(unittest.TestCase):
- """Complete system test of ndk-stack.py script."""
-
- def setUp(self):
- default_host = ndk.hosts.get_default_host()
- clang_toolchain = ndk.toolchains.ClangToolchain(default_host)
-
- # First try and use the normal functions, and if they fail, then
- # use hard-coded paths from the development locations.
- ndk_paths = ndk_stack.get_ndk_paths()
- self.readelf = ndk_stack.find_readelf(*ndk_paths)
- if not self.readelf:
- self.readelf = clang_toolchain.clang_tool("llvm-readelf")
- self.assertTrue(self.readelf)
- self.assertTrue(os.path.exists(self.readelf))
-
- try:
- self.llvm_symbolizer = ndk_stack.find_llvm_symbolizer(*ndk_paths)
- except OSError:
- self.llvm_symbolizer = str(clang_toolchain.clang_tool("llvm-symbolizer"))
- self.assertTrue(self.llvm_symbolizer)
- self.assertTrue(os.path.exists(self.llvm_symbolizer))
-
- @patch.object(ndk_stack, "find_llvm_symbolizer")
- @patch.object(ndk_stack, "find_readelf")
- def system_test(
- self, backtrace_file, expected_file, mock_readelf, mock_llvm_symbolizer
- ):
- mock_readelf.return_value = self.readelf
- mock_llvm_symbolizer.return_value = self.llvm_symbolizer
-
- symbol_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "files")
- with patch("sys.stdout", new_callable=StringIO) as mock_stdout:
- ndk_stack.main(
- ["-s", symbol_dir, "-i", os.path.join(symbol_dir, backtrace_file)]
- )
-
- # Read the expected output.
- file_name = os.path.join(symbol_dir, expected_file)
- with open(mode="r", file=file_name) as exp_file:
- expected = exp_file.read()
- expected = expected.replace("SYMBOL_DIR", symbol_dir)
- self.maxDiff = None
- self.assertEqual(expected, mock_stdout.getvalue())
-
- def test_all_stacks(self):
- self.system_test( # pylint:disable=no-value-for-parameter
- "backtrace.txt", "expected.txt"
- )
-
- def test_multiple_crashes(self):
- self.system_test( # pylint:disable=no-value-for-parameter
- "multiple.txt", "expected_multiple.txt"
- )
-
- def test_hwasan(self):
- self.system_test( # pylint:disable=no-value-for-parameter
- "hwasan.txt", "expected_hwasan.txt"
- )
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/tests/ndk-stack/pylintrc b/tests/ndk-stack/pylintrc
deleted file mode 120000
index e242284..0000000
--- a/tests/ndk-stack/pylintrc
+++ /dev/null
@@ -1 +0,0 @@
-../../pylintrc
\ No newline at end of file
diff --git a/tests/ndk-stack/__init__.py b/tests/pytest/__init__.py
similarity index 100%
copy from tests/ndk-stack/__init__.py
copy to tests/pytest/__init__.py
diff --git a/tests/ndk-stack/__init__.py b/tests/pytest/ndkstack/__init__.py
similarity index 100%
rename from tests/ndk-stack/__init__.py
rename to tests/pytest/ndkstack/__init__.py
diff --git a/tests/ndk-stack/files/backtrace.txt b/tests/pytest/ndkstack/files/backtrace.txt
similarity index 100%
rename from tests/ndk-stack/files/backtrace.txt
rename to tests/pytest/ndkstack/files/backtrace.txt
diff --git a/tests/ndk-stack/files/expected.txt b/tests/pytest/ndkstack/files/expected.txt
similarity index 94%
rename from tests/ndk-stack/files/expected.txt
rename to tests/pytest/ndkstack/files/expected.txt
index bf96b9c..64e9002 100644
--- a/tests/ndk-stack/files/expected.txt
+++ b/tests/pytest/ndkstack/files/expected.txt
@@ -1,9 +1,9 @@
********** Crash dump: **********
#00 0x0000e4fc test.apk!libbase.so (offset 0x1000)
- android::base::RemoveFileIfExists(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >*)
+ android::base::RemoveFileIfExists(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char>> const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char>>*)
system/core/base/file.cpp:365:0
#01 0x0000e4fc test.apk!libbase.so (offset 0x1000)
- android::base::RemoveFileIfExists(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >*)
+ android::base::RemoveFileIfExists(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char>> const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char>>*)
system/core/base/file.cpp:365:0
#02 0x00057250 test.apk!libc.so (offset 0x14000)
__memcpy_a15
diff --git a/tests/ndk-stack/files/expected_hwasan.txt b/tests/pytest/ndkstack/files/expected_hwasan.txt
similarity index 100%
rename from tests/ndk-stack/files/expected_hwasan.txt
rename to tests/pytest/ndkstack/files/expected_hwasan.txt
diff --git a/tests/ndk-stack/files/expected_multiple.txt b/tests/pytest/ndkstack/files/expected_multiple.txt
similarity index 100%
rename from tests/ndk-stack/files/expected_multiple.txt
rename to tests/pytest/ndkstack/files/expected_multiple.txt
diff --git a/tests/ndk-stack/files/hwasan.txt b/tests/pytest/ndkstack/files/hwasan.txt
similarity index 100%
rename from tests/ndk-stack/files/hwasan.txt
rename to tests/pytest/ndkstack/files/hwasan.txt
diff --git a/tests/ndk-stack/files/libbase.so b/tests/pytest/ndkstack/files/libbase.so
similarity index 100%
rename from tests/ndk-stack/files/libbase.so
rename to tests/pytest/ndkstack/files/libbase.so
Binary files differ
diff --git a/tests/ndk-stack/files/libc.so b/tests/pytest/ndkstack/files/libc.so
similarity index 100%
rename from tests/ndk-stack/files/libc.so
rename to tests/pytest/ndkstack/files/libc.so
Binary files differ
diff --git a/tests/ndk-stack/files/libc64.so b/tests/pytest/ndkstack/files/libc64.so
similarity index 100%
rename from tests/ndk-stack/files/libc64.so
rename to tests/pytest/ndkstack/files/libc64.so
Binary files differ
diff --git a/tests/ndk-stack/files/libutils.so b/tests/pytest/ndkstack/files/libutils.so
similarity index 100%
rename from tests/ndk-stack/files/libutils.so
rename to tests/pytest/ndkstack/files/libutils.so
Binary files differ
diff --git a/tests/ndk-stack/files/libziparchive.so b/tests/pytest/ndkstack/files/libziparchive.so
similarity index 100%
rename from tests/ndk-stack/files/libziparchive.so
rename to tests/pytest/ndkstack/files/libziparchive.so
Binary files differ
diff --git a/tests/ndk-stack/files/multiple.txt b/tests/pytest/ndkstack/files/multiple.txt
similarity index 100%
rename from tests/ndk-stack/files/multiple.txt
rename to tests/pytest/ndkstack/files/multiple.txt
diff --git a/tests/ndk-stack/files/test.apk b/tests/pytest/ndkstack/files/test.apk
similarity index 100%
rename from tests/ndk-stack/files/test.apk
rename to tests/pytest/ndkstack/files/test.apk
Binary files differ
diff --git a/tests/ndk-stack/ndk_stack_unittest.py b/tests/pytest/ndkstack/test_ndkstack.py
similarity index 65%
rename from tests/ndk-stack/ndk_stack_unittest.py
rename to tests/pytest/ndkstack/test_ndkstack.py
index c97833c..5317895 100755
--- a/tests/ndk-stack/ndk_stack_unittest.py
+++ b/tests/pytest/ndkstack/test_ndkstack.py
@@ -15,39 +15,28 @@
# limitations under the License.
#
"""Unittests for ndk-stack.py"""
-
-from __future__ import print_function
-
import os.path
-import sys
import textwrap
import unittest
-
+from io import StringIO
+from typing import Any
from unittest import mock
-from unittest.mock import patch
+from unittest.mock import Mock, patch
-try:
- # Python 2
- from cStringIO import StringIO
-except ModuleNotFoundError: # pylint:disable=undefined-variable
- # Python 3
- from io import StringIO
-
-sys.path.insert(0, "../..")
-ndk_stack = __import__("ndk-stack")
+import ndkstack
@patch("os.path.exists")
class PathTests(unittest.TestCase):
"""Tests of find_llvm_symbolizer() and find_readelf()."""
- def setUp(self):
+ def setUp(self) -> None:
self.ndk_paths = ("/ndk_fake", "/ndk_fake/bin", "linux-x86_64")
exe_suffix = ".EXE" if os.name == "nt" else ""
self.llvm_symbolizer = "llvm-symbolizer" + exe_suffix
self.readelf = "llvm-readelf" + exe_suffix
- def test_find_llvm_symbolizer_in_prebuilt(self, mock_exists):
+ def test_find_llvm_symbolizer_in_prebuilt(self, mock_exists: Mock) -> None:
expected_path = os.path.join(
"/ndk_fake",
"toolchains",
@@ -58,10 +47,12 @@
self.llvm_symbolizer,
)
mock_exists.return_value = True
- self.assertEqual(expected_path, ndk_stack.find_llvm_symbolizer(*self.ndk_paths))
+ self.assertEqual(expected_path, ndkstack.find_llvm_symbolizer(*self.ndk_paths))
mock_exists.assert_called_once_with(expected_path)
- def test_find_llvm_symbolizer_in_standalone_toolchain(self, mock_exists):
+ def test_find_llvm_symbolizer_in_standalone_toolchain(
+ self, mock_exists: Mock
+ ) -> None:
prebuilt_path = os.path.join(
"/ndk_fake",
"toolchains",
@@ -73,18 +64,18 @@
)
expected_path = os.path.join("/ndk_fake", "bin", self.llvm_symbolizer)
mock_exists.side_effect = [False, True]
- self.assertEqual(expected_path, ndk_stack.find_llvm_symbolizer(*self.ndk_paths))
+ self.assertEqual(expected_path, ndkstack.find_llvm_symbolizer(*self.ndk_paths))
mock_exists.assert_has_calls(
[mock.call(prebuilt_path), mock.call(expected_path)]
)
- def test_llvm_symbolizer_not_found(self, mock_exists):
+ def test_llvm_symbolizer_not_found(self, mock_exists: Mock) -> None:
mock_exists.return_value = False
with self.assertRaises(OSError) as cm:
- ndk_stack.find_llvm_symbolizer(*self.ndk_paths)
+ ndkstack.find_llvm_symbolizer(*self.ndk_paths)
self.assertEqual("Unable to find llvm-symbolizer", str(cm.exception))
- def test_find_readelf_in_prebuilt(self, mock_exists):
+ def test_find_readelf_in_prebuilt(self, mock_exists: Mock) -> None:
expected_path = os.path.join(
"/ndk_fake",
"toolchains",
@@ -95,10 +86,10 @@
self.readelf,
)
mock_exists.return_value = True
- self.assertEqual(expected_path, ndk_stack.find_readelf(*self.ndk_paths))
+ self.assertEqual(expected_path, ndkstack.find_readelf(*self.ndk_paths))
mock_exists.assert_called_once_with(expected_path)
- def test_find_readelf_in_prebuilt_arm(self, mock_exists):
+ def test_find_readelf_in_prebuilt_arm(self, mock_exists: Mock) -> None:
expected_path = os.path.join(
"/ndk_fake",
"toolchains",
@@ -111,29 +102,32 @@
mock_exists.return_value = True
self.assertEqual(
expected_path,
- ndk_stack.find_readelf("/ndk_fake", "/ndk_fake/bin", "linux-arm"),
+ ndkstack.find_readelf("/ndk_fake", "/ndk_fake/bin", "linux-arm"),
)
mock_exists.assert_called_once_with(expected_path)
- def test_find_readelf_in_standalone_toolchain(self, mock_exists):
+ def test_find_readelf_in_standalone_toolchain(self, mock_exists: Mock) -> None:
mock_exists.reset_mock()
expected_path = os.path.join("/ndk_fake", "bin", self.readelf)
- mock_exists.side_effect = [False, True]
- os.path.exists = lambda path, exp=expected_path: path == exp
- self.assertEqual(expected_path, ndk_stack.find_readelf(*self.ndk_paths))
- def test_readelf_not_found(self, mock_exists):
+ def mock_exists_impl(path: str) -> bool:
+ return path == expected_path
+
+ mock_exists.side_effect = mock_exists_impl
+ self.assertEqual(expected_path, ndkstack.find_readelf(*self.ndk_paths))
+
+ def test_readelf_not_found(self, mock_exists: Mock) -> None:
mock_exists.return_value = False
- self.assertFalse(ndk_stack.find_readelf(*self.ndk_paths))
+ self.assertFalse(ndkstack.find_readelf(*self.ndk_paths))
class FrameTests(unittest.TestCase):
"""Test parsing of backtrace lines."""
- def test_line_with_map_name(self):
+ def test_line_with_map_name(self) -> None:
line = " #14 pc 00001000 /fake/libfake.so"
- frame_info = ndk_stack.FrameInfo.from_line(line)
- self.assertTrue(frame_info)
+ frame_info = ndkstack.FrameInfo.from_line(line)
+ assert frame_info is not None
self.assertEqual("#14", frame_info.num)
self.assertEqual("00001000", frame_info.pc)
self.assertEqual("/fake/libfake.so", frame_info.tail)
@@ -142,10 +136,10 @@
self.assertFalse(frame_info.container_file)
self.assertFalse(frame_info.build_id)
- def test_line_with_function(self):
+ def test_line_with_function(self) -> None:
line = " #08 pc 00001040 /fake/libfake.so (func())"
- frame_info = ndk_stack.FrameInfo.from_line(line)
- self.assertTrue(frame_info)
+ frame_info = ndkstack.FrameInfo.from_line(line)
+ assert frame_info is not None
self.assertEqual("#08", frame_info.num)
self.assertEqual("00001040", frame_info.pc)
self.assertEqual("/fake/libfake.so (func())", frame_info.tail)
@@ -154,10 +148,10 @@
self.assertFalse(frame_info.container_file)
self.assertFalse(frame_info.build_id)
- def test_line_with_offset(self):
+ def test_line_with_offset(self) -> None:
line = " #04 pc 00002050 /fake/libfake.so (offset 0x2000)"
- frame_info = ndk_stack.FrameInfo.from_line(line)
- self.assertTrue(frame_info)
+ frame_info = ndkstack.FrameInfo.from_line(line)
+ assert frame_info is not None
self.assertEqual("#04", frame_info.num)
self.assertEqual("00002050", frame_info.pc)
self.assertEqual("/fake/libfake.so (offset 0x2000)", frame_info.tail)
@@ -166,10 +160,10 @@
self.assertFalse(frame_info.container_file)
self.assertFalse(frame_info.build_id)
- def test_line_with_build_id(self):
+ def test_line_with_build_id(self) -> None:
line = " #03 pc 00002050 /fake/libfake.so (BuildId: d1d420a58366bf29f1312ec826f16564)"
- frame_info = ndk_stack.FrameInfo.from_line(line)
- self.assertTrue(frame_info)
+ frame_info = ndkstack.FrameInfo.from_line(line)
+ assert frame_info is not None
self.assertEqual("#03", frame_info.num)
self.assertEqual("00002050", frame_info.pc)
self.assertEqual(
@@ -181,10 +175,10 @@
self.assertFalse(frame_info.container_file)
self.assertEqual("d1d420a58366bf29f1312ec826f16564", frame_info.build_id)
- def test_line_with_container_file(self):
+ def test_line_with_container_file(self) -> None:
line = " #10 pc 00003050 /fake/fake.apk!libc.so"
- frame_info = ndk_stack.FrameInfo.from_line(line)
- self.assertTrue(frame_info)
+ frame_info = ndkstack.FrameInfo.from_line(line)
+ assert frame_info is not None
self.assertEqual("#10", frame_info.num)
self.assertEqual("00003050", frame_info.pc)
self.assertEqual("/fake/fake.apk!libc.so", frame_info.tail)
@@ -193,10 +187,10 @@
self.assertEqual("/fake/fake.apk", frame_info.container_file)
self.assertFalse(frame_info.build_id)
- def test_line_with_container_and_elf_equal(self):
+ def test_line_with_container_and_elf_equal(self) -> None:
line = " #12 pc 00004050 /fake/libc.so!lib/libc.so"
- frame_info = ndk_stack.FrameInfo.from_line(line)
- self.assertTrue(frame_info)
+ frame_info = ndkstack.FrameInfo.from_line(line)
+ assert frame_info is not None
self.assertEqual("#12", frame_info.num)
self.assertEqual("00004050", frame_info.pc)
self.assertEqual("/fake/libc.so!lib/libc.so", frame_info.tail)
@@ -205,13 +199,13 @@
self.assertFalse(frame_info.container_file)
self.assertFalse(frame_info.build_id)
- def test_line_everything(self):
+ def test_line_everything(self) -> None:
line = (
" #07 pc 00823fc /fake/fake.apk!libc.so (__start_thread+64) "
"(offset 0x1000) (BuildId: 6a0c10d19d5bf39a5a78fa514371dab3)"
)
- frame_info = ndk_stack.FrameInfo.from_line(line)
- self.assertTrue(frame_info)
+ frame_info = ndkstack.FrameInfo.from_line(line)
+ assert frame_info is not None
self.assertEqual("#07", frame_info.num)
self.assertEqual("00823fc", frame_info.pc)
self.assertEqual(
@@ -224,19 +218,28 @@
self.assertEqual("/fake/fake.apk", frame_info.container_file)
self.assertEqual("6a0c10d19d5bf39a5a78fa514371dab3", frame_info.build_id)
+ def test_0x_prefixed_address(self) -> None:
+ """Tests that addresses beginning with 0x are parsed correctly."""
+ frame_info = ndkstack.FrameInfo.from_line(
+ " #00 pc 0x000000000006263c "
+ "/apex/com.android.runtime/lib/bionic/libc.so (abort+172)"
+ )
+ assert frame_info is not None
+ assert frame_info.pc == "000000000006263c"
-@patch.object(ndk_stack, "get_build_id")
+
+@patch.object(ndkstack, "get_build_id")
@patch("os.path.exists")
class VerifyElfFileTests(unittest.TestCase):
"""Tests of verify_elf_file()."""
- def create_frame_info(self):
+ def create_frame_info(self) -> ndkstack.FrameInfo:
line = " #03 pc 00002050 /fake/libfake.so"
- frame_info = ndk_stack.FrameInfo.from_line(line)
- self.assertTrue(frame_info)
+ frame_info = ndkstack.FrameInfo.from_line(line)
+ assert frame_info is not None
return frame_info
- def test_elf_file_does_not_exist(self, mock_exists, _):
+ def test_elf_file_does_not_exist(self, mock_exists: Mock, _: Mock) -> None:
mock_exists.return_value = False
frame_info = self.create_frame_info()
self.assertFalse(
@@ -246,7 +249,9 @@
frame_info.verify_elf_file("llvm-readelf", "/fake/libfake.so", "libfake.so")
)
- def test_elf_file_build_id_matches(self, mock_exists, mock_get_build_id):
+ def test_elf_file_build_id_matches(
+ self, mock_exists: Mock, mock_get_build_id: Mock
+ ) -> None:
mock_exists.return_value = True
frame_info = self.create_frame_info()
frame_info.build_id = "MOCKED_BUILD_ID"
@@ -263,7 +268,9 @@
)
mock_get_build_id.assert_called_once_with("llvm-readelf", "/mocked/libfake.so")
- def test_elf_file_build_id_does_not_match(self, mock_exists, mock_get_build_id):
+ def test_elf_file_build_id_does_not_match(
+ self, mock_exists: Mock, mock_get_build_id: Mock
+ ) -> None:
mock_exists.return_value = True
mock_get_build_id.return_value = "MOCKED_BUILD_ID"
frame_info = self.create_frame_info()
@@ -290,43 +297,44 @@
class GetZipInfoFromOffsetTests(unittest.TestCase):
"""Tests of get_zip_info_from_offset()."""
- def setUp(self):
+ def setUp(self) -> None:
self.mock_zip = mock.MagicMock()
self.mock_zip.filename = "/fake/zip.apk"
self.mock_zip.infolist.return_value = []
- def test_file_does_not_exist(self):
+ def test_file_does_not_exist(self) -> None:
with self.assertRaises(IOError):
- _ = ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x1000)
+ _ = ndkstack.get_zip_info_from_offset(self.mock_zip, 0x1000)
@patch("os.stat")
- def test_offset_ge_file_size(self, mock_stat):
+ def test_offset_ge_file_size(self, mock_stat: Mock) -> None:
mock_stat.return_value.st_size = 0x1000
- self.assertFalse(ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x1000))
- self.assertFalse(ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x1100))
+ self.assertFalse(ndkstack.get_zip_info_from_offset(self.mock_zip, 0x1000))
+ self.assertFalse(ndkstack.get_zip_info_from_offset(self.mock_zip, 0x1100))
@patch("os.stat")
- def test_empty_infolist(self, mock_stat):
+ def test_empty_infolist(self, mock_stat: Mock) -> None:
mock_stat.return_value.st_size = 0x1000
- self.assertFalse(ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x900))
+ self.assertFalse(ndkstack.get_zip_info_from_offset(self.mock_zip, 0x900))
@patch("os.stat")
- def test_zip_info_single_element(self, mock_stat):
+ def test_zip_info_single_element(self, mock_stat: Mock) -> None:
mock_stat.return_value.st_size = 0x2000
mock_zip_info = mock.MagicMock()
mock_zip_info.header_offset = 0x100
self.mock_zip.infolist.return_value = [mock_zip_info]
- self.assertFalse(ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x50))
+ self.assertFalse(ndkstack.get_zip_info_from_offset(self.mock_zip, 0x50))
- self.assertFalse(ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x2000))
+ self.assertFalse(ndkstack.get_zip_info_from_offset(self.mock_zip, 0x2000))
- zip_info = ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x200)
+ zip_info = ndkstack.get_zip_info_from_offset(self.mock_zip, 0x200)
+ assert zip_info is not None
self.assertEqual(0x100, zip_info.header_offset)
@patch("os.stat")
- def test_zip_info_checks(self, mock_stat):
+ def test_zip_info_checks(self, mock_stat: Mock) -> None:
mock_stat.return_value.st_size = 0x2000
mock_zip_info1 = mock.MagicMock()
@@ -335,22 +343,25 @@
mock_zip_info2.header_offset = 0x1000
self.mock_zip.infolist.return_value = [mock_zip_info1, mock_zip_info2]
- self.assertFalse(ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x50))
+ self.assertFalse(ndkstack.get_zip_info_from_offset(self.mock_zip, 0x50))
- zip_info = ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x200)
+ zip_info = ndkstack.get_zip_info_from_offset(self.mock_zip, 0x200)
+ assert zip_info is not None
self.assertEqual(0x100, zip_info.header_offset)
- zip_info = ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x100)
+ zip_info = ndkstack.get_zip_info_from_offset(self.mock_zip, 0x100)
+ assert zip_info is not None
self.assertEqual(0x100, zip_info.header_offset)
- zip_info = ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x1000)
+ zip_info = ndkstack.get_zip_info_from_offset(self.mock_zip, 0x1000)
+ assert zip_info is not None
self.assertEqual(0x1000, zip_info.header_offset)
class GetElfFileTests(unittest.TestCase):
"""Tests of FrameInfo.get_elf_file()."""
- def setUp(self):
+ def setUp(self) -> None:
self.mock_zipfile = mock.MagicMock()
self.mock_zipfile.extract.return_value = "/fake_tmp/libtest.so"
self.mock_zipfile.__enter__.return_value = self.mock_zipfile
@@ -358,14 +369,20 @@
self.mock_tmp = mock.MagicMock()
self.mock_tmp.get_directory.return_value = "/fake_tmp"
- def create_frame_info(self, tail):
+ # TODO: Refactor so this can specify a real return type.
+ # We can't specify anything more accurate than `Any` here because the real return
+ # value is a FrameInfo that's had its verify_elf_file method monkey patched with a
+ # mock.
+ def create_frame_info(self, tail: str) -> Any:
line = " #03 pc 00002050 " + tail
- frame_info = ndk_stack.FrameInfo.from_line(line)
- self.assertTrue(frame_info)
- frame_info.verify_elf_file = mock.MagicMock()
+ frame_info = ndkstack.FrameInfo.from_line(line)
+ assert frame_info is not None
+ # mypy can't (and won't) tolerate this.
+ # https://github.com/python/mypy/issues/2427
+ frame_info.verify_elf_file = mock.Mock() # type: ignore
return frame_info
- def test_file_only(self):
+ def test_file_only(self) -> None:
frame_info = self.create_frame_info("/fake/libfake.so")
frame_info.verify_elf_file.return_value = True
self.assertEqual(
@@ -379,7 +396,7 @@
)
self.assertEqual("/fake/libfake.so", frame_info.tail)
- def test_container_set_elf_in_symbol_dir(self):
+ def test_container_set_elf_in_symbol_dir(self) -> None:
frame_info = self.create_frame_info("/fake/fake.apk!libtest.so")
frame_info.verify_elf_file.return_value = True
self.assertEqual(
@@ -388,110 +405,120 @@
)
self.assertEqual("/fake/fake.apk!libtest.so", frame_info.tail)
- def test_container_set_elf_not_in_symbol_dir_apk_does_not_exist(self):
+ def test_container_set_elf_not_in_symbol_dir_apk_does_not_exist(self) -> None:
frame_info = self.create_frame_info("/fake/fake.apk!libtest.so")
frame_info.verify_elf_file.return_value = False
with self.assertRaises(IOError):
frame_info.get_elf_file("/fake_dir/symbols", None, self.mock_tmp)
self.assertEqual("/fake/fake.apk!libtest.so", frame_info.tail)
- @patch.object(ndk_stack, "get_zip_info_from_offset")
+ @patch.object(ndkstack, "get_zip_info_from_offset")
@patch("zipfile.ZipFile")
- def test_container_set_elf_not_in_apk(self, _, mock_get_zip_info):
+ def test_container_set_elf_not_in_apk(
+ self, _: Mock, mock_get_zip_info: Mock
+ ) -> None:
mock_get_zip_info.return_value = None
- frame_info = self.create_frame_info("/fake/fake.apk!libtest.so")
+ frame_info = self.create_frame_info("/fake/fake.apk!libtest.so (offset 0x2000)")
frame_info.verify_elf_file.return_value = False
self.assertFalse(
frame_info.get_elf_file("/fake_dir/symbols", None, self.mock_tmp)
)
- self.assertEqual("/fake/fake.apk!libtest.so", frame_info.tail)
+ self.assertEqual("/fake/fake.apk!libtest.so (offset 0x2000)", frame_info.tail)
- @patch.object(ndk_stack, "get_zip_info_from_offset")
+ @patch.object(ndkstack, "get_zip_info_from_offset")
@patch("zipfile.ZipFile")
- def test_container_set_elf_in_apk(self, mock_zipclass, mock_get_zip_info):
+ def test_container_set_elf_in_apk(
+ self, mock_zipclass: Mock, mock_get_zip_info: Mock
+ ) -> None:
mock_zipclass.return_value = self.mock_zipfile
mock_get_zip_info.return_value.filename = "libtest.so"
- frame_info = self.create_frame_info("/fake/fake.apk!libtest.so")
+ frame_info = self.create_frame_info("/fake/fake.apk!libtest.so (offset 0x2000)")
frame_info.verify_elf_file.side_effect = [False, True]
self.assertEqual(
"/fake_tmp/libtest.so",
frame_info.get_elf_file("/fake_dir/symbols", None, self.mock_tmp),
)
- self.assertEqual("/fake/fake.apk!libtest.so", frame_info.tail)
+ self.assertEqual("/fake/fake.apk!libtest.so (offset 0x2000)", frame_info.tail)
- @patch.object(ndk_stack, "get_zip_info_from_offset")
+ @patch.object(ndkstack, "get_zip_info_from_offset")
@patch("zipfile.ZipFile")
def test_container_set_elf_in_apk_verify_fails(
- self, mock_zipclass, mock_get_zip_info
- ):
+ self, mock_zipclass: Mock, mock_get_zip_info: Mock
+ ) -> None:
mock_zipclass.return_value = self.mock_zipfile
mock_get_zip_info.return_value.filename = "libtest.so"
- frame_info = self.create_frame_info("/fake/fake.apk!libtest.so")
+ frame_info = self.create_frame_info("/fake/fake.apk!libtest.so (offset 0x2000)")
frame_info.verify_elf_file.side_effect = [False, False]
self.assertFalse(
frame_info.get_elf_file("/fake_dir/symbols", None, self.mock_tmp)
)
- self.assertEqual("/fake/fake.apk!libtest.so", frame_info.tail)
+ self.assertEqual("/fake/fake.apk!libtest.so (offset 0x2000)", frame_info.tail)
- def test_in_apk_file_does_not_exist(self):
+ def test_in_apk_file_does_not_exist(self) -> None:
frame_info = self.create_frame_info("/fake/fake.apk")
frame_info.verify_elf_file.return_value = False
with self.assertRaises(IOError):
frame_info.get_elf_file("/fake_dir/symbols", None, self.mock_tmp)
self.assertEqual("/fake/fake.apk", frame_info.tail)
- @patch.object(ndk_stack, "get_zip_info_from_offset")
+ @patch.object(ndkstack, "get_zip_info_from_offset")
@patch("zipfile.ZipFile")
- def test_in_apk_elf_not_in_apk(self, _, mock_get_zip_info):
+ def test_in_apk_elf_not_in_apk(self, _: Mock, mock_get_zip_info: Mock) -> None:
mock_get_zip_info.return_value = None
- frame_info = self.create_frame_info("/fake/fake.apk")
+ frame_info = self.create_frame_info("/fake/fake.apk (offset 0x2000)")
self.assertFalse(
frame_info.get_elf_file("/fake_dir/symbols", None, self.mock_tmp)
)
- self.assertEqual("/fake/fake.apk", frame_info.tail)
+ self.assertEqual("/fake/fake.apk (offset 0x2000)", frame_info.tail)
- @patch.object(ndk_stack, "get_zip_info_from_offset")
+ @patch.object(ndkstack, "get_zip_info_from_offset")
@patch("zipfile.ZipFile")
- def test_in_apk_elf_in_symbol_dir(self, mock_zipclass, mock_get_zip_info):
+ def test_in_apk_elf_in_symbol_dir(
+ self, mock_zipclass: Mock, mock_get_zip_info: Mock
+ ) -> None:
mock_zipclass.return_value = self.mock_zipfile
mock_get_zip_info.return_value.filename = "libtest.so"
- frame_info = self.create_frame_info("/fake/fake.apk")
+ frame_info = self.create_frame_info("/fake/fake.apk (offset 0x2000)")
frame_info.verify_elf_file.return_value = True
self.assertEqual(
"/fake_dir/symbols/libtest.so",
frame_info.get_elf_file("/fake_dir/symbols", None, self.mock_tmp),
)
- self.assertEqual("/fake/fake.apk!libtest.so", frame_info.tail)
+ self.assertEqual("/fake/fake.apk!libtest.so (offset 0x2000)", frame_info.tail)
- @patch.object(ndk_stack, "get_zip_info_from_offset")
+ @patch.object(ndkstack, "get_zip_info_from_offset")
@patch("zipfile.ZipFile")
- def test_in_apk_elf_in_apk(self, mock_zipclass, mock_get_zip_info):
+ def test_in_apk_elf_in_apk(
+ self, mock_zipclass: Mock, mock_get_zip_info: Mock
+ ) -> None:
mock_zipclass.return_value = self.mock_zipfile
mock_get_zip_info.return_value.filename = "libtest.so"
- frame_info = self.create_frame_info("/fake/fake.apk")
+ frame_info = self.create_frame_info("/fake/fake.apk (offset 0x2000)")
frame_info.verify_elf_file.side_effect = [False, True]
self.assertEqual(
"/fake_tmp/libtest.so",
frame_info.get_elf_file("/fake_dir/symbols", None, self.mock_tmp),
)
- self.assertEqual("/fake/fake.apk!libtest.so", frame_info.tail)
+ self.assertEqual("/fake/fake.apk!libtest.so (offset 0x2000)", frame_info.tail)
- @patch.object(ndk_stack, "get_zip_info_from_offset")
+ @patch.object(ndkstack, "get_zip_info_from_offset")
@patch("zipfile.ZipFile")
- def test_in_apk_elf_in_apk_verify_fails(self, mock_zipclass, mock_get_zip_info):
+ def test_in_apk_elf_in_apk_verify_fails(
+ self, mock_zipclass: Mock, mock_get_zip_info: Mock
+ ) -> None:
mock_zipclass.return_value = self.mock_zipfile
mock_get_zip_info.return_value.filename = "libtest.so"
- frame_info = self.create_frame_info("/fake/fake.apk")
+ frame_info = self.create_frame_info("/fake/fake.apk (offset 0x2000)")
frame_info.verify_elf_file.side_effect = [False, False]
self.assertFalse(
frame_info.get_elf_file("/fake_dir/symbols", None, self.mock_tmp)
)
- self.assertEqual("/fake/fake.apk!libtest.so", frame_info.tail)
+ self.assertEqual("/fake/fake.apk!libtest.so (offset 0x2000)", frame_info.tail)
if __name__ == "__main__":
diff --git a/tests/pytest/ndkstack/test_systemtest.py b/tests/pytest/ndkstack/test_systemtest.py
new file mode 100755
index 0000000..6ecdaee
--- /dev/null
+++ b/tests/pytest/ndkstack/test_systemtest.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""System tests for ndk-stack.py"""
+
+import os.path
+import subprocess
+import unittest
+
+import ndk.paths
+import ndk.toolchains
+from ndk.hosts import Host
+
+
+class SystemTests(unittest.TestCase):
+ """Complete system test of ndk-stack.py script."""
+
+ def system_test(self, backtrace_file: str, expected_file: str) -> None:
+ ndk_path = ndk.paths.get_install_path()
+ self.assertTrue(
+ ndk_path.exists(),
+ f"{ndk_path} does not exist. Build the NDK before running this test.",
+ )
+
+ ndk_stack = ndk_path / "ndk-stack"
+ if Host.current() is Host.Windows64:
+ ndk_stack = ndk_stack.with_suffix(".bat")
+
+ symbol_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "files")
+ proc = subprocess.run(
+ [
+ ndk_stack,
+ "-s",
+ symbol_dir,
+ "-i",
+ os.path.join(symbol_dir, backtrace_file),
+ ],
+ check=True,
+ capture_output=True,
+ text=True,
+ )
+
+ # Read the expected output.
+ file_name = os.path.join(symbol_dir, expected_file)
+ with open(file_name, "r", encoding="utf-8") as exp_file:
+ expected = exp_file.read()
+ expected = expected.replace("SYMBOL_DIR", symbol_dir)
+ self.maxDiff = None
+ self.assertEqual(expected, proc.stdout)
+
+ def test_all_stacks(self) -> None:
+ self.system_test("backtrace.txt", "expected.txt")
+
+ def test_multiple_crashes(self) -> None:
+ self.system_test("multiple.txt", "expected_multiple.txt")
+
+ def test_hwasan(self) -> None:
+ self.system_test("hwasan.txt", "expected_hwasan.txt")
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/wrap.sh/hwasan.sh b/wrap.sh/hwasan.sh
new file mode 100644
index 0000000..88445fc
--- /dev/null
+++ b/wrap.sh/hwasan.sh
@@ -0,0 +1,2 @@
+#!/system/bin/sh
+LD_HWASAN=1 exec "$@"