diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml
index e4eed4ba46c..3781befa9a9 100644
--- a/eng/Version.Details.xml
+++ b/eng/Version.Details.xml
@@ -4,21 +4,21 @@
-
+
https://github.com/dotnet/arcade
- b9e1dd7c1e1d05679831467cd3c051b9f8f84460
+ 12fdb9fc3fe43a861f1ae01747bfe6cef878d4cb
-
+
https://github.com/dotnet/arcade
- b9e1dd7c1e1d05679831467cd3c051b9f8f84460
+ 12fdb9fc3fe43a861f1ae01747bfe6cef878d4cb
-
+
https://github.com/dotnet/arcade
- b9e1dd7c1e1d05679831467cd3c051b9f8f84460
+ 12fdb9fc3fe43a861f1ae01747bfe6cef878d4cb
-
+
https://github.com/dotnet/arcade
- b9e1dd7c1e1d05679831467cd3c051b9f8f84460
+ 12fdb9fc3fe43a861f1ae01747bfe6cef878d4cb
diff --git a/eng/Versions.props b/eng/Versions.props
index f55eb7d93a9..3a718a7add5 100644
--- a/eng/Versions.props
+++ b/eng/Versions.props
@@ -19,9 +19,9 @@
- 10.0.0-beta.24611.4
- 10.0.0-beta.24611.4
- 10.0.0-beta.24611.4
+ 10.0.0-beta.25067.3
+ 10.0.0-beta.25067.3
+ 10.0.0-beta.25067.3
diff --git a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml
index 454fd75c7af..a8c0bd3b921 100644
--- a/eng/common/core-templates/post-build/post-build.yml
+++ b/eng/common/core-templates/post-build/post-build.yml
@@ -44,6 +44,11 @@ parameters:
displayName: Publish installers and checksums
type: boolean
default: true
+
+ - name: requireDefaultChannels
+ displayName: Fail the build if there are no default channel(s) registrations for the current build
+ type: boolean
+ default: false
- name: SDLValidationParameters
type: object
@@ -312,5 +317,6 @@ stages:
-PublishingInfraVersion ${{ parameters.publishingInfraVersion }}
-AzdoToken '$(System.AccessToken)'
-WaitPublishingFinish true
+ -RequireDefaultChannels ${{ parameters.requireDefaultChannels }}
-ArtifactsPublishingAdditionalParameters '${{ parameters.artifactsPublishingAdditionalParameters }}'
-SymbolPublishingAdditionalParameters '${{ parameters.symbolPublishingAdditionalParameters }}'
diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml
index 9abe726e54b..2a6a529482b 100644
--- a/eng/common/core-templates/steps/install-microbuild.yml
+++ b/eng/common/core-templates/steps/install-microbuild.yml
@@ -1,19 +1,49 @@
parameters:
- # Enable cleanup tasks for MicroBuild
+ # Enable install tasks for MicroBuild
enableMicrobuild: false
- # Enable cleanup tasks for MicroBuild on Mac and Linux
+ # Enable install tasks for MicroBuild on Mac and Linux
# Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT'
enableMicrobuildForMacAndLinux: false
+ # Location of the MicroBuild output folder
+ microBuildOutputFolder: '$(Agent.TempDirectory)'
continueOnError: false
steps:
- ${{ if eq(parameters.enableMicrobuild, 'true') }}:
- # Remove Python downgrade with https://github.com/dotnet/arcade/issues/15151
- - ${{ if and(eq(parameters.enableMicrobuildForMacAndLinux, 'true'), ne(variables['Agent.Os'], 'Windows_NT')) }}:
+ - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, 'true') }}:
+      # Install Python 3.12.x when Python > 3.12.x is installed - https://github.com/dotnet/source-build/issues/4802
+ - script: |
+ version=$(python3 --version | awk '{print $2}')
+ major=$(echo $version | cut -d. -f1)
+ minor=$(echo $version | cut -d. -f2)
+
+ installPython=false
+ if [ "$major" -gt 3 ] || { [ "$major" -eq 3 ] && [ "$minor" -gt 12 ]; }; then
+ installPython=true
+ fi
+
+ echo "Python version: $version."
+ echo "Install Python 3.12.x: $installPython."
+ echo "##vso[task.setvariable variable=installPython;isOutput=true]$installPython"
+ name: InstallPython
+ displayName: 'Determine Python installation'
+ condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
+
- task: UsePythonVersion@0
- displayName: 'Use Python 3.11.x'
inputs:
- versionSpec: '3.11.x'
+ versionSpec: '3.12.x'
+ displayName: 'Use Python 3.12.x'
+ condition: and(succeeded(), eq(variables['InstallPython.installPython'], 'true'), ne(variables['Agent.Os'], 'Windows_NT'))
+
+ # Needed to download the MicroBuild plugin nupkgs on Mac and Linux when nuget.exe is unavailable
+ - task: UseDotNet@2
+ displayName: Install .NET 8.0 SDK for MicroBuild Plugin
+ inputs:
+ packageType: sdk
+ version: 8.0.x
+ installationPath: ${{ parameters.microBuildOutputFolder }}/dotnet
+ workingDirectory: ${{ parameters.microBuildOutputFolder }}
+ condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'))
- task: MicroBuildSigningPlugin@4
displayName: Install MicroBuild plugin
@@ -25,7 +55,7 @@ steps:
azureSubscription: 'MicroBuild Signing Task (DevDiv)'
env:
TeamName: $(_TeamName)
- MicroBuildOutputFolderOverride: '$(Agent.TempDirectory)'
+ MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }}
SYSTEM_ACCESSTOKEN: $(System.AccessToken)
continueOnError: ${{ parameters.continueOnError }}
condition: and(
diff --git a/eng/common/cross/build-android-rootfs.sh b/eng/common/cross/build-android-rootfs.sh
index 7e9ba2b75ed..fbd8d80848a 100755
--- a/eng/common/cross/build-android-rootfs.sh
+++ b/eng/common/cross/build-android-rootfs.sh
@@ -6,10 +6,11 @@ usage()
{
echo "Creates a toolchain and sysroot used for cross-compiling for Android."
echo
- echo "Usage: $0 [BuildArch] [ApiLevel]"
+ echo "Usage: $0 [BuildArch] [ApiLevel] [--ndk NDKVersion]"
echo
echo "BuildArch is the target architecture of Android. Currently only arm64 is supported."
echo "ApiLevel is the target Android API level. API levels usually match to Android releases. See https://source.android.com/source/build-numbers.html"
+ echo "NDKVersion is the version of Android NDK. The default is r21. See https://developer.android.com/ndk/downloads/revision_history"
echo
echo "By default, the toolchain and sysroot will be generated in cross/android-rootfs/toolchain/[BuildArch]. You can change this behavior"
echo "by setting the TOOLCHAIN_DIR environment variable"
@@ -25,10 +26,15 @@ __BuildArch=arm64
__AndroidArch=aarch64
__AndroidToolchain=aarch64-linux-android
-for i in "$@"
- do
- lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")"
- case $lowerI in
+while :; do
+ if [[ "$#" -le 0 ]]; then
+ break
+ fi
+
+ i=$1
+
+ lowerI="$(echo $i | tr "[:upper:]" "[:lower:]")"
+ case $lowerI in
-?|-h|--help)
usage
exit 1
@@ -43,6 +49,10 @@ for i in "$@"
__AndroidArch=arm
__AndroidToolchain=arm-linux-androideabi
;;
+ --ndk)
+ shift
+ __NDK_Version=$1
+ ;;
*[0-9])
__ApiLevel=$i
;;
@@ -50,8 +60,17 @@ for i in "$@"
__UnprocessedBuildArgs="$__UnprocessedBuildArgs $i"
;;
esac
+ shift
done
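+
+# NDK r21/r22 release archives carry a "-x86_64" suffix in the download name and keep
+# the sysroot directly under the toolchain directory; later NDK releases drop the
+# suffix and ship the sysroot under toolchains/llvm/prebuilt/linux-x86_64.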
+if [[ "$__NDK_Version" == "r21" ]] || [[ "$__NDK_Version" == "r22" ]]; then
+ __NDK_File_Arch_Spec=-x86_64
+ __SysRoot=sysroot
+else
+ __NDK_File_Arch_Spec=
+ __SysRoot=toolchains/llvm/prebuilt/linux-x86_64/sysroot
+fi
+
# Obtain the location of the bash script to figure out where the root of the repo is.
__ScriptBaseDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
@@ -78,6 +97,7 @@ fi
echo "Target API level: $__ApiLevel"
echo "Target architecture: $__BuildArch"
+echo "NDK version: $__NDK_Version"
echo "NDK location: $__NDK_Dir"
echo "Target Toolchain location: $__ToolchainDir"
@@ -85,8 +105,8 @@ echo "Target Toolchain location: $__ToolchainDir"
if [ ! -d $__NDK_Dir ]; then
echo Downloading the NDK into $__NDK_Dir
mkdir -p $__NDK_Dir
- wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux-x86_64.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip
- unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux-x86_64.zip -d $__CrossDir
+ wget -q --progress=bar:force:noscroll --show-progress https://dl.google.com/android/repository/android-ndk-$__NDK_Version-linux$__NDK_File_Arch_Spec.zip -O $__CrossDir/android-ndk-$__NDK_Version-linux.zip
+ unzip -q $__CrossDir/android-ndk-$__NDK_Version-linux.zip -d $__CrossDir
fi
if [ ! -d $__lldb_Dir ]; then
@@ -116,16 +136,11 @@ for path in $(wget -qO- https://packages.termux.dev/termux-main-21/dists/stable/
fi
done
-cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/sysroot/usr/"
+cp -R "$__TmpDir/data/data/com.termux/files/usr/"* "$__ToolchainDir/$__SysRoot/usr/"
# Generate platform file for build.sh script to assign to __DistroRid
echo "Generating platform file..."
-echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/sysroot/android_platform
-
-echo "Now to build coreclr, libraries and installers; run:"
-echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
- --subsetCategory coreclr
-echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
- --subsetCategory libraries
-echo ROOTFS_DIR=\$\(realpath $__ToolchainDir/sysroot\) ./build.sh --cross --arch $__BuildArch \
- --subsetCategory installer
+echo "RID=android.${__ApiLevel}-${__BuildArch}" > $__ToolchainDir/$__SysRoot/android_platform
+
+echo "Now to build coreclr, libraries and host; run:"
+echo ROOTFS_DIR=$(realpath $__ToolchainDir/$__SysRoot) ./build.sh clr+libs+host --cross --arch $__BuildArch
diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh
index de980729790..74f399716ba 100755
--- a/eng/common/cross/build-rootfs.sh
+++ b/eng/common/cross/build-rootfs.sh
@@ -5,7 +5,7 @@ set -e
usage()
{
echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir ]"
- echo "BuildArch can be: arm(default), arm64, armel, armv6, ppc64le, riscv64, s390x, x64, x86"
+ echo "BuildArch can be: arm(default), arm64, armel, armv6, loongarch64, ppc64le, riscv64, s390x, x64, x86"
echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine"
echo " for alpine can be specified with version: alpineX.YY or alpineedge"
echo " for FreeBSD can be: freebsd13, freebsd14"
@@ -15,6 +15,7 @@ usage()
echo "llvmx[.y] - optional, LLVM version for LLVM related packages."
echo "--skipunmount - optional, will skip the unmount of rootfs folder."
echo "--skipsigcheck - optional, will skip package signature checks (allowing untrusted packages)."
+ echo "--skipemulation - optional, will skip qemu and debootstrap requirement when building environment for debian based systems."
echo "--use-mirror - optional, use mirror URL to fetch resources, when available."
echo "--jobs N - optional, restrict to N jobs."
exit 1
@@ -127,10 +128,12 @@ __AlpineKeys='
616adfeb:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq0BFD1D4lIxQcsqEpQzU\npNCYM3aP1V/fxxVdT4DWvSI53JHTwHQamKdMWtEXetWVbP5zSROniYKFXd/xrD9X\n0jiGHey3lEtylXRIPxe5s+wXoCmNLcJVnvTcDtwx/ne2NLHxp76lyc25At+6RgE6\nADjLVuoD7M4IFDkAsd8UQ8zM0Dww9SylIk/wgV3ZkifecvgUQRagrNUdUjR56EBZ\nraQrev4hhzOgwelT0kXCu3snbUuNY/lU53CoTzfBJ5UfEJ5pMw1ij6X0r5S9IVsy\nKLWH1hiO0NzU2c8ViUYCly4Fe9xMTFc6u2dy/dxf6FwERfGzETQxqZvSfrRX+GLj\n/QZAXiPg5178hT/m0Y3z5IGenIC/80Z9NCi+byF1WuJlzKjDcF/TU72zk0+PNM/H\nKuppf3JT4DyjiVzNC5YoWJT2QRMS9KLP5iKCSThwVceEEg5HfhQBRT9M6KIcFLSs\nmFjx9kNEEmc1E8hl5IR3+3Ry8G5/bTIIruz14jgeY9u5jhL8Vyyvo41jgt9sLHR1\n/J1TxKfkgksYev7PoX6/ZzJ1ksWKZY5NFoDXTNYUgzFUTOoEaOg3BAQKadb3Qbbq\nXIrxmPBdgrn9QI7NCgfnAY3Tb4EEjs3ON/BNyEhUENcXOH6I1NbcuBQ7g9P73kE4\nVORdoc8MdJ5eoKBpO8Ww8HECAwEAAQ==
616ae350:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAyduVzi1mWm+lYo2Tqt/0\nXkCIWrDNP1QBMVPrE0/ZlU2bCGSoo2Z9FHQKz/mTyMRlhNqTfhJ5qU3U9XlyGOPJ\npiM+b91g26pnpXJ2Q2kOypSgOMOPA4cQ42PkHBEqhuzssfj9t7x47ppS94bboh46\nxLSDRff/NAbtwTpvhStV3URYkxFG++cKGGa5MPXBrxIp+iZf9GnuxVdST5PGiVGP\nODL/b69sPJQNbJHVquqUTOh5Ry8uuD2WZuXfKf7/C0jC/ie9m2+0CttNu9tMciGM\nEyKG1/Xhk5iIWO43m4SrrT2WkFlcZ1z2JSf9Pjm4C2+HovYpihwwdM/OdP8Xmsnr\nDzVB4YvQiW+IHBjStHVuyiZWc+JsgEPJzisNY0Wyc/kNyNtqVKpX6dRhMLanLmy+\nf53cCSI05KPQAcGj6tdL+D60uKDkt+FsDa0BTAobZ31OsFVid0vCXtsbplNhW1IF\nHwsGXBTVcfXg44RLyL8Lk/2dQxDHNHzAUslJXzPxaHBLmt++2COa2EI1iWlvtznk\nOk9WP8SOAIj+xdqoiHcC4j72BOVVgiITIJNHrbppZCq6qPR+fgXmXa+sDcGh30m6\n9Wpbr28kLMSHiENCWTdsFij+NQTd5S47H7XTROHnalYDuF1RpS+DpQidT5tUimaT\nJZDr++FjKrnnijbyNF8b98UCAwEAAQ==
616db30d:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnpUpyWDWjlUk3smlWeA0\nlIMW+oJ38t92CRLHH3IqRhyECBRW0d0aRGtq7TY8PmxjjvBZrxTNDpJT6KUk4LRm\na6A6IuAI7QnNK8SJqM0DLzlpygd7GJf8ZL9SoHSH+gFsYF67Cpooz/YDqWrlN7Vw\ntO00s0B+eXy+PCXYU7VSfuWFGK8TGEv6HfGMALLjhqMManyvfp8hz3ubN1rK3c8C\nUS/ilRh1qckdbtPvoDPhSbTDmfU1g/EfRSIEXBrIMLg9ka/XB9PvWRrekrppnQzP\nhP9YE3x/wbFc5QqQWiRCYyQl/rgIMOXvIxhkfe8H5n1Et4VAorkpEAXdsfN8KSVv\nLSMazVlLp9GYq5SUpqYX3KnxdWBgN7BJoZ4sltsTpHQ/34SXWfu3UmyUveWj7wp0\nx9hwsPirVI00EEea9AbP7NM2rAyu6ukcm4m6ATd2DZJIViq2es6m60AE6SMCmrQF\nwmk4H/kdQgeAELVfGOm2VyJ3z69fQuywz7xu27S6zTKi05Qlnohxol4wVb6OB7qG\nLPRtK9ObgzRo/OPumyXqlzAi/Yvyd1ZQk8labZps3e16bQp8+pVPiumWioMFJDWV\nGZjCmyMSU8V6MB6njbgLHoyg2LCukCAeSjbPGGGYhnKLm1AKSoJh3IpZuqcKCk5C\n8CM1S15HxV78s9dFntEqIokCAwEAAQ==
+66ba20fe:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtfB12w4ZgqsXWZDfUAV/\n6Y4aHUKIu3q4SXrNZ7CXF9nXoAVYrS7NAxJdAodsY3vPCN0g5O8DFXR+390LdOuQ\n+HsGKCc1k5tX5ZXld37EZNTNSbR0k+NKhd9h6X3u6wqPOx7SIKxwAQR8qeeFq4pP\nrt9GAGlxtuYgzIIcKJPwE0dZlcBCg+GnptCUZXp/38BP1eYC+xTXSL6Muq1etYfg\nodXdb7Yl+2h1IHuOwo5rjgY5kpY7GcAs8AjGk3lDD/av60OTYccknH0NCVSmPoXK\nvrxDBOn0LQRNBLcAfnTKgHrzy0Q5h4TNkkyTgxkoQw5ObDk9nnabTxql732yy9BY\ns+hM9+dSFO1HKeVXreYSA2n1ndF18YAvAumzgyqzB7I4pMHXq1kC/8bONMJxwSkS\nYm6CoXKyavp7RqGMyeVpRC7tV+blkrrUml0BwNkxE+XnwDRB3xDV6hqgWe0XrifD\nYTfvd9ScZQP83ip0r4IKlq4GMv/R5shcCRJSkSZ6QSGshH40JYSoiwJf5FHbj9ND\n7do0UAqebWo4yNx63j/wb2ULorW3AClv0BCFSdPsIrCStiGdpgJDBR2P2NZOCob3\nG9uMj+wJD6JJg2nWqNJxkANXX37Qf8plgzssrhrgOvB0fjjS7GYhfkfmZTJ0wPOw\nA8+KzFseBh4UFGgue78KwgkCAwEAAQ==
'
__Keyring=
__KeyringFile="/usr/share/keyrings/ubuntu-archive-keyring.gpg"
__SkipSigCheck=0
+__SkipEmulation=0
__UseMirror=0
__UnprocessedBuildArgs=
@@ -179,6 +182,18 @@ while :; do
__Keyring="--keyring $__KeyringFile"
fi
;;
+ loongarch64)
+ __BuildArch=loongarch64
+ __AlpineArch=loongarch64
+ __QEMUArch=loongarch64
+ __UbuntuArch=loong64
+ __UbuntuSuites=unreleased
+ __LLDB_Package="liblldb-19-dev"
+
+ if [[ "$__CodeName" == "sid" ]]; then
+ __UbuntuRepo="http://ftp.ports.debian.org/debian-ports/"
+ fi
+ ;;
riscv64)
__BuildArch=riscv64
__AlpineArch=riscv64
@@ -339,10 +354,28 @@ while :; do
;;
sid) # Debian sid
__CodeName=sid
- __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
+ __UbuntuSuites=
- if [[ -z "$__UbuntuRepo" ]]; then
- __UbuntuRepo="http://ftp.debian.org/debian/"
+ # Debian-Ports architectures need different values
+ case "$__UbuntuArch" in
+ amd64|arm64|armel|armhf|i386|mips64el|ppc64el|riscv64|s390x)
+ __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg"
+
+ if [[ -z "$__UbuntuRepo" ]]; then
+ __UbuntuRepo="http://ftp.debian.org/debian/"
+ fi
+ ;;
+ *)
+ __KeyringFile="/usr/share/keyrings/debian-ports-archive-keyring.gpg"
+
+ if [[ -z "$__UbuntuRepo" ]]; then
+ __UbuntuRepo="http://ftp.ports.debian.org/debian-ports/"
+ fi
+ ;;
+ esac
+
+ if [[ -e "$__KeyringFile" ]]; then
+ __Keyring="--keyring $__KeyringFile"
fi
;;
tizen)
@@ -387,6 +420,9 @@ while :; do
--skipsigcheck)
__SkipSigCheck=1
;;
+ --skipemulation)
+ __SkipEmulation=1
+ ;;
--rootfsdir|-rootfsdir)
shift
__RootfsDir="$1"
@@ -419,10 +455,10 @@ case "$__AlpineVersion" in
elif [[ "$__AlpineArch" == "x86" ]]; then
__AlpineVersion=3.17 # minimum version that supports lldb-dev
__AlpinePackages+=" llvm15-libs"
- elif [[ "$__AlpineArch" == "riscv64" ]]; then
- __AlpineLlvmLibsLookup=1
- __AlpineVersion=edge # minimum version with APKINDEX.tar.gz (packages archive)
- elif [[ -n "$__AlpineVersion" ]]; then
+ elif [[ "$__AlpineArch" == "riscv64" || "$__AlpineArch" == "loongarch64" ]]; then
+ __AlpineVersion=3.21 # minimum version that supports lldb-dev
+ __AlpinePackages+=" llvm19-libs"
+ elif [[ -n "$__AlpineMajorVersion" ]]; then
# use whichever alpine version is provided and select the latest toolchain libs
__AlpineLlvmLibsLookup=1
else
@@ -505,11 +541,6 @@ if [[ "$__CodeName" == "alpine" ]]; then
echo "$__ApkToolsSHA512SUM $__ApkToolsDir/apk.static" | sha512sum -c
chmod +x "$__ApkToolsDir/apk.static"
- if [[ -f "/usr/bin/qemu-$__QEMUArch-static" ]]; then
- mkdir -p "$__RootfsDir"/usr/bin
- cp -v "/usr/bin/qemu-$__QEMUArch-static" "$__RootfsDir/usr/bin"
- fi
-
if [[ "$__AlpineVersion" == "edge" ]]; then
version=edge
else
@@ -529,6 +560,10 @@ if [[ "$__CodeName" == "alpine" ]]; then
__ApkSignatureArg="--keys-dir $__ApkKeysDir"
fi
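+
+  # apk's --no-scripts skips package install scripts, which would otherwise require
+  # QEMU user-mode emulation when the rootfs targets a foreign architecture.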
+ if [[ "$__SkipEmulation" == "1" ]]; then
+ __NoEmulationArg="--no-scripts"
+ fi
+
# initialize DB
# shellcheck disable=SC2086
"$__ApkToolsDir/apk.static" \
@@ -550,7 +585,7 @@ if [[ "$__CodeName" == "alpine" ]]; then
"$__ApkToolsDir/apk.static" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \
-X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \
- -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \
+ -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" $__NoEmulationArg \
add $__AlpinePackages
rm -r "$__ApkToolsDir"
@@ -745,25 +780,67 @@ elif [[ "$__CodeName" == "haiku" ]]; then
popd
rm -rf "$__RootfsDir/tmp"
elif [[ -n "$__CodeName" ]]; then
+ __Suites="$__CodeName $(for suite in $__UbuntuSuites; do echo -n "$__CodeName-$suite "; done)"
+
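+  # Without QEMU emulation debootstrap cannot populate a foreign-architecture rootfs,
+  # so hand off to install-debs.py, which only downloads and unpacks the .deb packages.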
+ if [[ "$__SkipEmulation" == "1" ]]; then
+ if [[ -z "$AR" ]]; then
+ if command -v ar &>/dev/null; then
+ AR="$(command -v ar)"
+ elif command -v llvm-ar &>/dev/null; then
+ AR="$(command -v llvm-ar)"
+ else
+ echo "Unable to find ar or llvm-ar on PATH, add them to PATH or set AR environment variable pointing to the available AR tool"
+ exit 1
+ fi
+ fi
+
+ PYTHON=${PYTHON_EXECUTABLE:-python3}
+
+ # shellcheck disable=SC2086,SC2046
+ echo running "$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \
+ $(for suite in $__Suites; do echo -n "--suite $suite "; done) \
+ $__UbuntuPackages
+
+ # shellcheck disable=SC2086,SC2046
+ "$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \
+ $(for suite in $__Suites; do echo -n "--suite $suite "; done) \
+ $__UbuntuPackages
+
+ exit 0
+ fi
+ __UpdateOptions=
if [[ "$__SkipSigCheck" == "0" ]]; then
__Keyring="$__Keyring --force-check-gpg"
+ else
+ __Keyring=
+ __UpdateOptions="--allow-unauthenticated --allow-insecure-repositories"
fi
# shellcheck disable=SC2086
echo running debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
- debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"
+ # shellcheck disable=SC2086
+ if ! debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"; then
+ echo "debootstrap failed! dumping debootstrap.log"
+ cat "$__RootfsDir/debootstrap/debootstrap.log"
+ exit 1
+ fi
+
+ rm -rf "$__RootfsDir"/etc/apt/*.{sources,list} "$__RootfsDir"/etc/apt/sources.list.d
mkdir -p "$__RootfsDir/etc/apt/sources.list.d/"
+
+ # shellcheck disable=SC2086
cat > "$__RootfsDir/etc/apt/sources.list.d/$__CodeName.sources" < token2) - (token1 < token2)
+ else:
+ return -1 if isinstance(token1, str) else 1
+
+ return len(tokens1) - len(tokens2)
+
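+# Debian versions have the form [epoch:]upstream[-revision]: the epoch is compared
+# first, then the upstream part, then the revision. For example, "1:1.0-1" sorts above
+# "2.0-1" because the epoch wins, and "1.10-1" sorts above "1.9-1" because numeric
+# tokens compare as integers rather than strings.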
+def compare_debian_versions(version1, version2):
+ """Compare two Debian package versions."""
+ epoch1, upstream1, revision1 = parse_debian_version(version1)
+ epoch2, upstream2, revision2 = parse_debian_version(version2)
+
+ if epoch1 != epoch2:
+ return epoch1 - epoch2
+
+ result = compare_upstream_version(upstream1, upstream2)
+ if result != 0:
+ return result
+
+ return compare_upstream_version(revision1, revision2)
+
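+# Breadth-first walk of the Depends graph starting from the requested packages.
+# Names that are not real packages are resolved through the Provides alias map, and
+# only the first alternative of each dependency is followed (version constraints are
+# ignored).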
+def resolve_dependencies(packages, aliases, desired_packages):
+ """Recursively resolves dependencies for the desired packages."""
+ resolved = []
+ to_process = deque(desired_packages)
+
+ while to_process:
+ current = to_process.popleft()
+ resolved_package = current if current in packages else aliases.get(current, [None])[0]
+
+ if not resolved_package:
+ print(f"Error: Package '{current}' was not found in the available packages.")
+ sys.exit(1)
+
+ if resolved_package not in resolved:
+ resolved.append(resolved_package)
+
+ deps = packages.get(resolved_package, {}).get("Depends", "")
+ if deps:
+ deps = [dep.split(' ')[0] for dep in deps.split(', ') if dep]
+ for dep in deps:
+ if dep not in resolved and dep not in to_process and dep in packages:
+ to_process.append(dep)
+
+ return resolved
+
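+# A Packages index is a series of RFC 822-style stanzas separated by blank lines,
+# for example:
+#   Package: libc6
+#   Version: 2.36-9
+#   Filename: pool/main/g/glibc/libc6_2.36-9_amd64.deb
+#   Depends: libgcc-s1
+#   Provides: libc6-amd64
+# Only the highest version of each package is kept; Provides entries feed the alias map.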
+def parse_package_index(content):
+ """Parses the Packages.gz file and returns package information."""
+ packages = {}
+ aliases = {}
+ entries = re.split(r'\n\n+', content)
+
+ for entry in entries:
+ fields = dict(re.findall(r'^(\S+): (.+)$', entry, re.MULTILINE))
+ if "Package" in fields:
+ package_name = fields["Package"]
+ version = fields.get("Version")
+ filename = fields.get("Filename")
+ depends = fields.get("Depends")
+ provides = fields.get("Provides", None)
+
+ # Only update if package_name is not in packages or if the new version is higher
+ if package_name not in packages or compare_debian_versions(version, packages[package_name]["Version"]) > 0:
+ packages[package_name] = {
+ "Version": version,
+ "Filename": filename,
+ "Depends": depends
+ }
+
+ # Update aliases if package provides any alternatives
+ if provides:
+ provides_list = [x.strip() for x in provides.split(",")]
+ for alias in provides_list:
+ # Strip version specifiers
+ alias_name = re.sub(r'\s*\(=.*\)', '', alias)
+ if alias_name not in aliases:
+ aliases[alias_name] = []
+ if package_name not in aliases[alias_name]:
+ aliases[alias_name].append(package_name)
+
+ return packages, aliases
+
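+# Resolve the full dependency closure, download every .deb in parallel, then unpack
+# them in reverse resolution order so dependencies land before the packages that
+# pulled them in.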
+def install_packages(mirror, packages_info, aliases, tmp_dir, extract_dir, ar_tool, desired_packages):
+ """Downloads .deb files and extracts them."""
+ resolved_packages = resolve_dependencies(packages_info, aliases, desired_packages)
+ print(f"Resolved packages (including dependencies): {resolved_packages}")
+
+ packages_to_download = {}
+
+ for pkg in resolved_packages:
+ if pkg in packages_info:
+ packages_to_download[pkg] = packages_info[pkg]
+
+ if pkg in aliases:
+ for alias in aliases[pkg]:
+ if alias in packages_info:
+ packages_to_download[alias] = packages_info[alias]
+
+ asyncio.run(download_deb_files_parallel(mirror, packages_to_download, tmp_dir))
+
+ package_to_deb_file_map = {}
+ for pkg in resolved_packages:
+ pkg_info = packages_info.get(pkg)
+ if pkg_info:
+ deb_filename = pkg_info.get("Filename")
+ if deb_filename:
+ deb_file_path = os.path.join(tmp_dir, os.path.basename(deb_filename))
+ package_to_deb_file_map[pkg] = deb_file_path
+
+ for pkg in reversed(resolved_packages):
+ deb_file = package_to_deb_file_map.get(pkg)
+ if deb_file and os.path.exists(deb_file):
+ extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool)
+
+ print("All done!")
+
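+# A .deb is an ar(1) archive containing debian-binary, control.tar.*, and data.tar.*.
+# Only the data.tar.* payload is unpacked: `ar p` streams it out, and zstd archives
+# are decompressed with the zstandard module because tarfile cannot read them.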
+def extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool):
+ """Extract .deb file contents"""
+
+ os.makedirs(extract_dir, exist_ok=True)
+
+ with tempfile.TemporaryDirectory(dir=tmp_dir) as tmp_subdir:
+ result = subprocess.run(f"{ar_tool} t {os.path.abspath(deb_file)}", cwd=tmp_subdir, check=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+ tar_filename = None
+ for line in result.stdout.decode().splitlines():
+ if line.startswith("data.tar"):
+ tar_filename = line.strip()
+ break
+
+ if not tar_filename:
+ raise FileNotFoundError(f"Could not find 'data.tar.*' in {deb_file}.")
+
+ tar_file_path = os.path.join(tmp_subdir, tar_filename)
+ print(f"Extracting {tar_filename} from {deb_file}..")
+
+ subprocess.run(f"{ar_tool} p {os.path.abspath(deb_file)} {tar_filename} > {tar_file_path}", check=True, shell=True)
+
+ file_extension = os.path.splitext(tar_file_path)[1].lower()
+
+ if file_extension == ".xz":
+ mode = "r:xz"
+ elif file_extension == ".gz":
+ mode = "r:gz"
+ elif file_extension == ".zst":
+ # zstd is not supported by standard library yet
+ decompressed_tar_path = tar_file_path.replace(".zst", "")
+ with open(tar_file_path, "rb") as zst_file, open(decompressed_tar_path, "wb") as decompressed_file:
+ dctx = zstandard.ZstdDecompressor()
+ dctx.copy_stream(zst_file, decompressed_file)
+
+ tar_file_path = decompressed_tar_path
+ mode = "r"
+ else:
+ raise ValueError(f"Unsupported compression format: {file_extension}")
+
+ with tarfile.open(tar_file_path, mode) as tar:
+ tar.extractall(path=extract_dir, filter='fully_trusted')
+
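+# Collapse a split /lib into /usr/lib and leave /lib as a symlink to it, giving the
+# rootfs a merged-/usr style library layout.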
+def finalize_setup(rootfsdir):
+ lib_dir = os.path.join(rootfsdir, 'lib')
+ usr_lib_dir = os.path.join(rootfsdir, 'usr', 'lib')
+
+ if os.path.exists(lib_dir):
+ if os.path.islink(lib_dir):
+ os.remove(lib_dir)
+ else:
+ os.makedirs(usr_lib_dir, exist_ok=True)
+
+ for item in os.listdir(lib_dir):
+ src = os.path.join(lib_dir, item)
+ dest = os.path.join(usr_lib_dir, item)
+
+ if os.path.isdir(src):
+ shutil.copytree(src, dest, dirs_exist_ok=True)
+ else:
+ shutil.copy2(src, dest)
+
+ shutil.rmtree(lib_dir)
+
+ os.symlink(usr_lib_dir, lib_dir)
+
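+# Example invocation (mirrors the call in build-rootfs.sh; the package names here are
+# illustrative):
+#   python3 install-debs.py --arch loong64 --suite sid --suite unreleased \
+#     --mirror http://ftp.ports.debian.org/debian-ports/ --rootfsdir "$ROOTFS_DIR" \
+#     --artool ar build-essential symlinks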
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description="Generate rootfs for .NET runtime on Debian-like OS")
+ parser.add_argument("--distro", required=False, help="Distro name (e.g., debian, ubuntu, etc.)")
+ parser.add_argument("--arch", required=True, help="Architecture (e.g., amd64, loong64, etc.)")
+ parser.add_argument("--rootfsdir", required=True, help="Destination directory.")
+ parser.add_argument('--suite', required=True, action='append', help='Specify one or more repository suites to collect index data.')
+ parser.add_argument("--mirror", required=False, help="Mirror (e.g., http://ftp.debian.org/debian-ports etc.)")
+ parser.add_argument("--artool", required=False, default="ar", help="ar tool to extract debs (e.g., ar, llvm-ar etc.)")
+ parser.add_argument("packages", nargs="+", help="List of package names to be installed.")
+
+ args = parser.parse_args()
+
+ if args.mirror is None:
+ if args.distro == "ubuntu":
+ args.mirror = "http://archive.ubuntu.com/ubuntu" if args.arch in ["amd64", "i386"] else "http://ports.ubuntu.com/ubuntu-ports"
+ elif args.distro == "debian":
+ args.mirror = "http://ftp.debian.org/debian-ports"
+ else:
+ raise Exception("Unsupported distro")
+
+ DESIRED_PACKAGES = args.packages + [ # base packages
+ "dpkg",
+ "busybox",
+ "libc-bin",
+ "base-files",
+ "base-passwd",
+ "debianutils"
+ ]
+
+ print(f"Creating rootfs. rootfsdir: {args.rootfsdir}, distro: {args.distro}, arch: {args.arch}, suites: {args.suite}, mirror: {args.mirror}")
+
+ package_index_content = asyncio.run(download_package_index_parallel(args.mirror, args.arch, args.suite))
+
+ packages_info, aliases = parse_package_index(package_index_content)
+
+ with tempfile.TemporaryDirectory() as tmp_dir:
+ install_packages(args.mirror, packages_info, aliases, tmp_dir, args.rootfsdir, args.artool, DESIRED_PACKAGES)
+
+ finalize_setup(args.rootfsdir)
diff --git a/eng/common/native/install-dependencies.sh b/eng/common/native/install-dependencies.sh
index 3eef7409f72..71bde0e4573 100644
--- a/eng/common/native/install-dependencies.sh
+++ b/eng/common/native/install-dependencies.sh
@@ -27,7 +27,7 @@ case "$os" in
libssl-dev libkrb5-dev zlib1g-dev pigz cpio
localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8
- elif [ "$ID" = "fedora" ]; then
+ elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ]; then
dnf install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel zlib-devel lttng-ust-devel pigz cpio
elif [ "$ID" = "alpine" ]; then
apk add build-base cmake bash curl clang llvm-dev lld lldb krb5-dev lttng-ust-dev icu-dev zlib-dev openssl-dev pigz cpio
diff --git a/eng/common/post-build/publish-using-darc.ps1 b/eng/common/post-build/publish-using-darc.ps1
index 90b58e32a87..a261517ef90 100644
--- a/eng/common/post-build/publish-using-darc.ps1
+++ b/eng/common/post-build/publish-using-darc.ps1
@@ -5,7 +5,8 @@ param(
[Parameter(Mandatory=$false)][string] $MaestroApiEndPoint = 'https://maestro.dot.net',
[Parameter(Mandatory=$true)][string] $WaitPublishingFinish,
[Parameter(Mandatory=$false)][string] $ArtifactsPublishingAdditionalParameters,
- [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters
+ [Parameter(Mandatory=$false)][string] $SymbolPublishingAdditionalParameters,
+ [Parameter(Mandatory=$false)][string] $RequireDefaultChannels
)
try {
@@ -33,6 +34,10 @@ try {
if ("false" -eq $WaitPublishingFinish) {
$optionalParams.Add("--no-wait") | Out-Null
}
+
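+  # darc's --default-channels-required flag makes add-build-to-channel fail when the
+  # build has no default channel registrations (opted into via requireDefaultChannels).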
+ if ("true" -eq $RequireDefaultChannels) {
+ $optionalParams.Add("--default-channels-required") | Out-Null
+ }
& $darc add-build-to-channel `
--id $buildId `
diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1
index bd80ccccb51..80f9130b150 100644
--- a/eng/common/tools.ps1
+++ b/eng/common/tools.ps1
@@ -42,7 +42,7 @@
[bool]$useInstalledDotNetCli = if (Test-Path variable:useInstalledDotNetCli) { $useInstalledDotNetCli } else { $true }
# Enable repos to use a particular version of the on-line dotnet-install scripts.
-# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.ps1
+# default URL: https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.ps1
[string]$dotnetInstallScriptVersion = if (Test-Path variable:dotnetInstallScriptVersion) { $dotnetInstallScriptVersion } else { 'v1' }
# True to use global NuGet cache instead of restoring packages to repository-local directory.
@@ -262,7 +262,7 @@ function GetDotNetInstallScript([string] $dotnetRoot) {
if (!(Test-Path $installScript)) {
Create-Directory $dotnetRoot
$ProgressPreference = 'SilentlyContinue' # Don't display the console progress UI - it's a huge perf hit
- $uri = "https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.ps1"
+ $uri = "https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.ps1"
Retry({
Write-Host "GET $uri"
@@ -320,7 +320,7 @@ function InstallDotNet([string] $dotnetRoot,
$variations += @($installParameters)
$dotnetBuilds = $installParameters.Clone()
- $dotnetbuilds.AzureFeed = "https://dotnetbuilds.azureedge.net/public"
+ $dotnetbuilds.AzureFeed = "https://ci.dot.net/public"
$variations += @($dotnetBuilds)
if ($runtimeSourceFeed) {
diff --git a/eng/common/tools.sh b/eng/common/tools.sh
index 79b4a28e170..df203b51784 100755
--- a/eng/common/tools.sh
+++ b/eng/common/tools.sh
@@ -54,7 +54,7 @@ warn_as_error=${warn_as_error:-true}
use_installed_dotnet_cli=${use_installed_dotnet_cli:-true}
# Enable repos to use a particular version of the on-line dotnet-install scripts.
-# default URL: https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.sh
+# default URL: https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.sh
dotnetInstallScriptVersion=${dotnetInstallScriptVersion:-'v1'}
# True to use global NuGet cache instead of restoring packages to repository-local directory.
@@ -232,7 +232,7 @@ function InstallDotNet {
local public_location=("${installParameters[@]}")
variations+=(public_location)
- local dotnetbuilds=("${installParameters[@]}" --azure-feed "https://dotnetbuilds.azureedge.net/public")
+ local dotnetbuilds=("${installParameters[@]}" --azure-feed "https://ci.dot.net/public")
variations+=(dotnetbuilds)
if [[ -n "${6:-}" ]]; then
@@ -295,7 +295,7 @@ function with_retries {
function GetDotNetInstallScript {
local root=$1
local install_script="$root/dotnet-install.sh"
- local install_script_url="https://dotnet.microsoft.com/download/dotnet/scripts/$dotnetInstallScriptVersion/dotnet-install.sh"
+ local install_script_url="https://builds.dotnet.microsoft.com/dotnet/scripts/v1/dotnet-install.sh"
if [[ ! -a "$install_script" ]]; then
mkdir -p "$root"
diff --git a/global.json b/global.json
index d998257091a..fe8e276ebf7 100644
--- a/global.json
+++ b/global.json
@@ -1,6 +1,6 @@
{
"tools": {
- "dotnet": "10.0.100-alpha.1.24573.1",
+ "dotnet": "10.0.100-alpha.1.25064.3",
"runtimes": {
"aspnetcore": [
"3.1.5",
@@ -11,10 +11,10 @@
}
},
"sdk": {
- "version": "10.0.100-alpha.1.24573.1"
+ "version": "10.0.100-alpha.1.25064.3"
},
"msbuild-sdks": {
- "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.24611.4",
+ "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.25067.3",
"Microsoft.DotNet.Helix.Sdk": "8.0.0-beta.23409.5"
}
}